mirror of
https://github.com/jointakahe/takahe.git
synced 2024-06-12 18:19:24 +00:00
Compare commits
397 commits
Author | SHA1 | Date | |
---|---|---|---|
024b01a144 | |||
8f17b81912 | |||
7c34ac78ed | |||
72eb6a6271 | |||
b2223ddf42 | |||
045a499ddf | |||
0fa48578f2 | |||
f86f3a49e4 | |||
2f4daa02bd | |||
798222dcdb | |||
74b3ac551a | |||
4a09379e09 | |||
448092d6d9 | |||
5d508a17ec | |||
d07482f5a8 | |||
123c20efb1 | |||
83607779cd | |||
837320f461 | |||
5f28d702f8 | |||
ac7fef4b28 | |||
6855e74c6f | |||
a58d7ccd8f | |||
1a728ea023 | |||
b031880e41 | |||
81d019ad0d | |||
5267e4108c | |||
b122e2beda | |||
ae1bfc49a7 | |||
1ceef59bec | |||
2f546dfa74 | |||
cc9e397f60 | |||
dc397903b2 | |||
debf4670e8 | |||
e49bfc4775 | |||
308dd033e1 | |||
460d1d7e1c | |||
eb0b0d775c | |||
74f69a3813 | |||
9fc497f826 | |||
ab3648e05d | |||
476f817464 | |||
99e7fb8639 | |||
87344b47b5 | |||
aa39ef0571 | |||
110a5e64dc | |||
bae76c3063 | |||
9bb40ca7f6 | |||
af7f1173fc | |||
30e9b1f62d | |||
95089c0c61 | |||
d815aa53e1 | |||
e6e64f1000 | |||
c3bf7563b4 | |||
e577d020ee | |||
57cefa967c | |||
6fdfdca442 | |||
e17f17385a | |||
5cc74900b1 | |||
24577761ed | |||
039adae797 | |||
9368996a5b | |||
84ded2f3a5 | |||
07d187309e | |||
8cc1691857 | |||
b60e807b91 | |||
1e8a392e57 | |||
8c832383e0 | |||
6c83d7b67b | |||
dd532e4425 | |||
1e76430f74 | |||
ddf24d376e | |||
2a0bbf0d5d | |||
555046ac4d | |||
b003af64cc | |||
671807beb8 | |||
2a50928f27 | |||
70b9e3b900 | |||
faa181807c | |||
679f0def99 | |||
1262c619bb | |||
0c72327ab7 | |||
84703bbc45 | |||
93dfc85cf7 | |||
67d755e6d3 | |||
4a9109271d | |||
a69499c742 | |||
c4a2b62016 | |||
1b7bb8c501 | |||
f3bab95827 | |||
4a8bdec90c | |||
cc6355f60b | |||
83b57a0998 | |||
aac75dd4c3 | |||
759d5ac052 | |||
1dd076ff7d | |||
d6cdcb1d83 | |||
188e5a2446 | |||
0915b17c4b | |||
31c743319e | |||
11e3ca12d4 | |||
824f5b289c | |||
2d140f2e97 | |||
b2a9b334be | |||
5549d21528 | |||
5f49f9b2bb | |||
1cc9c16b8c | |||
91cf2f3a30 | |||
68eea142b1 | |||
3f8213f54a | |||
2523de4249 | |||
933f6660d5 | |||
2fda9ad2b4 | |||
4458594f04 | |||
c93a27e418 | |||
709f2527ac | |||
7f483af8d3 | |||
e34e4c0c77 | |||
542e3836af | |||
82a9c18205 | |||
a8b31e9f6a | |||
d6e891426c | |||
226a60bec7 | |||
9038e498d5 | |||
bb8f589da7 | |||
f88ad38294 | |||
2040124147 | |||
68dc2dc9ed | |||
568b87dadb | |||
79e1f0da14 | |||
cec04e8ddb | |||
b2768e7f2e | |||
9bc18a1190 | |||
5297b98273 | |||
888f4ad36c | |||
46679a5c73 | |||
f4bbe78bd5 | |||
f5a3971ef8 | |||
31c4f89124 | |||
6a94dcfcc6 | |||
67f64a4313 | |||
1fb02b06e1 | |||
d6c9ba0819 | |||
744c2825d9 | |||
b3b58df2b1 | |||
9775fa8991 | |||
51ffcc6192 | |||
dcc4a5723e | |||
f256217d1b | |||
b6d9f1dc95 | |||
930aab384e | |||
eeee385a61 | |||
a4e6033a0b | |||
dbc25f538d | |||
7862795993 | |||
8ff6100e94 | |||
709dc86162 | |||
8f57aa5f37 | |||
7331591432 | |||
ac54c7ff81 | |||
5759e1d5c1 | |||
7d1558a2ab | |||
b31c5156ff | |||
216915ddb8 | |||
96bc64fd01 | |||
ba4414dbce | |||
e45195bb02 | |||
ea7d5f307c | |||
1994671541 | |||
c702b1b24d | |||
c94b54dde8 | |||
74c4819ee2 | |||
79c1be03a6 | |||
902891ff9e | |||
542678cab5 | |||
cedcc8fa7c | |||
758e6633c4 | |||
61830a9a9c | |||
6e8149675c | |||
3bd01b2b3d | |||
6f4abd5aae | |||
56da914340 | |||
1b9cf24d09 | |||
85b4910829 | |||
5ea3d5d143 | |||
05992d6553 | |||
afc94f6313 | |||
bd6d1ae8de | |||
78eacf165e | |||
552a150e57 | |||
6411a375ba | |||
026e1be357 | |||
9e016aaa5f | |||
d9cab99859 | |||
9aff13118a | |||
2b56b33e38 | |||
6fb9a5ea96 | |||
42d6eb6000 | |||
c3f5cf8d05 | |||
179dc0e2cf | |||
e625fae13d | |||
1f3f28e8ff | |||
b677bf5123 | |||
5f39e50c63 | |||
aa592b496a | |||
79aea1b8ba | |||
e43d0a052c | |||
427744f5a7 | |||
33b24d99bc | |||
e382994e73 | |||
cc0bbfb93b | |||
2cb125845b | |||
91738748ec | |||
8adc17c5ea | |||
1ac6519316 | |||
9a0008db06 | |||
1c5ef675f0 | |||
04ad97c69b | |||
43ecf19cd1 | |||
7863f89aba | |||
5d2ed9edfe | |||
99a0a16fb7 | |||
21e286c5d2 | |||
94271b34ac | |||
4eada68d9f | |||
51b7c0d919 | |||
f7948a55bf | |||
36676fad59 | |||
d8fc81a9a6 | |||
d8113ed707 | |||
4744f7d47c | |||
0f66900d9c | |||
b9e8f19e90 | |||
28ed74df24 | |||
1ffbb2e860 | |||
a6922cb9d6 | |||
93c0af992b | |||
aeba38b8ae | |||
e6f827dd7f | |||
8ea1fb9ce6 | |||
1670c92269 | |||
9779f867bf | |||
a22ba4859b | |||
773c9b2afc | |||
6437a5aeb7 | |||
8a06b3d78f | |||
653ab07fd6 | |||
75ba7e3160 | |||
5dd2ebee29 | |||
425c77e085 | |||
f967f6c697 | |||
ccded99a63 | |||
0ba0971baa | |||
dc55ee109a | |||
a2e8d05349 | |||
46947279b0 | |||
349e1b8801 | |||
c5d51a8b11 | |||
201aa20f27 | |||
f150a3eee9 | |||
10c7da96c7 | |||
ee3b51c28f | |||
51ae78a33c | |||
47d9dc2488 | |||
ec85060df8 | |||
536f0a4488 | |||
cfb4975fdf | |||
edef03efb1 | |||
9b5152723a | |||
a7be3b2072 | |||
8ff4575976 | |||
e5485b1430 | |||
0d115bac15 | |||
cc75863b8f | |||
bd70769329 | |||
18623358be | |||
a34cab6dfb | |||
a18940aacf | |||
77ba7b02b9 | |||
50f8c8040d | |||
5309ef425a | |||
333317875b | |||
fd9015a7c5 | |||
ef4862ff46 | |||
9b6ceee490 | |||
54e7755080 | |||
c3caf26f22 | |||
0a848aa884 | |||
cfe18932b8 | |||
b721833b4f | |||
18ab453baa | |||
6c72f25fbe | |||
2ba0c1759e | |||
71234ef159 | |||
57dfc330e0 | |||
de9261251e | |||
e39355ceb5 | |||
06f6257645 | |||
9dded19172 | |||
34b4a6cc10 | |||
9983544c2a | |||
b44be55609 | |||
f689110e0b | |||
5e65d569bf | |||
94fe247b17 | |||
1f44e93518 | |||
f69c7304c1 | |||
3f3e815407 | |||
1ffcd5e6d1 | |||
76076e3387 | |||
ade954c2cd | |||
2675ed69ee | |||
7f23ae5f52 | |||
21d565d282 | |||
feb7a673eb | |||
71ab6b98a6 | |||
3679448cdf | |||
9876b81f66 | |||
8b3106b852 | |||
81fa9a6d34 | |||
cb66b9559d | |||
fc8b5be4a0 | |||
18b50ce0e6 | |||
fa688a5a73 | |||
ae74d65157 | |||
e68158202e | |||
e20aea1b9c | |||
d51a08ef8c | |||
113db4ab3a | |||
d6f558f89a | |||
28cfbf2ef5 | |||
150cd9503d | |||
0c1e51322f | |||
024d956e5e | |||
4a1e375e3c | |||
97d40912ed | |||
fb881dd5de | |||
91116fe6f8 | |||
ecec5d6c0a | |||
0cfd0813f2 | |||
fa003e2be6 | |||
4bea1006b1 | |||
f16dcd04e4 | |||
4276260c2e | |||
a875dd7a54 | |||
f4a8a96b81 | |||
b5b4a8ac5d | |||
c5c0e2bfe5 | |||
d51f2883aa | |||
eede9682b2 | |||
20edb20563 | |||
d8cee4097f | |||
db186fcd73 | |||
35b14066a5 | |||
925ff6edcd | |||
326bdec94a | |||
be7ce6ed62 | |||
1425ae0bde | |||
af47e9dfd6 | |||
f1fa4aa1e2 | |||
1768781fed | |||
2da399db42 | |||
936bb81b45 | |||
9ae9e03b9d | |||
69b0430819 | |||
4547ed1c71 | |||
801fe2e58a | |||
a7a292a84c | |||
252737f846 | |||
68e764a36e | |||
e1363f0dac | |||
110ae452b6 | |||
73adcadf27 | |||
c10aa46fc5 | |||
03bfc417df | |||
bb0ee1b152 | |||
efd5f481e9 | |||
b19f05859d | |||
5f1d7b5253 | |||
43ba09a16e | |||
1e54a0b2be | |||
6e34313aaf | |||
6028232d3d | |||
b522f43ffc | |||
002276ab9a | |||
ae12aadd38 | |||
15159418df | |||
0fc8ff4965 | |||
e5ef34a1b9 | |||
63fcd0b665 | |||
b09e7187ce | |||
13fc4b42de | |||
8fe99718f3 | |||
4956c7cd7a | |||
8d6f78e29e | |||
d8ff43dc28 | |||
05adbace24 | |||
eea83214cb |
|
@ -5,7 +5,10 @@
|
|||
.mypy_cache
|
||||
.pre-commit-config.yaml
|
||||
.venv
|
||||
fly.toml
|
||||
/fly.*
|
||||
/static-collected
|
||||
/takahe/local_settings.py
|
||||
__pycache__/
|
||||
media
|
||||
notes.md
|
||||
venv
|
||||
|
|
3
.github/workflows/docker-dev.yml
vendored
3
.github/workflows/docker-dev.yml
vendored
|
@ -8,6 +8,7 @@ on:
|
|||
jobs:
|
||||
push_to_registry:
|
||||
name: Push Docker image to Docker Hub
|
||||
if: github.repository_owner == 'jointakahe'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
|
@ -18,6 +19,8 @@ jobs:
|
|||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
version: v0.9.1
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a
|
||||
|
|
2
.github/workflows/docker-release.yml
vendored
2
.github/workflows/docker-release.yml
vendored
|
@ -18,6 +18,8 @@ jobs:
|
|||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
version: v0.9.1
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a
|
||||
|
|
2
.github/workflows/test-docs.yml
vendored
2
.github/workflows/test-docs.yml
vendored
|
@ -8,7 +8,7 @@ jobs:
|
|||
timeout-minutes: 5
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.10"]
|
||||
python-version: ["3.11"]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
|
|
9
.github/workflows/test.yml
vendored
9
.github/workflows/test.yml
vendored
|
@ -4,6 +4,8 @@ on:
|
|||
push:
|
||||
paths-ignore:
|
||||
- 'docs/**'
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- 'docs/**'
|
||||
|
@ -15,17 +17,13 @@ jobs:
|
|||
timeout-minutes: 8
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.10", "3.11"]
|
||||
python-version: ["3.11", "3.12"]
|
||||
db:
|
||||
- "postgres://postgres:postgres@localhost/postgres"
|
||||
- "sqlite:///takahe.db"
|
||||
include:
|
||||
- db: "postgres://postgres:postgres@localhost/postgres"
|
||||
db_name: postgres
|
||||
search: true
|
||||
- db: "sqlite:///takahe.db"
|
||||
db_name: sqlite
|
||||
search: false
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:15
|
||||
|
@ -48,6 +46,7 @@ jobs:
|
|||
cache: pip
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
sudo apt-get install -y libmemcached-dev libwebp-dev libjpeg-dev
|
||||
python -m pip install -r requirements-dev.txt
|
||||
- name: Run pytest
|
||||
env:
|
||||
|
|
2
.gitignore
vendored
2
.gitignore
vendored
|
@ -11,11 +11,11 @@
|
|||
/build
|
||||
/cache/
|
||||
/docs/_build
|
||||
/fly.*
|
||||
/media/
|
||||
/static-collected
|
||||
/takahe/local_settings.py
|
||||
__pycache__/
|
||||
api-test.*
|
||||
fly.toml
|
||||
notes.md
|
||||
notes.py
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.4.0
|
||||
rev: v4.5.0
|
||||
hooks:
|
||||
- id: check-case-conflict
|
||||
- id: check-merge-conflict
|
||||
|
@ -15,43 +15,42 @@ repos:
|
|||
- id: trailing-whitespace
|
||||
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: "v3.3.0"
|
||||
rev: "v3.15.0"
|
||||
hooks:
|
||||
- id: pyupgrade
|
||||
args: [--py310-plus]
|
||||
args: [--py311-plus]
|
||||
|
||||
- repo: https://github.com/adamchainz/django-upgrade
|
||||
rev: "1.12.0"
|
||||
rev: "1.15.0"
|
||||
hooks:
|
||||
- id: django-upgrade
|
||||
args: [--target-version, "4.1"]
|
||||
args: [--target-version, "4.2"]
|
||||
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 22.10.0
|
||||
- repo: https://github.com/psf/black-pre-commit-mirror
|
||||
rev: 23.11.0
|
||||
hooks:
|
||||
- id: black
|
||||
|
||||
- repo: https://github.com/pycqa/isort
|
||||
rev: 5.10.1
|
||||
rev: 5.12.0
|
||||
hooks:
|
||||
- id: isort
|
||||
args: ["--profile=black"]
|
||||
|
||||
- repo: https://github.com/pycqa/flake8
|
||||
rev: 6.0.0
|
||||
rev: 6.1.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
|
||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||
rev: v0.991
|
||||
rev: v1.6.1
|
||||
hooks:
|
||||
- id: mypy
|
||||
exclude: "^tests/"
|
||||
additional_dependencies:
|
||||
[
|
||||
types-pyopenssl,
|
||||
types-bleach,
|
||||
types-mock,
|
||||
types-cachetools,
|
||||
types-python-dateutil,
|
||||
]
|
||||
[types-pyopenssl, types-mock, types-cachetools, types-python-dateutil]
|
||||
|
||||
- repo: https://github.com/rtts/djhtml
|
||||
rev: 3.0.6
|
||||
hooks:
|
||||
- id: djhtml
|
||||
|
|
|
@ -4,7 +4,7 @@ version: 2
|
|||
build:
|
||||
os: ubuntu-22.04
|
||||
tools:
|
||||
python: "3.10"
|
||||
python: "3.11"
|
||||
|
||||
# Build documentation in the docs/ directory with Sphinx
|
||||
sphinx:
|
||||
|
|
41
Makefile
41
Makefile
|
@ -14,3 +14,44 @@ compose_up:
|
|||
|
||||
compose_down:
|
||||
docker-compose -f docker/docker-compose.yml down
|
||||
|
||||
# Development Setup
|
||||
.venv:
|
||||
python3 -m venv .venv
|
||||
. .venv/bin/activate
|
||||
python3 -m pip install -r requirements-dev.txt
|
||||
|
||||
.git/hooks/pre-commit: .venv
|
||||
python3 -m pre_commit install
|
||||
|
||||
.env:
|
||||
cp development.env .env
|
||||
|
||||
_PHONY: setup_local
|
||||
setup_local: .venv .env .git/hooks/pre-commit
|
||||
|
||||
_PHONY: startdb stopdb
|
||||
startdb:
|
||||
docker compose -f docker/docker-compose.yml up db -d
|
||||
|
||||
stopdb:
|
||||
docker compose -f docker/docker-compose.yml stop db
|
||||
|
||||
_PHONY: superuser
|
||||
createsuperuser: setup_local startdb
|
||||
python3 -m manage createsuperuser
|
||||
|
||||
_PHONY: test
|
||||
test: setup_local
|
||||
python3 -m pytest
|
||||
|
||||
# Active development
|
||||
_PHONY: migrations server stator
|
||||
migrations: setup_local startdb
|
||||
python3 -m manage migrate
|
||||
|
||||
runserver: setup_local startdb
|
||||
python3 -m manage runserver
|
||||
|
||||
runstator: setup_local startdb
|
||||
python3 -m manage runstator
|
||||
|
|
13
README.md
13
README.md
|
@ -1,17 +1,16 @@
|
|||
![takahē](static/img/logo-128.png)
|
||||
|
||||
A *beta* Fediverse server for microblogging/"toots". Not fully functional yet -
|
||||
A *beta* Fediverse server for microblogging. Not fully polished yet -
|
||||
we're still working towards a 1.0!
|
||||
|
||||
**Current version: [0.6.1](https://docs.jointakahe.org/en/latest/releases/0.6/)**
|
||||
**Current version: [0.11.0](https://docs.jointakahe.org/en/latest/releases/0.11/)**
|
||||
|
||||
Key features:
|
||||
|
||||
- Multiple domain support
|
||||
- Multiple identities per user (and can be shared between users)
|
||||
- Desktop, mobile and PWA-compatible web UI (with minimal JavaScript)
|
||||
- Mastodon-compatible client API (supports existing apps)
|
||||
- Easy deployment (web worker, background worker, and one database)
|
||||
- **Multiple domain support**, allowing you to host as many domains as you like on a single running instance.
|
||||
- **Multiple identity support**, allowing users to represent themselves in as many different ways as they want.
|
||||
- **Easy deployment and maintenance**, so you can focus on looking after your community rather than trying to wrangle with the software.
|
||||
- **Mastodon Client API compatible**, so users can use their favourite client apps just as they would with Mastodon or other compatible servers.
|
||||
|
||||
For more background and information, see [jointakahe.org](https://jointakahe.org/).
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from asgiref.sync import async_to_sync
|
||||
from django.contrib import admin
|
||||
from django.db import models
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
|
@ -73,7 +73,15 @@ class EmojiAdmin(admin.ModelAdmin):
|
|||
|
||||
readonly_fields = ["preview", "created", "updated", "to_ap_tag"]
|
||||
|
||||
actions = ["force_execution", "approve_emoji", "reject_emoji"]
|
||||
actions = ["force_execution", "approve_emoji", "reject_emoji", "copy_to_local"]
|
||||
|
||||
def delete_queryset(self, request, queryset):
|
||||
for instance in queryset:
|
||||
# individual deletes to ensure file is deleted
|
||||
instance.delete()
|
||||
|
||||
def delete_model(self, request, obj):
|
||||
super().delete_model(request, obj)
|
||||
|
||||
@admin.action(description="Force Execution")
|
||||
def force_execution(self, request, queryset):
|
||||
|
@ -96,10 +104,53 @@ class EmojiAdmin(admin.ModelAdmin):
|
|||
f'<img src="{instance.full_url().relative}" style="height: 22px">'
|
||||
)
|
||||
|
||||
@admin.action(description="Copy Emoji to Local")
|
||||
def copy_to_local(self, request, queryset):
|
||||
emojis = {}
|
||||
for instance in queryset:
|
||||
emoji = instance.copy_to_local(save=False)
|
||||
if emoji:
|
||||
emojis[emoji.shortcode] = emoji
|
||||
|
||||
Emoji.objects.bulk_create(emojis.values(), batch_size=50, ignore_conflicts=True)
|
||||
Emoji.locals = Emoji.load_locals()
|
||||
|
||||
|
||||
@admin.register(PostAttachment)
|
||||
class PostAttachmentAdmin(admin.ModelAdmin):
|
||||
list_display = ["id", "post", "created"]
|
||||
list_display = ["id", "post", "state", "created"]
|
||||
list_filter = ["state", "mimetype"]
|
||||
search_fields = ["name", "remote_url", "search_handle", "search_service_handle"]
|
||||
raw_id_fields = ["post"]
|
||||
|
||||
actions = ["guess_mimetypes"]
|
||||
|
||||
def get_search_results(self, request, queryset, search_term):
|
||||
from django.db.models.functions import Concat
|
||||
|
||||
queryset = queryset.annotate(
|
||||
search_handle=Concat(
|
||||
"post__author__username", models.Value("@"), "post__author__domain_id"
|
||||
),
|
||||
search_service_handle=Concat(
|
||||
"post__author__username",
|
||||
models.Value("@"),
|
||||
"post__author__domain__service_domain",
|
||||
),
|
||||
)
|
||||
return super().get_search_results(request, queryset, search_term)
|
||||
|
||||
@admin.action(description="Update mimetype based upon filename")
|
||||
def guess_mimetypes(self, request, queryset):
|
||||
import mimetypes
|
||||
|
||||
for instance in queryset:
|
||||
if instance.remote_url:
|
||||
mimetype, _ = mimetypes.guess_type(instance.remote_url)
|
||||
if not mimetype:
|
||||
mimetype = "application/octet-stream"
|
||||
instance.mimetype = mimetype
|
||||
instance.save()
|
||||
|
||||
|
||||
class PostAttachmentInline(admin.StackedInline):
|
||||
|
@ -111,18 +162,24 @@ class PostAttachmentInline(admin.StackedInline):
|
|||
class PostAdmin(admin.ModelAdmin):
|
||||
list_display = ["id", "type", "author", "state", "created"]
|
||||
list_filter = ("type", "local", "visibility", "state", "created")
|
||||
raw_id_fields = ["to", "mentions", "author", "emojis"]
|
||||
actions = ["reparse_hashtags"]
|
||||
search_fields = ["content"]
|
||||
raw_id_fields = ["emojis"]
|
||||
autocomplete_fields = ["to", "mentions", "author"]
|
||||
search_fields = ["content", "search_handle", "search_service_handle"]
|
||||
inlines = [PostAttachmentInline]
|
||||
readonly_fields = ["created", "updated", "state_changed", "object_json"]
|
||||
|
||||
@admin.action(description="Reprocess content for hashtags")
|
||||
def reparse_hashtags(self, request, queryset):
|
||||
for instance in queryset:
|
||||
instance.hashtags = Hashtag.hashtags_from_content(instance.content) or None
|
||||
instance.save()
|
||||
async_to_sync(instance.ensure_hashtags)()
|
||||
def get_search_results(self, request, queryset, search_term):
|
||||
from django.db.models.functions import Concat
|
||||
|
||||
queryset = queryset.annotate(
|
||||
search_handle=Concat(
|
||||
"author__username", models.Value("@"), "author__domain_id"
|
||||
),
|
||||
search_service_handle=Concat(
|
||||
"author__username", models.Value("@"), "author__domain__service_domain"
|
||||
),
|
||||
)
|
||||
return super().get_search_results(request, queryset, search_term)
|
||||
|
||||
@admin.display(description="ActivityPub JSON")
|
||||
def object_json(self, instance):
|
||||
|
@ -140,8 +197,8 @@ class TimelineEventAdmin(admin.ModelAdmin):
|
|||
list_display = ["id", "identity", "published", "type"]
|
||||
list_filter = (IdentityLocalFilter, "type")
|
||||
readonly_fields = ["created"]
|
||||
autocomplete_fields = ["identity"]
|
||||
raw_id_fields = [
|
||||
"identity",
|
||||
"subject_post",
|
||||
"subject_identity",
|
||||
"subject_post_interaction",
|
||||
|
@ -153,9 +210,10 @@ class TimelineEventAdmin(admin.ModelAdmin):
|
|||
|
||||
@admin.register(FanOut)
|
||||
class FanOutAdmin(admin.ModelAdmin):
|
||||
list_display = ["id", "state", "created", "state_attempted", "type", "identity"]
|
||||
list_filter = (IdentityLocalFilter, "type", "state", "state_attempted")
|
||||
raw_id_fields = ["identity", "subject_post", "subject_post_interaction"]
|
||||
list_display = ["id", "state", "created", "state_next_attempt", "type", "identity"]
|
||||
list_filter = (IdentityLocalFilter, "type", "state")
|
||||
raw_id_fields = ["subject_post", "subject_post_interaction"]
|
||||
autocomplete_fields = ["identity"]
|
||||
readonly_fields = ["created", "updated", "state_changed"]
|
||||
actions = ["force_execution"]
|
||||
search_fields = ["identity__username"]
|
||||
|
@ -171,9 +229,10 @@ class FanOutAdmin(admin.ModelAdmin):
|
|||
|
||||
@admin.register(PostInteraction)
|
||||
class PostInteractionAdmin(admin.ModelAdmin):
|
||||
list_display = ["id", "state", "state_attempted", "type", "identity", "post"]
|
||||
list_display = ["id", "state", "state_next_attempt", "type", "identity", "post"]
|
||||
list_filter = (IdentityLocalFilter, "type", "state")
|
||||
raw_id_fields = ["identity", "post"]
|
||||
raw_id_fields = ["post"]
|
||||
autocomplete_fields = ["identity"]
|
||||
|
||||
def has_add_permission(self, request, obj=None):
|
||||
return False
|
||||
|
|
0
activities/management/__init__.py
Normal file
0
activities/management/__init__.py
Normal file
0
activities/management/commands/__init__.py
Normal file
0
activities/management/commands/__init__.py
Normal file
83
activities/management/commands/pruneposts.py
Normal file
83
activities/management/commands/pruneposts.py
Normal file
|
@ -0,0 +1,83 @@
|
|||
import datetime
|
||||
import sys
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db.models import Q
|
||||
from django.utils import timezone
|
||||
|
||||
from activities.models import Post
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Prunes posts that are old, not local and have no local interaction"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--number",
|
||||
"-n",
|
||||
type=int,
|
||||
default=500,
|
||||
help="The maximum number of posts to prune at once",
|
||||
)
|
||||
|
||||
def handle(self, number: int, *args, **options):
|
||||
if not settings.SETUP.REMOTE_PRUNE_HORIZON:
|
||||
print("Pruning has been disabled as REMOTE_PRUNE_HORIZON=0")
|
||||
sys.exit(2)
|
||||
# Find a set of posts that match the initial criteria
|
||||
print(f"Running query to find up to {number} old posts...")
|
||||
posts = (
|
||||
Post.objects.filter(
|
||||
local=False,
|
||||
created__lt=timezone.now()
|
||||
- datetime.timedelta(days=settings.SETUP.REMOTE_PRUNE_HORIZON),
|
||||
)
|
||||
.exclude(
|
||||
Q(interactions__identity__local=True)
|
||||
| Q(visibility=Post.Visibilities.mentioned)
|
||||
)
|
||||
.order_by("?")[:number]
|
||||
)
|
||||
post_ids_and_uris = dict(posts.values_list("object_uri", "id"))
|
||||
print(f" found {len(post_ids_and_uris)}")
|
||||
|
||||
# Fetch all of their replies and exclude any that have local replies
|
||||
print("Excluding ones with local replies...")
|
||||
replies = Post.objects.filter(
|
||||
local=True,
|
||||
in_reply_to__in=post_ids_and_uris.keys(),
|
||||
).values_list("in_reply_to", flat=True)
|
||||
for reply in replies:
|
||||
if reply and reply in post_ids_and_uris:
|
||||
del post_ids_and_uris[reply]
|
||||
print(f" narrowed down to {len(post_ids_and_uris)}")
|
||||
|
||||
# Fetch all the posts that they are replies to, and don't delete ones
|
||||
# that are replies to local posts
|
||||
print("Excluding ones that are replies to local posts...")
|
||||
in_reply_tos = (
|
||||
Post.objects.filter(id__in=post_ids_and_uris.values())
|
||||
.values_list("in_reply_to", flat=True)
|
||||
.distinct()
|
||||
)
|
||||
local_object_uris = Post.objects.filter(
|
||||
local=True, object_uri__in=in_reply_tos
|
||||
).values_list("object_uri", flat=True)
|
||||
final_post_ids = list(
|
||||
Post.objects.filter(id__in=post_ids_and_uris.values())
|
||||
.exclude(in_reply_to__in=local_object_uris)
|
||||
.values_list("id", flat=True)
|
||||
)
|
||||
print(f" narrowed down to {len(final_post_ids)}")
|
||||
|
||||
# Delete them
|
||||
if not final_post_ids:
|
||||
sys.exit(0)
|
||||
|
||||
print("Deleting...")
|
||||
_, deleted = Post.objects.filter(id__in=final_post_ids).delete()
|
||||
print("Deleted:")
|
||||
for model, model_deleted in deleted.items():
|
||||
print(f" {model}: {model_deleted}")
|
||||
sys.exit(1)
|
|
@ -10,12 +10,12 @@ import activities.models.fan_out
|
|||
import activities.models.post
|
||||
import activities.models.post_attachment
|
||||
import activities.models.post_interaction
|
||||
import core.snowflake
|
||||
import core.uploads
|
||||
import stator.models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
|
@ -28,11 +28,10 @@ class Migration(migrations.Migration):
|
|||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
models.BigIntegerField(
|
||||
default=core.snowflake.Snowflake.generate_post,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("state_ready", models.BooleanField(default=True)),
|
||||
|
@ -111,11 +110,10 @@ class Migration(migrations.Migration):
|
|||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
models.BigIntegerField(
|
||||
default=core.snowflake.Snowflake.generate_post_interaction,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("state_ready", models.BooleanField(default=True)),
|
||||
|
@ -264,6 +262,8 @@ class Migration(migrations.Migration):
|
|||
("undo_interaction", "Undo Interaction"),
|
||||
("identity_edited", "Identity Edited"),
|
||||
("identity_deleted", "Identity Deleted"),
|
||||
("identity_created", "Identity Created"),
|
||||
("identity_moved", "Identity Moved"),
|
||||
],
|
||||
max_length=100,
|
||||
),
|
||||
|
@ -324,8 +324,10 @@ class Migration(migrations.Migration):
|
|||
("mentioned", "Mentioned"),
|
||||
("liked", "Liked"),
|
||||
("followed", "Followed"),
|
||||
("follow_requested", "Follow Requested"),
|
||||
("boosted", "Boosted"),
|
||||
("announcement", "Announcement"),
|
||||
("identity_created", "Identity Created"),
|
||||
],
|
||||
max_length=100,
|
||||
),
|
||||
|
|
|
@ -8,7 +8,6 @@ import stator.models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("activities", "0001_initial"),
|
||||
]
|
||||
|
|
|
@ -10,7 +10,6 @@ import core.uploads
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("activities", "0002_hashtag"),
|
||||
]
|
||||
|
|
|
@ -11,7 +11,6 @@ import stator.models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("users", "0003_identity_followers_etc"),
|
||||
("activities", "0003_postattachment_null_thumb"),
|
||||
|
|
|
@ -14,7 +14,6 @@ def timelineevent_populate_published(apps, schema_editor):
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("activities", "0004_emoji_post_emojis"),
|
||||
]
|
||||
|
|
|
@ -5,7 +5,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("users", "0005_report"),
|
||||
("activities", "0005_post_type_timeline_urls"),
|
||||
|
|
17
activities/migrations/0007_post_stats.py
Normal file
17
activities/migrations/0007_post_stats.py
Normal file
|
@ -0,0 +1,17 @@
|
|||
# Generated by Django 4.1.4 on 2022-12-31 20:41
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("activities", "0006_fanout_subject_identity_alter_fanout_type"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="post",
|
||||
name="stats",
|
||||
field=models.JSONField(blank=True, null=True),
|
||||
),
|
||||
]
|
83
activities/migrations/0008_state_and_post_indexes.py
Normal file
83
activities/migrations/0008_state_and_post_indexes.py
Normal file
|
@ -0,0 +1,83 @@
|
|||
# Generated by Django 4.1.4 on 2023-01-01 17:34
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("activities", "0007_post_stats"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="fanout",
|
||||
index=models.Index(
|
||||
fields=["state", "state_attempted"], name="ix_fanout_state_attempted"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="fanout",
|
||||
index=models.Index(
|
||||
condition=models.Q(("state_locked_until__isnull", False)),
|
||||
fields=["state_locked_until", "state"],
|
||||
name="ix_fanout_state_locked",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="hashtag",
|
||||
index=models.Index(
|
||||
fields=["state", "state_attempted"], name="ix_hashtag_state_attempted"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="hashtag",
|
||||
index=models.Index(
|
||||
condition=models.Q(("state_locked_until__isnull", False)),
|
||||
fields=["state_locked_until", "state"],
|
||||
name="ix_hashtag_state_locked",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="post",
|
||||
index=models.Index(
|
||||
fields=["visibility", "local", "published"],
|
||||
name="ix_post_local_public_published",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="post",
|
||||
index=models.Index(
|
||||
fields=["visibility", "local", "created"],
|
||||
name="ix_post_local_public_created",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="post",
|
||||
index=models.Index(
|
||||
fields=["state", "state_attempted"], name="ix_post_state_attempted"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="post",
|
||||
index=models.Index(
|
||||
condition=models.Q(("state_locked_until__isnull", False)),
|
||||
fields=["state_locked_until", "state"],
|
||||
name="ix_post_state_locked",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="postattachment",
|
||||
index=models.Index(
|
||||
fields=["state", "state_attempted"],
|
||||
name="ix_postattachm_state_attempted",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="postattachment",
|
||||
index=models.Index(
|
||||
condition=models.Q(("state_locked_until__isnull", False)),
|
||||
fields=["state_locked_until", "state"],
|
||||
name="ix_postattachm_state_locked",
|
||||
),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,21 @@
|
|||
# Generated by Django 4.1.4 on 2023-01-14 19:01
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("users", "0011_announcement"),
|
||||
("activities", "0008_state_and_post_indexes"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterIndexTogether(
|
||||
name="timelineevent",
|
||||
index_together={
|
||||
("identity", "type", "subject_post", "subject_identity"),
|
||||
("identity", "type", "subject_identity"),
|
||||
("identity", "created"),
|
||||
},
|
||||
),
|
||||
]
|
40
activities/migrations/0010_stator_indexes.py
Normal file
40
activities/migrations/0010_stator_indexes.py
Normal file
|
@ -0,0 +1,40 @@
|
|||
# Generated by Django 4.1.4 on 2023-02-04 05:23
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("users", "0013_stator_indexes"),
|
||||
("activities", "0009_alter_timelineevent_index_together"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterIndexTogether(
|
||||
name="emoji",
|
||||
index_together={("state_ready", "state_locked_until", "state")},
|
||||
),
|
||||
migrations.AlterIndexTogether(
|
||||
name="fanout",
|
||||
index_together={("state_ready", "state_locked_until", "state")},
|
||||
),
|
||||
migrations.AlterIndexTogether(
|
||||
name="hashtag",
|
||||
index_together={("state_ready", "state_locked_until", "state")},
|
||||
),
|
||||
migrations.AlterIndexTogether(
|
||||
name="post",
|
||||
index_together={("state_ready", "state_locked_until", "state")},
|
||||
),
|
||||
migrations.AlterIndexTogether(
|
||||
name="postattachment",
|
||||
index_together={("state_ready", "state_locked_until", "state")},
|
||||
),
|
||||
migrations.AlterIndexTogether(
|
||||
name="postinteraction",
|
||||
index_together={
|
||||
("type", "identity", "post"),
|
||||
("state_ready", "state_locked_until", "state"),
|
||||
},
|
||||
),
|
||||
]
|
|
@ -0,0 +1,25 @@
|
|||
# Generated by Django 4.1.7 on 2023-02-14 22:30
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("activities", "0010_stator_indexes"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="postinteraction",
|
||||
name="value",
|
||||
field=models.CharField(blank=True, max_length=50, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="postinteraction",
|
||||
name="type",
|
||||
field=models.CharField(
|
||||
choices=[("like", "Like"), ("boost", "Boost"), ("vote", "Vote")],
|
||||
max_length=100,
|
||||
),
|
||||
),
|
||||
]
|
19
activities/migrations/0012_in_reply_to_index.py
Normal file
19
activities/migrations/0012_in_reply_to_index.py
Normal file
|
@ -0,0 +1,19 @@
|
|||
# Generated by Django 4.1.4 on 2023-03-05 17:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("activities", "0011_postinteraction_value_alter_postinteraction_type"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="post",
|
||||
name="in_reply_to",
|
||||
field=models.CharField(
|
||||
blank=True, db_index=True, max_length=500, null=True
|
||||
),
|
||||
),
|
||||
]
|
25
activities/migrations/0013_postattachment_author.py
Normal file
25
activities/migrations/0013_postattachment_author.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
# Generated by Django 4.1.4 on 2023-03-12 22:14
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("users", "0015_bookmark"),
|
||||
("activities", "0012_in_reply_to_index"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="postattachment",
|
||||
name="author",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="attachments",
|
||||
to="users.identity",
|
||||
),
|
||||
),
|
||||
]
|
23
activities/migrations/0014_post_content_vector_gin.py
Normal file
23
activities/migrations/0014_post_content_vector_gin.py
Normal file
|
@ -0,0 +1,23 @@
|
|||
# Generated by Django 4.2 on 2023-04-29 18:49
|
||||
|
||||
import django.contrib.postgres.indexes
|
||||
import django.contrib.postgres.search
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("activities", "0013_postattachment_author"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="post",
|
||||
index=django.contrib.postgres.indexes.GinIndex(
|
||||
django.contrib.postgres.search.SearchVector(
|
||||
"content", config="english"
|
||||
),
|
||||
name="content_vector_gin",
|
||||
),
|
||||
),
|
||||
]
|
25
activities/migrations/0015_alter_postinteraction_type.py
Normal file
25
activities/migrations/0015_alter_postinteraction_type.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
# Generated by Django 4.1.7 on 2023-04-24 08:04
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("activities", "0014_post_content_vector_gin"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="postinteraction",
|
||||
name="type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("like", "Like"),
|
||||
("boost", "Boost"),
|
||||
("vote", "Vote"),
|
||||
("pin", "Pin"),
|
||||
],
|
||||
max_length=100,
|
||||
),
|
||||
),
|
||||
]
|
91
activities/migrations/0016_index_together_migration.py
Normal file
91
activities/migrations/0016_index_together_migration.py
Normal file
|
@ -0,0 +1,91 @@
|
|||
# Generated by Django 4.2.1 on 2023-05-13 17:29
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("activities", "0015_alter_postinteraction_type"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameIndex(
|
||||
model_name="emoji",
|
||||
new_name="activities__state_r_aa72ec_idx",
|
||||
old_fields=("state_ready", "state_locked_until", "state"),
|
||||
),
|
||||
migrations.RenameIndex(
|
||||
model_name="fanout",
|
||||
new_name="activities__state_r_aae3b4_idx",
|
||||
old_fields=("state_ready", "state_locked_until", "state"),
|
||||
),
|
||||
migrations.RenameIndex(
|
||||
model_name="hashtag",
|
||||
new_name="activities__state_r_5703be_idx",
|
||||
old_fields=("state_ready", "state_locked_until", "state"),
|
||||
),
|
||||
migrations.RenameIndex(
|
||||
model_name="post",
|
||||
new_name="activities__state_r_b8f1ff_idx",
|
||||
old_fields=("state_ready", "state_locked_until", "state"),
|
||||
),
|
||||
migrations.RenameIndex(
|
||||
model_name="postattachment",
|
||||
new_name="activities__state_r_4e981c_idx",
|
||||
old_fields=("state_ready", "state_locked_until", "state"),
|
||||
),
|
||||
migrations.RenameIndex(
|
||||
model_name="postinteraction",
|
||||
new_name="activities__state_r_981d8c_idx",
|
||||
old_fields=("state_ready", "state_locked_until", "state"),
|
||||
),
|
||||
migrations.RenameIndex(
|
||||
model_name="postinteraction",
|
||||
new_name="activities__type_75d2e4_idx",
|
||||
old_fields=("type", "identity", "post"),
|
||||
),
|
||||
migrations.RenameIndex(
|
||||
model_name="timelineevent",
|
||||
new_name="activities__identit_0b93c3_idx",
|
||||
old_fields=("identity", "type", "subject_post", "subject_identity"),
|
||||
),
|
||||
migrations.RenameIndex(
|
||||
model_name="timelineevent",
|
||||
new_name="activities__identit_cc2290_idx",
|
||||
old_fields=("identity", "type", "subject_identity"),
|
||||
),
|
||||
migrations.RenameIndex(
|
||||
model_name="timelineevent",
|
||||
new_name="activities__identit_872fbb_idx",
|
||||
old_fields=("identity", "created"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="emoji",
|
||||
index=models.Index(
|
||||
fields=["state", "state_attempted"], name="ix_emoji_state_attempted"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="emoji",
|
||||
index=models.Index(
|
||||
condition=models.Q(("state_locked_until__isnull", False)),
|
||||
fields=["state_locked_until", "state"],
|
||||
name="ix_emoji_state_locked",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="postinteraction",
|
||||
index=models.Index(
|
||||
fields=["state", "state_attempted"],
|
||||
name="ix_postinterac_state_attempted",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="postinteraction",
|
||||
index=models.Index(
|
||||
condition=models.Q(("state_locked_until__isnull", False)),
|
||||
fields=["state_locked_until", "state"],
|
||||
name="ix_postinterac_state_locked",
|
||||
),
|
||||
),
|
||||
]
|
234
activities/migrations/0017_stator_next_change.py
Normal file
234
activities/migrations/0017_stator_next_change.py
Normal file
|
@ -0,0 +1,234 @@
|
|||
# Generated by Django 4.2.1 on 2023-07-05 22:18
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("activities", "0016_index_together_migration"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveIndex(
|
||||
model_name="emoji",
|
||||
name="activities__state_r_aa72ec_idx",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="emoji",
|
||||
name="ix_emoji_state_attempted",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="emoji",
|
||||
name="ix_emoji_state_locked",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="fanout",
|
||||
name="ix_fanout_state_attempted",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="fanout",
|
||||
name="ix_fanout_state_locked",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="fanout",
|
||||
name="activities__state_r_aae3b4_idx",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="hashtag",
|
||||
name="ix_hashtag_state_attempted",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="hashtag",
|
||||
name="ix_hashtag_state_locked",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="hashtag",
|
||||
name="activities__state_r_5703be_idx",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="post",
|
||||
name="ix_post_state_attempted",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="post",
|
||||
name="ix_post_state_locked",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="post",
|
||||
name="activities__state_r_b8f1ff_idx",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="postattachment",
|
||||
name="ix_postattachm_state_attempted",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="postattachment",
|
||||
name="ix_postattachm_state_locked",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="postattachment",
|
||||
name="activities__state_r_4e981c_idx",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="postinteraction",
|
||||
name="activities__state_r_981d8c_idx",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="postinteraction",
|
||||
name="ix_postinterac_state_attempted",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="postinteraction",
|
||||
name="ix_postinterac_state_locked",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="emoji",
|
||||
name="state_attempted",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="emoji",
|
||||
name="state_ready",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="fanout",
|
||||
name="state_attempted",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="fanout",
|
||||
name="state_ready",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="hashtag",
|
||||
name="state_attempted",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="hashtag",
|
||||
name="state_ready",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="post",
|
||||
name="state_attempted",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="post",
|
||||
name="state_ready",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="postattachment",
|
||||
name="state_attempted",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="postattachment",
|
||||
name="state_ready",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="postinteraction",
|
||||
name="state_attempted",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="postinteraction",
|
||||
name="state_ready",
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="emoji",
|
||||
name="state_next_attempt",
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="fanout",
|
||||
name="state_next_attempt",
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="hashtag",
|
||||
name="state_next_attempt",
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="post",
|
||||
name="state_next_attempt",
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="postattachment",
|
||||
name="state_next_attempt",
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="postinteraction",
|
||||
name="state_next_attempt",
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="emoji",
|
||||
name="state_locked_until",
|
||||
field=models.DateTimeField(blank=True, db_index=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="fanout",
|
||||
name="state_locked_until",
|
||||
field=models.DateTimeField(blank=True, db_index=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="hashtag",
|
||||
name="state_locked_until",
|
||||
field=models.DateTimeField(blank=True, db_index=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="post",
|
||||
name="state_locked_until",
|
||||
field=models.DateTimeField(blank=True, db_index=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="postattachment",
|
||||
name="state_locked_until",
|
||||
field=models.DateTimeField(blank=True, db_index=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="postinteraction",
|
||||
name="state_locked_until",
|
||||
field=models.DateTimeField(blank=True, db_index=True, null=True),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="emoji",
|
||||
index=models.Index(
|
||||
fields=["state", "state_next_attempt", "state_locked_until"],
|
||||
name="ix_emoji_state_next",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="fanout",
|
||||
index=models.Index(
|
||||
fields=["state", "state_next_attempt", "state_locked_until"],
|
||||
name="ix_fanout_state_next",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="hashtag",
|
||||
index=models.Index(
|
||||
fields=["state", "state_next_attempt", "state_locked_until"],
|
||||
name="ix_hashtag_state_next",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="post",
|
||||
index=models.Index(
|
||||
fields=["state", "state_next_attempt", "state_locked_until"],
|
||||
name="ix_post_state_next",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="postattachment",
|
||||
index=models.Index(
|
||||
fields=["state", "state_next_attempt", "state_locked_until"],
|
||||
name="ix_postattachm_state_next",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="postinteraction",
|
||||
index=models.Index(
|
||||
fields=["state", "state_next_attempt", "state_locked_until"],
|
||||
name="ix_postinterac_state_next",
|
||||
),
|
||||
),
|
||||
]
|
17
activities/migrations/0018_timelineevent_dismissed.py
Normal file
17
activities/migrations/0018_timelineevent_dismissed.py
Normal file
|
@ -0,0 +1,17 @@
|
|||
# Generated by Django 4.2.2 on 2023-07-09 17:25
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("activities", "0017_stator_next_change"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="timelineevent",
|
||||
name="dismissed",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,22 @@
|
|||
# Generated by Django 4.2.3 on 2023-10-30 07:44
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("activities", "0018_timelineevent_dismissed"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="postattachment",
|
||||
name="focal_x",
|
||||
field=models.FloatField(blank=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="postattachment",
|
||||
name="focal_y",
|
||||
field=models.FloatField(blank=True, null=True),
|
||||
),
|
||||
]
|
|
@ -1,19 +1,19 @@
|
|||
import mimetypes
|
||||
import re
|
||||
from functools import partial
|
||||
from typing import ClassVar
|
||||
|
||||
import httpx
|
||||
import urlman
|
||||
from asgiref.sync import sync_to_async
|
||||
from cachetools import TTLCache, cached
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.files.base import ContentFile
|
||||
from django.db import models
|
||||
from django.utils.safestring import mark_safe
|
||||
from PIL import Image
|
||||
|
||||
from core.files import get_remote_file
|
||||
from core.html import strip_html
|
||||
from core.html import FediverseHtmlParser
|
||||
from core.ld import format_ld_date
|
||||
from core.models import Config
|
||||
from core.uploads import upload_emoji_namer
|
||||
|
@ -34,23 +34,27 @@ class EmojiStates(StateGraph):
|
|||
outdated.transitions_to(updated)
|
||||
|
||||
@classmethod
|
||||
async def handle_outdated(cls, instance: "Emoji"):
|
||||
def handle_outdated(cls, instance: "Emoji"):
|
||||
"""
|
||||
Fetches remote emoji and uploads to file for local caching
|
||||
"""
|
||||
if instance.remote_url and not instance.file:
|
||||
try:
|
||||
file, mimetype = await get_remote_file(
|
||||
file, mimetype = get_remote_file(
|
||||
instance.remote_url,
|
||||
timeout=settings.SETUP.REMOTE_TIMEOUT,
|
||||
max_size=settings.SETUP.EMOJI_MAX_IMAGE_FILESIZE_KB * 1024,
|
||||
)
|
||||
except httpx.RequestError:
|
||||
return
|
||||
|
||||
if file:
|
||||
if mimetype == "application/octet-stream":
|
||||
mimetype = Image.open(file).get_format_mimetype()
|
||||
|
||||
instance.file = file
|
||||
instance.mimetype = mimetype
|
||||
await sync_to_async(instance.save)()
|
||||
instance.save()
|
||||
|
||||
return cls.updated
|
||||
|
||||
|
@ -81,7 +85,6 @@ class EmojiManager(models.Manager):
|
|||
|
||||
|
||||
class Emoji(StatorModel):
|
||||
|
||||
# Normalized Emoji without the ':'
|
||||
shortcode = models.SlugField(max_length=100, db_index=True)
|
||||
|
||||
|
@ -123,14 +126,21 @@ class Emoji(StatorModel):
|
|||
|
||||
class Meta:
|
||||
unique_together = ("domain", "shortcode")
|
||||
indexes: list = [] # We need this so Stator can add its own
|
||||
|
||||
class urls(urlman.Urls):
|
||||
root = "/admin/emoji/"
|
||||
create = "{root}/create/"
|
||||
edit = "{root}{self.Emoji}/"
|
||||
delete = "{edit}delete/"
|
||||
admin = "/admin/emoji/"
|
||||
admin_create = "{admin}create/"
|
||||
admin_edit = "{admin}{self.pk}/"
|
||||
admin_delete = "{admin}{self.pk}/delete/"
|
||||
admin_enable = "{admin}{self.pk}/enable/"
|
||||
admin_disable = "{admin}{self.pk}/disable/"
|
||||
admin_copy = "{admin}{self.pk}/copy/"
|
||||
|
||||
emoji_regex = re.compile(r"\B:([a-zA-Z0-9(_)-]+):\B")
|
||||
def delete(self, using=None, keep_parents=False):
|
||||
if self.file:
|
||||
self.file.delete()
|
||||
return super().delete(using=using, keep_parents=keep_parents)
|
||||
|
||||
def clean(self):
|
||||
super().clean()
|
||||
|
@ -146,15 +156,18 @@ class Emoji(StatorModel):
|
|||
|
||||
@classmethod
|
||||
@cached(cache=TTLCache(maxsize=1000, ttl=60))
|
||||
def get_by_domain(cls, shortcode, domain: Domain | None) -> "Emoji":
|
||||
def get_by_domain(cls, shortcode, domain: Domain | None) -> "Emoji | None":
|
||||
"""
|
||||
Given an emoji shortcode and optional domain, looks up the single
|
||||
emoji and returns it. Raises Emoji.DoesNotExist if there isn't one.
|
||||
"""
|
||||
if domain is None or domain.local:
|
||||
return cls.objects.get(local=True, shortcode=shortcode)
|
||||
else:
|
||||
return cls.objects.get(domain=domain, shortcode=shortcode)
|
||||
try:
|
||||
if domain is None or domain.local:
|
||||
return cls.objects.get(local=True, shortcode=shortcode)
|
||||
else:
|
||||
return cls.objects.get(domain=domain, shortcode=shortcode)
|
||||
except Emoji.DoesNotExist:
|
||||
return None
|
||||
|
||||
@property
|
||||
def fullcode(self):
|
||||
|
@ -169,8 +182,11 @@ class Emoji(StatorModel):
|
|||
self.public is None and Config.system.emoji_unreviewed_are_public
|
||||
)
|
||||
|
||||
def full_url(self) -> RelativeAbsoluteUrl:
|
||||
if self.is_usable:
|
||||
def full_url_admin(self) -> RelativeAbsoluteUrl:
|
||||
return self.full_url(always_show=True)
|
||||
|
||||
def full_url(self, always_show=False) -> RelativeAbsoluteUrl:
|
||||
if self.is_usable or always_show:
|
||||
if self.file:
|
||||
return AutoAbsoluteUrl(self.file.url)
|
||||
elif self.remote_url:
|
||||
|
@ -187,14 +203,50 @@ class Emoji(StatorModel):
|
|||
)
|
||||
return self.fullcode
|
||||
|
||||
@property
|
||||
def can_copy_local(self):
|
||||
if not hasattr(Emoji, "locals"):
|
||||
Emoji.locals = Emoji.load_locals()
|
||||
return not self.local and self.is_usable and self.shortcode not in Emoji.locals
|
||||
|
||||
def copy_to_local(self, *, save: bool = True):
|
||||
"""
|
||||
Copy this (non-local) Emoji to local for use by Users of this instance. Returns
|
||||
the Emoji instance, or None if the copy failed to happen. Specify save=False to
|
||||
return the object without saving to database (for bulk saving).
|
||||
"""
|
||||
if not self.can_copy_local:
|
||||
return None
|
||||
|
||||
emoji = None
|
||||
if self.file:
|
||||
# new emoji gets its own copy of the file
|
||||
file = ContentFile(self.file.read())
|
||||
file.name = self.file.name
|
||||
emoji = Emoji(
|
||||
shortcode=self.shortcode,
|
||||
domain=None,
|
||||
local=True,
|
||||
mimetype=self.mimetype,
|
||||
file=file,
|
||||
category=self.category,
|
||||
)
|
||||
if save:
|
||||
emoji.save()
|
||||
# add this new one to the locals cache
|
||||
Emoji.locals[self.shortcode] = emoji
|
||||
return emoji
|
||||
|
||||
@classmethod
|
||||
def emojis_from_content(cls, content: str, domain: Domain | None) -> list["Emoji"]:
|
||||
"""
|
||||
Return a parsed and sanitized of emoji found in content without
|
||||
the surrounding ':'.
|
||||
"""
|
||||
emoji_hits = cls.emoji_regex.findall(strip_html(content))
|
||||
emojis = sorted({emoji.lower() for emoji in emoji_hits})
|
||||
emoji_hits = FediverseHtmlParser(
|
||||
content, find_emojis=True, emoji_domain=domain
|
||||
).emojis
|
||||
emojis = sorted({emoji for emoji in emoji_hits})
|
||||
return list(
|
||||
cls.objects.filter(local=(domain is None) or domain.local)
|
||||
.usable(domain)
|
||||
|
@ -229,7 +281,7 @@ class Emoji(StatorModel):
|
|||
# Name could be a direct property, or in a language'd value
|
||||
if "name" in data:
|
||||
name = data["name"]
|
||||
elif "nameMap" in data:
|
||||
elif "nameMap" in data and "und" in data["nameMap"]:
|
||||
name = data["nameMap"]["und"]
|
||||
else:
|
||||
raise ValueError("No name on emoji JSON")
|
||||
|
@ -239,18 +291,41 @@ class Emoji(StatorModel):
|
|||
mimetype = icon.get("mediaType")
|
||||
if not mimetype:
|
||||
mimetype, _ = mimetypes.guess_type(icon["url"])
|
||||
if mimetype is None:
|
||||
raise ValueError("No mimetype on emoji JSON")
|
||||
|
||||
# create
|
||||
shortcode = name.lower().strip(":")
|
||||
shortcode = name.strip(":")
|
||||
category = (icon.get("category") or "")[:100]
|
||||
|
||||
if not domain.local:
|
||||
try:
|
||||
emoji = cls.objects.get(shortcode=shortcode, domain=domain)
|
||||
except cls.DoesNotExist:
|
||||
pass
|
||||
else:
|
||||
# default to previously discovered mimetype if not provided
|
||||
# by the instance to avoid infinite outdated state
|
||||
if mimetype is None:
|
||||
mimetype = emoji.mimetype
|
||||
|
||||
# Domain previously provided this shortcode. Trample in the new emoji
|
||||
if emoji.remote_url != icon["url"] or emoji.mimetype != mimetype:
|
||||
emoji.object_uri = data["id"]
|
||||
emoji.remote_url = icon["url"]
|
||||
emoji.mimetype = mimetype
|
||||
emoji.category = category
|
||||
if emoji.file:
|
||||
emoji.file.delete(save=True)
|
||||
else:
|
||||
emoji.save()
|
||||
emoji.transition_perform("outdated")
|
||||
return emoji
|
||||
|
||||
emoji = cls.objects.create(
|
||||
shortcode=shortcode,
|
||||
domain=None if domain.local else domain,
|
||||
local=domain.local,
|
||||
object_uri=data["id"],
|
||||
mimetype=mimetype,
|
||||
mimetype=mimetype or "application/octet-stream",
|
||||
category=category,
|
||||
remote_url=icon["url"],
|
||||
)
|
||||
|
|
|
@ -1,78 +1,87 @@
|
|||
import httpx
|
||||
from asgiref.sync import sync_to_async
|
||||
from django.db import models
|
||||
|
||||
from activities.models.timeline_event import TimelineEvent
|
||||
from core.ld import canonicalise
|
||||
from stator.models import State, StateField, StateGraph, StatorModel
|
||||
from users.models import FollowStates
|
||||
from users.models import Block, FollowStates
|
||||
|
||||
|
||||
class FanOutStates(StateGraph):
|
||||
new = State(try_interval=600)
|
||||
sent = State()
|
||||
failed = State()
|
||||
sent = State(delete_after=86400)
|
||||
skipped = State(delete_after=86400)
|
||||
failed = State(delete_after=86400)
|
||||
|
||||
new.transitions_to(sent)
|
||||
new.transitions_to(skipped)
|
||||
new.times_out_to(failed, seconds=86400 * 3)
|
||||
|
||||
@classmethod
|
||||
async def handle_new(cls, instance: "FanOut"):
|
||||
def handle_new(cls, instance: "FanOut"):
|
||||
"""
|
||||
Sends the fan-out to the right inbox.
|
||||
"""
|
||||
|
||||
fan_out = await instance.afetch_full()
|
||||
|
||||
# Don't try to fan out to identities that are not fetched yet
|
||||
if not (fan_out.identity.local or fan_out.identity.inbox_uri):
|
||||
if not (instance.identity.local or instance.identity.inbox_uri):
|
||||
return
|
||||
|
||||
match (fan_out.type, fan_out.identity.local):
|
||||
match (instance.type, instance.identity.local):
|
||||
# Handle creating/updating local posts
|
||||
case ((FanOut.Types.post | FanOut.Types.post_edited), True):
|
||||
post = await fan_out.subject_post.afetch_full()
|
||||
post = instance.subject_post
|
||||
# If the author of the post is blocked or muted, skip out
|
||||
if (
|
||||
Block.objects.active()
|
||||
.filter(source=instance.identity, target=post.author)
|
||||
.exists()
|
||||
):
|
||||
return cls.skipped
|
||||
# Make a timeline event directly
|
||||
# If it's a reply, we only add it if we follow at least one
|
||||
# of the people mentioned AND the author, or we're mentioned,
|
||||
# or it's a reply to us or the author
|
||||
add = True
|
||||
mentioned = {identity.id for identity in post.mentions.all()}
|
||||
followed = await sync_to_async(set)(
|
||||
fan_out.identity.outbound_follows.filter(
|
||||
state__in=FollowStates.group_active()
|
||||
).values_list("target_id", flat=True)
|
||||
)
|
||||
if post.in_reply_to:
|
||||
followed = set(
|
||||
instance.identity.outbound_follows.filter(
|
||||
state__in=FollowStates.group_active()
|
||||
).values_list("target_id", flat=True)
|
||||
)
|
||||
interested_in = followed.union(
|
||||
{post.author_id, fan_out.identity_id}
|
||||
{post.author_id, instance.identity_id}
|
||||
)
|
||||
add = (post.author_id in followed) and (
|
||||
bool(mentioned.intersection(interested_in))
|
||||
)
|
||||
if add:
|
||||
await sync_to_async(TimelineEvent.add_post)(
|
||||
identity=fan_out.identity,
|
||||
TimelineEvent.add_post(
|
||||
identity=instance.identity,
|
||||
post=post,
|
||||
)
|
||||
# We might have been mentioned
|
||||
if (
|
||||
fan_out.identity.id in mentioned
|
||||
and fan_out.identity_id != post.author_id
|
||||
instance.identity.id in mentioned
|
||||
and instance.identity_id != post.author_id
|
||||
):
|
||||
await sync_to_async(TimelineEvent.add_mentioned)(
|
||||
identity=fan_out.identity,
|
||||
TimelineEvent.add_mentioned(
|
||||
identity=instance.identity,
|
||||
post=post,
|
||||
)
|
||||
|
||||
# Handle sending remote posts create
|
||||
case (FanOut.Types.post, False):
|
||||
post = await fan_out.subject_post.afetch_full()
|
||||
post = instance.subject_post
|
||||
# Sign it and send it
|
||||
try:
|
||||
await post.author.signed_request(
|
||||
post.author.signed_request(
|
||||
method="post",
|
||||
uri=fan_out.identity.inbox_uri,
|
||||
uri=(
|
||||
instance.identity.shared_inbox_uri
|
||||
or instance.identity.inbox_uri
|
||||
),
|
||||
body=canonicalise(post.to_create_ap()),
|
||||
)
|
||||
except httpx.RequestError:
|
||||
|
@ -80,12 +89,15 @@ class FanOutStates(StateGraph):
|
|||
|
||||
# Handle sending remote posts update
|
||||
case (FanOut.Types.post_edited, False):
|
||||
post = await fan_out.subject_post.afetch_full()
|
||||
post = instance.subject_post
|
||||
# Sign it and send it
|
||||
try:
|
||||
await post.author.signed_request(
|
||||
post.author.signed_request(
|
||||
method="post",
|
||||
uri=fan_out.identity.inbox_uri,
|
||||
uri=(
|
||||
instance.identity.shared_inbox_uri
|
||||
or instance.identity.inbox_uri
|
||||
),
|
||||
body=canonicalise(post.to_update_ap()),
|
||||
)
|
||||
except httpx.RequestError:
|
||||
|
@ -93,22 +105,25 @@ class FanOutStates(StateGraph):
|
|||
|
||||
# Handle deleting local posts
|
||||
case (FanOut.Types.post_deleted, True):
|
||||
post = await fan_out.subject_post.afetch_full()
|
||||
if fan_out.identity.local:
|
||||
post = instance.subject_post
|
||||
if instance.identity.local:
|
||||
# Remove all timeline events mentioning it
|
||||
await TimelineEvent.objects.filter(
|
||||
identity=fan_out.identity,
|
||||
TimelineEvent.objects.filter(
|
||||
identity=instance.identity,
|
||||
subject_post=post,
|
||||
).adelete()
|
||||
).delete()
|
||||
|
||||
# Handle sending remote post deletes
|
||||
case (FanOut.Types.post_deleted, False):
|
||||
post = await fan_out.subject_post.afetch_full()
|
||||
post = instance.subject_post
|
||||
# Send it to the remote inbox
|
||||
try:
|
||||
await post.author.signed_request(
|
||||
post.author.signed_request(
|
||||
method="post",
|
||||
uri=fan_out.identity.inbox_uri,
|
||||
uri=(
|
||||
instance.identity.shared_inbox_uri
|
||||
or instance.identity.inbox_uri
|
||||
),
|
||||
body=canonicalise(post.to_delete_ap()),
|
||||
)
|
||||
except httpx.RequestError:
|
||||
|
@ -116,82 +131,137 @@ class FanOutStates(StateGraph):
|
|||
|
||||
# Handle local boosts/likes
|
||||
case (FanOut.Types.interaction, True):
|
||||
interaction = await fan_out.subject_post_interaction.afetch_full()
|
||||
interaction = instance.subject_post_interaction
|
||||
# If the author of the interaction is blocked or their notifications
|
||||
# are muted, skip out
|
||||
if (
|
||||
Block.objects.active()
|
||||
.filter(
|
||||
models.Q(mute=False) | models.Q(include_notifications=True),
|
||||
source=instance.identity,
|
||||
target=interaction.identity,
|
||||
)
|
||||
.exists()
|
||||
):
|
||||
return cls.skipped
|
||||
# If blocked/muted the underlying post author, skip out
|
||||
if (
|
||||
Block.objects.active()
|
||||
.filter(
|
||||
source=instance.identity,
|
||||
target_id=interaction.post.author_id,
|
||||
)
|
||||
.exists()
|
||||
):
|
||||
return cls.skipped
|
||||
# Make a timeline event directly
|
||||
await sync_to_async(TimelineEvent.add_post_interaction)(
|
||||
identity=fan_out.identity,
|
||||
TimelineEvent.add_post_interaction(
|
||||
identity=instance.identity,
|
||||
interaction=interaction,
|
||||
)
|
||||
|
||||
# Handle sending remote boosts/likes
|
||||
# Handle sending remote boosts/likes/votes/pins
|
||||
case (FanOut.Types.interaction, False):
|
||||
interaction = await fan_out.subject_post_interaction.afetch_full()
|
||||
interaction = instance.subject_post_interaction
|
||||
# Send it to the remote inbox
|
||||
try:
|
||||
await interaction.identity.signed_request(
|
||||
if interaction.type == interaction.Types.vote:
|
||||
body = interaction.to_create_ap()
|
||||
elif interaction.type == interaction.Types.pin:
|
||||
body = interaction.to_add_ap()
|
||||
else:
|
||||
body = interaction.to_ap()
|
||||
interaction.identity.signed_request(
|
||||
method="post",
|
||||
uri=fan_out.identity.inbox_uri,
|
||||
body=canonicalise(interaction.to_ap()),
|
||||
uri=(
|
||||
instance.identity.shared_inbox_uri
|
||||
or instance.identity.inbox_uri
|
||||
),
|
||||
body=canonicalise(body),
|
||||
)
|
||||
except httpx.RequestError:
|
||||
return
|
||||
|
||||
# Handle undoing local boosts/likes
|
||||
case (FanOut.Types.undo_interaction, True): # noqa:F841
|
||||
interaction = await fan_out.subject_post_interaction.afetch_full()
|
||||
interaction = instance.subject_post_interaction
|
||||
|
||||
# Delete any local timeline events
|
||||
await sync_to_async(TimelineEvent.delete_post_interaction)(
|
||||
identity=fan_out.identity,
|
||||
TimelineEvent.delete_post_interaction(
|
||||
identity=instance.identity,
|
||||
interaction=interaction,
|
||||
)
|
||||
|
||||
# Handle sending remote undoing boosts/likes
|
||||
# Handle sending remote undoing boosts/likes/pins
|
||||
case (FanOut.Types.undo_interaction, False): # noqa:F841
|
||||
interaction = await fan_out.subject_post_interaction.afetch_full()
|
||||
interaction = instance.subject_post_interaction
|
||||
# Send an undo to the remote inbox
|
||||
try:
|
||||
await interaction.identity.signed_request(
|
||||
if interaction.type == interaction.Types.pin:
|
||||
body = interaction.to_remove_ap()
|
||||
else:
|
||||
body = interaction.to_undo_ap()
|
||||
interaction.identity.signed_request(
|
||||
method="post",
|
||||
uri=fan_out.identity.inbox_uri,
|
||||
body=canonicalise(interaction.to_undo_ap()),
|
||||
uri=(
|
||||
instance.identity.shared_inbox_uri
|
||||
or instance.identity.inbox_uri
|
||||
),
|
||||
body=canonicalise(body),
|
||||
)
|
||||
except httpx.RequestError:
|
||||
return
|
||||
|
||||
# Handle sending identity edited to remote
|
||||
case (FanOut.Types.identity_edited, False):
|
||||
identity = await fan_out.subject_identity.afetch_full()
|
||||
identity = instance.subject_identity
|
||||
try:
|
||||
await identity.signed_request(
|
||||
identity.signed_request(
|
||||
method="post",
|
||||
uri=fan_out.identity.inbox_uri,
|
||||
body=canonicalise(fan_out.subject_identity.to_update_ap()),
|
||||
uri=(
|
||||
instance.identity.shared_inbox_uri
|
||||
or instance.identity.inbox_uri
|
||||
),
|
||||
body=canonicalise(instance.subject_identity.to_update_ap()),
|
||||
)
|
||||
except httpx.RequestError:
|
||||
return
|
||||
|
||||
# Handle sending identity deleted to remote
|
||||
case (FanOut.Types.identity_deleted, False):
|
||||
identity = await fan_out.subject_identity.afetch_full()
|
||||
identity = instance.subject_identity
|
||||
try:
|
||||
await identity.signed_request(
|
||||
identity.signed_request(
|
||||
method="post",
|
||||
uri=fan_out.identity.inbox_uri,
|
||||
body=canonicalise(fan_out.subject_identity.to_delete_ap()),
|
||||
uri=(
|
||||
instance.identity.shared_inbox_uri
|
||||
or instance.identity.inbox_uri
|
||||
),
|
||||
body=canonicalise(instance.subject_identity.to_delete_ap()),
|
||||
)
|
||||
except httpx.RequestError:
|
||||
return
|
||||
|
||||
# Handle sending identity moved to remote
|
||||
case (FanOut.Types.identity_moved, False):
|
||||
raise NotImplementedError()
|
||||
|
||||
# Sending identity edited/deleted to local is a no-op
|
||||
case (FanOut.Types.identity_edited, True):
|
||||
pass
|
||||
case (FanOut.Types.identity_deleted, True):
|
||||
pass
|
||||
|
||||
# Created identities make a timeline event
|
||||
case (FanOut.Types.identity_created, True):
|
||||
TimelineEvent.add_identity_created(
|
||||
identity=instance.identity,
|
||||
new_identity=instance.subject_identity,
|
||||
)
|
||||
|
||||
case _:
|
||||
raise ValueError(
|
||||
f"Cannot fan out with type {fan_out.type} local={fan_out.identity.local}"
|
||||
f"Cannot fan out with type {instance.type} local={instance.identity.local}"
|
||||
)
|
||||
|
||||
return cls.sent
|
||||
|
@ -210,10 +280,15 @@ class FanOut(StatorModel):
|
|||
undo_interaction = "undo_interaction"
|
||||
identity_edited = "identity_edited"
|
||||
identity_deleted = "identity_deleted"
|
||||
identity_created = "identity_created"
|
||||
identity_moved = "identity_moved"
|
||||
|
||||
state = StateField(FanOutStates)
|
||||
|
||||
# The user this event is targeted at
|
||||
# We always need this, but if there is a shared inbox URL on the user
|
||||
# we'll deliver to that and won't have fanouts for anyone else with the
|
||||
# same one.
|
||||
identity = models.ForeignKey(
|
||||
"users.Identity",
|
||||
on_delete=models.CASCADE,
|
||||
|
@ -248,23 +323,3 @@ class FanOut(StatorModel):
|
|||
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
updated = models.DateTimeField(auto_now=True)
|
||||
|
||||
### Async helpers ###
|
||||
|
||||
async def afetch_full(self):
|
||||
"""
|
||||
Returns a version of the object with all relations pre-loaded
|
||||
"""
|
||||
return (
|
||||
await FanOut.objects.select_related(
|
||||
"identity",
|
||||
"subject_post",
|
||||
"subject_post_interaction",
|
||||
"subject_identity",
|
||||
"subject_identity__domain",
|
||||
)
|
||||
.prefetch_related(
|
||||
"subject_post__emojis",
|
||||
)
|
||||
.aget(pk=self.pk)
|
||||
)
|
||||
|
|
|
@ -2,11 +2,9 @@ import re
|
|||
from datetime import date, timedelta
|
||||
|
||||
import urlman
|
||||
from asgiref.sync import sync_to_async
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
|
||||
from core.html import strip_html
|
||||
from core.models import Config
|
||||
from stator.models import State, StateField, StateGraph, StatorModel
|
||||
|
||||
|
@ -19,31 +17,27 @@ class HashtagStates(StateGraph):
|
|||
updated.transitions_to(outdated)
|
||||
|
||||
@classmethod
|
||||
async def handle_outdated(cls, instance: "Hashtag"):
|
||||
def handle_outdated(cls, instance: "Hashtag"):
|
||||
"""
|
||||
Computes the stats and other things for a Hashtag
|
||||
"""
|
||||
from time import time
|
||||
|
||||
from .post import Post
|
||||
|
||||
start = time()
|
||||
|
||||
posts_query = Post.objects.local_public().tagged_with(instance)
|
||||
total = await posts_query.acount()
|
||||
total = posts_query.count()
|
||||
|
||||
today = timezone.now().date()
|
||||
total_today = await posts_query.filter(
|
||||
total_today = posts_query.filter(
|
||||
created__gte=today,
|
||||
created__lte=today + timedelta(days=1),
|
||||
).acount()
|
||||
total_month = await posts_query.filter(
|
||||
).count()
|
||||
total_month = posts_query.filter(
|
||||
created__year=today.year,
|
||||
created__month=today.month,
|
||||
).acount()
|
||||
total_year = await posts_query.filter(
|
||||
).count()
|
||||
total_year = posts_query.filter(
|
||||
created__year=today.year,
|
||||
).acount()
|
||||
).count()
|
||||
if total:
|
||||
if not instance.stats:
|
||||
instance.stats = {}
|
||||
|
@ -56,9 +50,8 @@ class HashtagStates(StateGraph):
|
|||
}
|
||||
)
|
||||
instance.stats_updated = timezone.now()
|
||||
await sync_to_async(instance.save)()
|
||||
instance.save()
|
||||
|
||||
print(f"Updated hashtag {instance.hashtag} in {time() - start:.5f} seconds")
|
||||
return cls.updated
|
||||
|
||||
|
||||
|
@ -87,6 +80,7 @@ class HashtagManager(models.Manager):
|
|||
|
||||
|
||||
class Hashtag(StatorModel):
|
||||
MAXIMUM_LENGTH = 100
|
||||
|
||||
# Normalized hashtag without the '#'
|
||||
hashtag = models.SlugField(primary_key=True, max_length=100)
|
||||
|
@ -115,8 +109,12 @@ class Hashtag(StatorModel):
|
|||
|
||||
class urls(urlman.Urls):
|
||||
view = "/tags/{self.hashtag}/"
|
||||
follow = "/tags/{self.hashtag}/follow/"
|
||||
unfollow = "/tags/{self.hashtag}/unfollow/"
|
||||
admin = "/admin/hashtags/"
|
||||
admin_edit = "{admin}{self.hashtag}/"
|
||||
admin_enable = "{admin_edit}enable/"
|
||||
admin_disable = "{admin_edit}disable/"
|
||||
timeline = "/tags/{self.hashtag}/"
|
||||
|
||||
hashtag_regex = re.compile(r"\B#([a-zA-Z0-9(_)]+\b)(?!;)")
|
||||
|
@ -165,19 +163,14 @@ class Hashtag(StatorModel):
|
|||
results[date(year, month, day)] = val
|
||||
return dict(sorted(results.items(), reverse=True)[:num])
|
||||
|
||||
@classmethod
|
||||
def hashtags_from_content(cls, content) -> list[str]:
|
||||
"""
|
||||
Return a parsed and sanitized of hashtags found in content without
|
||||
leading '#'.
|
||||
"""
|
||||
hashtag_hits = cls.hashtag_regex.findall(strip_html(content))
|
||||
hashtags = sorted({tag.lower() for tag in hashtag_hits})
|
||||
return list(hashtags)
|
||||
|
||||
def to_mastodon_json(self):
|
||||
return {
|
||||
def to_mastodon_json(self, following: bool | None = None):
|
||||
value = {
|
||||
"name": self.hashtag,
|
||||
"url": self.urls.view.full(),
|
||||
"url": self.urls.view.full(), # type: ignore
|
||||
"history": [],
|
||||
}
|
||||
|
||||
if following is not None:
|
||||
value["following"] = following
|
||||
|
||||
return value
|
||||
|
|
|
@ -1,16 +1,23 @@
|
|||
import re
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import mimetypes
|
||||
import ssl
|
||||
from collections.abc import Iterable
|
||||
from typing import Optional
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import httpx
|
||||
import urlman
|
||||
from asgiref.sync import async_to_sync, sync_to_async
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.indexes import GinIndex
|
||||
from django.contrib.postgres.search import SearchVector
|
||||
from django.db import models, transaction
|
||||
from django.db.utils import IntegrityError
|
||||
from django.template import loader
|
||||
from django.template.defaultfilters import linebreaks_filter
|
||||
from django.utils import text, timezone
|
||||
from django.utils import timezone
|
||||
from pyld.jsonld import JsonLdError
|
||||
|
||||
from activities.models.emoji import Emoji
|
||||
from activities.models.fan_out import FanOut
|
||||
|
@ -19,8 +26,10 @@ from activities.models.post_types import (
|
|||
PostTypeData,
|
||||
PostTypeDataDecoder,
|
||||
PostTypeDataEncoder,
|
||||
QuestionData,
|
||||
)
|
||||
from core.html import ContentRenderer, strip_html
|
||||
from core.exceptions import ActivityPubFormatError
|
||||
from core.html import ContentRenderer, FediverseHtmlParser
|
||||
from core.ld import (
|
||||
canonicalise,
|
||||
format_ld_date,
|
||||
|
@ -28,23 +37,29 @@ from core.ld import (
|
|||
get_value_or_map,
|
||||
parse_ld_date,
|
||||
)
|
||||
from core.snowflake import Snowflake
|
||||
from stator.exceptions import TryAgainLater
|
||||
from stator.models import State, StateField, StateGraph, StatorModel
|
||||
from users.models.follow import FollowStates
|
||||
from users.models.hashtag_follow import HashtagFollow
|
||||
from users.models.identity import Identity, IdentityStates
|
||||
from users.models.inbox_message import InboxMessage
|
||||
from users.models.system_actor import SystemActor
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PostStates(StateGraph):
|
||||
new = State(try_interval=300)
|
||||
fanned_out = State(externally_progressed=True)
|
||||
deleted = State(try_interval=300)
|
||||
deleted_fanned_out = State()
|
||||
deleted_fanned_out = State(delete_after=86400)
|
||||
|
||||
edited = State(try_interval=300)
|
||||
edited_fanned_out = State(externally_progressed=True)
|
||||
|
||||
new.transitions_to(fanned_out)
|
||||
fanned_out.transitions_to(deleted_fanned_out)
|
||||
fanned_out.transitions_to(deleted)
|
||||
fanned_out.transitions_to(edited)
|
||||
|
||||
|
@ -54,42 +69,66 @@ class PostStates(StateGraph):
|
|||
edited_fanned_out.transitions_to(deleted)
|
||||
|
||||
@classmethod
|
||||
async def targets_fan_out(cls, post: "Post", type_: str) -> None:
|
||||
def targets_fan_out(cls, post: "Post", type_: str) -> None:
|
||||
# Fan out to each target
|
||||
for follow in await post.aget_targets():
|
||||
await FanOut.objects.acreate(
|
||||
for follow in post.get_targets():
|
||||
FanOut.objects.create(
|
||||
identity=follow,
|
||||
type=type_,
|
||||
subject_post=post,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
async def handle_new(cls, instance: "Post"):
|
||||
def handle_new(cls, instance: "Post"):
|
||||
"""
|
||||
Creates all needed fan-out objects for a new Post.
|
||||
"""
|
||||
post = await instance.afetch_full()
|
||||
await cls.targets_fan_out(post, FanOut.Types.post)
|
||||
await post.ensure_hashtags()
|
||||
# Only fan out if the post was published in the last day or it's local
|
||||
# (we don't want to fan out anything older that that which is remote)
|
||||
if instance.local or (timezone.now() - instance.published) < datetime.timedelta(
|
||||
days=1
|
||||
):
|
||||
cls.targets_fan_out(instance, FanOut.Types.post)
|
||||
instance.ensure_hashtags()
|
||||
return cls.fanned_out
|
||||
|
||||
@classmethod
|
||||
async def handle_deleted(cls, instance: "Post"):
|
||||
def handle_fanned_out(cls, instance: "Post"):
|
||||
"""
|
||||
Creates all needed fan-out objects needed to delete a Post.
|
||||
For remote posts, sees if we can delete them every so often.
|
||||
"""
|
||||
post = await instance.afetch_full()
|
||||
await cls.targets_fan_out(post, FanOut.Types.post_deleted)
|
||||
# Skip all of this if the horizon is zero
|
||||
if settings.SETUP.REMOTE_PRUNE_HORIZON <= 0:
|
||||
return
|
||||
# To be a candidate for deletion, a post must be remote and old enough
|
||||
if instance.local:
|
||||
return
|
||||
if instance.created > timezone.now() - datetime.timedelta(
|
||||
days=settings.SETUP.REMOTE_PRUNE_HORIZON
|
||||
):
|
||||
return
|
||||
# It must have no local interactions
|
||||
if instance.interactions.filter(identity__local=True).exists():
|
||||
return
|
||||
# OK, delete it!
|
||||
instance.delete()
|
||||
return cls.deleted_fanned_out
|
||||
|
||||
@classmethod
|
||||
async def handle_edited(cls, instance: "Post"):
|
||||
def handle_deleted(cls, instance: "Post"):
|
||||
"""
|
||||
Creates all needed fan-out objects needed to delete a Post.
|
||||
"""
|
||||
cls.targets_fan_out(instance, FanOut.Types.post_deleted)
|
||||
return cls.deleted_fanned_out
|
||||
|
||||
@classmethod
|
||||
def handle_edited(cls, instance: "Post"):
|
||||
"""
|
||||
Creates all needed fan-out objects for an edited Post.
|
||||
"""
|
||||
post = await instance.afetch_full()
|
||||
await cls.targets_fan_out(post, FanOut.Types.post_edited)
|
||||
await post.ensure_hashtags()
|
||||
cls.targets_fan_out(instance, FanOut.Types.post_edited)
|
||||
instance.ensure_hashtags()
|
||||
return cls.edited_fanned_out
|
||||
|
||||
|
||||
|
@ -135,7 +174,9 @@ class PostQuerySet(models.QuerySet):
|
|||
return query.filter(in_reply_to__isnull=True)
|
||||
return query
|
||||
|
||||
def visible_to(self, identity, include_replies: bool = False):
|
||||
def visible_to(self, identity: Identity | None, include_replies: bool = False):
|
||||
if identity is None:
|
||||
return self.unlisted(include_replies=include_replies)
|
||||
query = self.filter(
|
||||
models.Q(
|
||||
visibility__in=[
|
||||
|
@ -149,9 +190,9 @@ class PostQuerySet(models.QuerySet):
|
|||
author__inbound_follows__source=identity,
|
||||
)
|
||||
| models.Q(
|
||||
visibility=Post.Visibilities.mentioned,
|
||||
mentions=identity,
|
||||
)
|
||||
| models.Q(author=identity)
|
||||
).distinct()
|
||||
if not include_replies:
|
||||
return query.filter(in_reply_to__isnull=True)
|
||||
|
@ -210,6 +251,8 @@ class Post(StatorModel):
|
|||
question = "Question"
|
||||
video = "Video"
|
||||
|
||||
id = models.BigIntegerField(primary_key=True, default=Snowflake.generate_post)
|
||||
|
||||
# The author (attributedTo) of the post
|
||||
author = models.ForeignKey(
|
||||
"users.Identity",
|
||||
|
@ -254,7 +297,7 @@ class Post(StatorModel):
|
|||
|
||||
# The Post it is replying to as an AP ID URI
|
||||
# (as otherwise we'd have to pull entire threads to use IDs)
|
||||
in_reply_to = models.CharField(max_length=500, blank=True, null=True)
|
||||
in_reply_to = models.CharField(max_length=500, blank=True, null=True, db_index=True)
|
||||
|
||||
# The identities the post is directly to (who can see it if not public)
|
||||
to = models.ManyToManyField(
|
||||
|
@ -279,6 +322,9 @@ class Post(StatorModel):
|
|||
blank=True,
|
||||
)
|
||||
|
||||
# Like/Boost/etc counts
|
||||
stats = models.JSONField(blank=True, null=True)
|
||||
|
||||
# When the post was originally created (as opposed to when we received it)
|
||||
published = models.DateTimeField(default=timezone.now)
|
||||
|
||||
|
@ -293,6 +339,18 @@ class Post(StatorModel):
|
|||
class Meta:
|
||||
indexes = [
|
||||
GinIndex(fields=["hashtags"], name="hashtags_gin"),
|
||||
GinIndex(
|
||||
SearchVector("content", config="english"),
|
||||
name="content_vector_gin",
|
||||
),
|
||||
models.Index(
|
||||
fields=["visibility", "local", "published"],
|
||||
name="ix_post_local_public_published",
|
||||
),
|
||||
models.Index(
|
||||
fields=["visibility", "local", "created"],
|
||||
name="ix_post_local_public_created",
|
||||
),
|
||||
]
|
||||
|
||||
class urls(urlman.Urls):
|
||||
|
@ -302,6 +360,8 @@ class Post(StatorModel):
|
|||
action_unlike = "{view}unlike/"
|
||||
action_boost = "{view}boost/"
|
||||
action_unboost = "{view}unboost/"
|
||||
action_bookmark = "{view}bookmark/"
|
||||
action_unbookmark = "{view}unbookmark/"
|
||||
action_delete = "{view}delete/"
|
||||
action_edit = "{view}edit/"
|
||||
action_report = "{view}report/"
|
||||
|
@ -342,25 +402,23 @@ class Post(StatorModel):
|
|||
.first()
|
||||
)
|
||||
|
||||
ain_reply_to_post = sync_to_async(in_reply_to_post)
|
||||
|
||||
### Content cleanup and extraction ###
|
||||
def clean_type_data(self, value):
|
||||
PostTypeData.parse_obj(value)
|
||||
|
||||
mention_regex = re.compile(
|
||||
r"(^|[^\w\d\-_/])@([\w\d\-_]+(?:@[\w\d\-_\.]+[\w\d\-_]+)?)"
|
||||
)
|
||||
|
||||
def _safe_content_note(self, *, local: bool = True):
|
||||
return ContentRenderer(local=local).render_post(self.content, self)
|
||||
|
||||
# def _safe_content_question(self, *, local: bool = True):
|
||||
# context = {
|
||||
# "post": self,
|
||||
# "typed_data": PostTypeData(self.type_data),
|
||||
# }
|
||||
# return loader.render_to_string("activities/_type_question.html", context)
|
||||
def _safe_content_question(self, *, local: bool = True):
|
||||
if local:
|
||||
context = {
|
||||
"post": self,
|
||||
"sanitized_content": self._safe_content_note(local=local),
|
||||
"local_display": local,
|
||||
}
|
||||
return loader.render_to_string("activities/_type_question.html", context)
|
||||
else:
|
||||
return ContentRenderer(local=local).render_post(self.content, self)
|
||||
|
||||
def _safe_content_typed(self, *, local: bool = True):
|
||||
context = {
|
||||
|
@ -402,19 +460,18 @@ class Post(StatorModel):
|
|||
"""
|
||||
if not self.summary:
|
||||
return ""
|
||||
return "summary-" + text.slugify(self.summary, allow_unicode=True)
|
||||
return "summary-{self.id}"
|
||||
|
||||
### Async helpers ###
|
||||
|
||||
async def afetch_full(self) -> "Post":
|
||||
@property
|
||||
def stats_with_defaults(self):
|
||||
"""
|
||||
Returns a version of the object with all relations pre-loaded
|
||||
Returns the stats dict with counts of likes/etc. in it
|
||||
"""
|
||||
return (
|
||||
await Post.objects.select_related("author", "author__domain")
|
||||
.prefetch_related("mentions", "mentions__domain", "attachments", "emojis")
|
||||
.aget(pk=self.pk)
|
||||
)
|
||||
return {
|
||||
"likes": self.stats.get("likes", 0) if self.stats else 0,
|
||||
"boosts": self.stats.get("boosts", 0) if self.stats else 0,
|
||||
"replies": self.stats.get("replies", 0) if self.stats else 0,
|
||||
}
|
||||
|
||||
### Local creation/editing ###
|
||||
|
||||
|
@ -428,6 +485,7 @@ class Post(StatorModel):
|
|||
visibility: int = Visibilities.public,
|
||||
reply_to: Optional["Post"] = None,
|
||||
attachments: list | None = None,
|
||||
question: dict | None = None,
|
||||
) -> "Post":
|
||||
with transaction.atomic():
|
||||
# Find mentions in this post
|
||||
|
@ -437,12 +495,15 @@ class Post(StatorModel):
|
|||
# Maintain local-only for replies
|
||||
if reply_to.visibility == reply_to.Visibilities.local_only:
|
||||
visibility = reply_to.Visibilities.local_only
|
||||
# Find hashtags in this post
|
||||
hashtags = Hashtag.hashtags_from_content(content) or None
|
||||
# Find emoji in this post
|
||||
emojis = Emoji.emojis_from_content(content, None)
|
||||
# Strip all HTML and apply linebreaks filter
|
||||
content = linebreaks_filter(strip_html(content))
|
||||
# Strip all unwanted HTML and apply linebreaks filter, grabbing hashtags on the way
|
||||
parser = FediverseHtmlParser(linebreaks_filter(content), find_hashtags=True)
|
||||
content = parser.html
|
||||
hashtags = (
|
||||
sorted([tag[: Hashtag.MAXIMUM_LENGTH] for tag in parser.hashtags])
|
||||
or None
|
||||
)
|
||||
# Make the Post object
|
||||
post = cls.objects.create(
|
||||
author=author,
|
||||
|
@ -460,34 +521,57 @@ class Post(StatorModel):
|
|||
post.emojis.set(emojis)
|
||||
if attachments:
|
||||
post.attachments.set(attachments)
|
||||
if question:
|
||||
post.type = question["type"]
|
||||
post.type_data = PostTypeData(__root__=question).__root__
|
||||
post.save()
|
||||
# Recalculate parent stats for replies
|
||||
if reply_to:
|
||||
reply_to.calculate_stats()
|
||||
return post
|
||||
|
||||
def edit_local(
|
||||
self,
|
||||
content: str,
|
||||
summary: str | None = None,
|
||||
sensitive: bool | None = None,
|
||||
visibility: int = Visibilities.public,
|
||||
attachments: list | None = None,
|
||||
attachment_attributes: list | None = None,
|
||||
):
|
||||
with transaction.atomic():
|
||||
# Strip all HTML and apply linebreaks filter
|
||||
self.content = linebreaks_filter(strip_html(content))
|
||||
parser = FediverseHtmlParser(linebreaks_filter(content), find_hashtags=True)
|
||||
self.content = parser.html
|
||||
self.hashtags = (
|
||||
sorted([tag[: Hashtag.MAXIMUM_LENGTH] for tag in parser.hashtags])
|
||||
or None
|
||||
)
|
||||
self.summary = summary or None
|
||||
self.sensitive = bool(summary)
|
||||
self.sensitive = bool(summary) if sensitive is None else sensitive
|
||||
self.visibility = visibility
|
||||
self.edited = timezone.now()
|
||||
self.hashtags = Hashtag.hashtags_from_content(content) or None
|
||||
self.mentions.set(self.mentions_from_content(content, self.author))
|
||||
self.emojis.set(Emoji.emojis_from_content(content, None))
|
||||
self.attachments.set(attachments or [])
|
||||
self.save()
|
||||
|
||||
for attrs in attachment_attributes or []:
|
||||
attachment = next(
|
||||
(a for a in attachments or [] if str(a.id) == attrs.id), None
|
||||
)
|
||||
if attachment is None:
|
||||
continue
|
||||
attachment.name = attrs.description
|
||||
attachment.save()
|
||||
|
||||
self.transition_perform(PostStates.edited)
|
||||
|
||||
@classmethod
|
||||
def mentions_from_content(cls, content, author) -> set[Identity]:
|
||||
mention_hits = cls.mention_regex.findall(content)
|
||||
mention_hits = FediverseHtmlParser(content, find_mentions=True).mentions
|
||||
mentions = set()
|
||||
for precursor, handle in mention_hits:
|
||||
for handle in mention_hits:
|
||||
handle = handle.lower()
|
||||
if "@" in handle:
|
||||
username, domain = handle.split("@", 1)
|
||||
|
@ -499,11 +583,11 @@ class Post(StatorModel):
|
|||
domain=domain,
|
||||
fetch=True,
|
||||
)
|
||||
if identity is not None:
|
||||
if identity is not None and not identity.deleted:
|
||||
mentions.add(identity)
|
||||
return mentions
|
||||
|
||||
async def ensure_hashtags(self) -> None:
|
||||
def ensure_hashtags(self) -> None:
|
||||
"""
|
||||
Ensure any of the already parsed hashtags from this Post
|
||||
have a corresponding Hashtag record.
|
||||
|
@ -511,10 +595,54 @@ class Post(StatorModel):
|
|||
# Ensure hashtags
|
||||
if self.hashtags:
|
||||
for hashtag in self.hashtags:
|
||||
tag, _ = await Hashtag.objects.aget_or_create(
|
||||
hashtag=hashtag,
|
||||
tag, _ = Hashtag.objects.get_or_create(
|
||||
hashtag=hashtag[: Hashtag.MAXIMUM_LENGTH],
|
||||
)
|
||||
await tag.atransition_perform(HashtagStates.outdated)
|
||||
tag.transition_perform(HashtagStates.outdated)
|
||||
|
||||
def calculate_stats(self, save=True):
|
||||
"""
|
||||
Recalculates our stats dict
|
||||
"""
|
||||
from activities.models import PostInteraction, PostInteractionStates
|
||||
|
||||
self.stats = {
|
||||
"likes": self.interactions.filter(
|
||||
type=PostInteraction.Types.like,
|
||||
state__in=PostInteractionStates.group_active(),
|
||||
).count(),
|
||||
"boosts": self.interactions.filter(
|
||||
type=PostInteraction.Types.boost,
|
||||
state__in=PostInteractionStates.group_active(),
|
||||
).count(),
|
||||
"replies": Post.objects.filter(in_reply_to=self.object_uri).count(),
|
||||
}
|
||||
if save:
|
||||
self.save()
|
||||
|
||||
def calculate_type_data(self, save=True):
|
||||
"""
|
||||
Recalculate type_data (used mostly for poll votes)
|
||||
"""
|
||||
from activities.models import PostInteraction
|
||||
|
||||
if self.local and isinstance(self.type_data, QuestionData):
|
||||
self.type_data.voter_count = (
|
||||
self.interactions.filter(
|
||||
type=PostInteraction.Types.vote,
|
||||
)
|
||||
.values("identity")
|
||||
.distinct()
|
||||
.count()
|
||||
)
|
||||
|
||||
for option in self.type_data.options:
|
||||
option.votes = self.interactions.filter(
|
||||
type=PostInteraction.Types.vote,
|
||||
value=option.name,
|
||||
).count()
|
||||
if save:
|
||||
self.save()
|
||||
|
||||
### ActivityPub (outbound) ###
|
||||
|
||||
|
@ -522,6 +650,7 @@ class Post(StatorModel):
|
|||
"""
|
||||
Returns the AP JSON for this object
|
||||
"""
|
||||
self.author.ensure_uris()
|
||||
value = {
|
||||
"to": [],
|
||||
"cc": [],
|
||||
|
@ -554,11 +683,14 @@ class Post(StatorModel):
|
|||
if self.edited:
|
||||
value["updated"] = format_ld_date(self.edited)
|
||||
# Targeting
|
||||
# TODO: Add followers object
|
||||
if self.visibility == self.Visibilities.public:
|
||||
value["to"].append("Public")
|
||||
value["to"].append("as:Public")
|
||||
elif self.visibility == self.Visibilities.unlisted:
|
||||
value["cc"].append("Public")
|
||||
value["cc"].append("as:Public")
|
||||
elif (
|
||||
self.visibility == self.Visibilities.followers and self.author.followers_uri
|
||||
):
|
||||
value["to"].append(self.author.followers_uri)
|
||||
# Mentions
|
||||
for mention in self.mentions.all():
|
||||
value["tag"].append(mention.to_ap_tag())
|
||||
|
@ -626,27 +758,38 @@ class Post(StatorModel):
|
|||
"object": object,
|
||||
}
|
||||
|
||||
async def aget_targets(self) -> Iterable[Identity]:
|
||||
def get_targets(self) -> Iterable[Identity]:
|
||||
"""
|
||||
Returns a list of Identities that need to see posts and their changes
|
||||
"""
|
||||
targets = set()
|
||||
async for mention in self.mentions.all():
|
||||
for mention in self.mentions.all():
|
||||
targets.add(mention)
|
||||
# Then, if it's not mentions only, also deliver to followers
|
||||
if self.visibility in [Post.Visibilities.public, Post.Visibilities.unlisted]:
|
||||
for interaction in self.interactions.all():
|
||||
targets.add(interaction.identity)
|
||||
# Then, if it's not mentions only, also deliver to followers and all hashtag followers
|
||||
if self.visibility != Post.Visibilities.mentioned:
|
||||
async for follower in self.author.inbound_follows.select_related("source"):
|
||||
for follower in self.author.inbound_follows.filter(
|
||||
state__in=FollowStates.group_active()
|
||||
).select_related("source"):
|
||||
targets.add(follower.source)
|
||||
if self.hashtags:
|
||||
for follow in HashtagFollow.objects.by_hashtags(
|
||||
self.hashtags
|
||||
).prefetch_related("identity"):
|
||||
targets.add(follow.identity)
|
||||
|
||||
# If it's a reply, always include the original author if we know them
|
||||
reply_post = await self.ain_reply_to_post()
|
||||
reply_post = self.in_reply_to_post()
|
||||
if reply_post:
|
||||
targets.add(reply_post.author)
|
||||
# And if it's a reply to one of our own, we have to re-fan-out to
|
||||
# the original author's followers
|
||||
if reply_post.author.local:
|
||||
async for follower in reply_post.author.inbound_follows.select_related(
|
||||
"source"
|
||||
):
|
||||
for follower in reply_post.author.inbound_follows.filter(
|
||||
state__in=FollowStates.group_active()
|
||||
).select_related("source"):
|
||||
targets.add(follower.source)
|
||||
# If this is a remote post or local-only, filter to only include
|
||||
# local identities
|
||||
|
@ -655,7 +798,31 @@ class Post(StatorModel):
|
|||
# If it's a local post, include the author
|
||||
if self.local:
|
||||
targets.add(self.author)
|
||||
return targets
|
||||
# Fetch the author's full blocks and remove them as targets
|
||||
blocks = (
|
||||
self.author.outbound_blocks.active()
|
||||
.filter(mute=False)
|
||||
.select_related("target")
|
||||
)
|
||||
for block in blocks:
|
||||
try:
|
||||
targets.remove(block.target)
|
||||
except KeyError:
|
||||
pass
|
||||
# Now dedupe the targets based on shared inboxes (we only keep one per
|
||||
# shared inbox)
|
||||
deduped_targets = set()
|
||||
shared_inboxes = set()
|
||||
for target in targets:
|
||||
if target.local or not target.shared_inbox_uri:
|
||||
deduped_targets.add(target)
|
||||
elif target.shared_inbox_uri not in shared_inboxes:
|
||||
shared_inboxes.add(target.shared_inbox_uri)
|
||||
deduped_targets.add(target)
|
||||
else:
|
||||
# Their shared inbox is already being sent to
|
||||
pass
|
||||
return deduped_targets
|
||||
|
||||
### ActivityPub (inbound) ###
|
||||
|
||||
|
@ -668,13 +835,26 @@ class Post(StatorModel):
|
|||
Raises DoesNotExist if it's not found and create is False,
|
||||
or it's from a blocked domain.
|
||||
"""
|
||||
# Ensure the domain of the object's actor and ID match to prevent injection
|
||||
if urlparse(data["id"]).hostname != urlparse(data["attributedTo"]).hostname:
|
||||
raise ValueError("Object's ID domain is different to its author")
|
||||
try:
|
||||
# Ensure data has the primary fields of all Posts
|
||||
if (
|
||||
not isinstance(data["id"], str)
|
||||
or not isinstance(data["attributedTo"], str)
|
||||
or not isinstance(data["type"], str)
|
||||
):
|
||||
raise TypeError()
|
||||
# Ensure the domain of the object's actor and ID match to prevent injection
|
||||
if urlparse(data["id"]).hostname != urlparse(data["attributedTo"]).hostname:
|
||||
raise ValueError("Object's ID domain is different to its author")
|
||||
except (TypeError, KeyError) as ex:
|
||||
raise cls.DoesNotExist(
|
||||
"Object data is not a recognizable ActivityPub object"
|
||||
) from ex
|
||||
|
||||
# Do we have one with the right ID?
|
||||
created = False
|
||||
try:
|
||||
post = cls.objects.select_related("author__domain").get(
|
||||
post: Post = cls.objects.select_related("author__domain").get(
|
||||
object_uri=data["id"]
|
||||
)
|
||||
except cls.DoesNotExist:
|
||||
|
@ -684,51 +864,79 @@ class Post(StatorModel):
|
|||
# If the author is not fetched yet, try again later
|
||||
if author.domain is None:
|
||||
if fetch_author:
|
||||
async_to_sync(author.fetch_actor)()
|
||||
if author.domain is None:
|
||||
if not author.fetch_actor() or author.domain is None:
|
||||
raise TryAgainLater()
|
||||
else:
|
||||
raise TryAgainLater()
|
||||
# If the post is from a blocked domain, stop and drop
|
||||
if author.domain.blocked:
|
||||
if author.domain.recursively_blocked():
|
||||
raise cls.DoesNotExist("Post is from a blocked domain")
|
||||
post = cls.objects.create(
|
||||
object_uri=data["id"],
|
||||
author=author,
|
||||
content="",
|
||||
local=False,
|
||||
type=data["type"],
|
||||
)
|
||||
created = True
|
||||
# parallelism may cause another simultaneous worker thread
|
||||
# to try to create the same post - so watch for that and
|
||||
# try to avoid failing the entire transaction
|
||||
try:
|
||||
# wrapped in a transaction to avoid breaking the outer
|
||||
# transaction
|
||||
with transaction.atomic():
|
||||
post = cls.objects.create(
|
||||
object_uri=data["id"],
|
||||
author=author,
|
||||
content="",
|
||||
local=False,
|
||||
type=data["type"],
|
||||
)
|
||||
created = True
|
||||
except IntegrityError:
|
||||
# despite previous checks, a parallel thread managed
|
||||
# to create the same object already
|
||||
raise TryAgainLater()
|
||||
else:
|
||||
raise cls.DoesNotExist(f"No post with ID {data['id']}", data)
|
||||
if update or created:
|
||||
post.type = data["type"]
|
||||
if post.type in (cls.Types.article, cls.Types.question):
|
||||
type_data = PostTypeData(__root__=data).__root__
|
||||
post.type_data = type_data.dict()
|
||||
post.content = get_value_or_map(data, "content", "contentMap")
|
||||
post.summary = data.get("summary")
|
||||
post.sensitive = data.get("sensitive", False)
|
||||
post.url = data.get("url", data["id"])
|
||||
if post.type in (cls.Types.article, cls.Types.question):
|
||||
post.type_data = PostTypeData(__root__=data).__root__
|
||||
try:
|
||||
# apparently sometimes posts (Pages?) in the fediverse
|
||||
# don't have content, but this shouldn't be a total failure
|
||||
post.content = get_value_or_map(data, "content", "contentMap")
|
||||
except ActivityPubFormatError as err:
|
||||
logger.warning("%s on %s", err, post.url)
|
||||
post.content = None
|
||||
# Document types have names, not summaries
|
||||
post.summary = data.get("summary") or data.get("name")
|
||||
if not post.content and post.summary:
|
||||
post.content = post.summary
|
||||
post.summary = None
|
||||
post.sensitive = data.get("sensitive", False)
|
||||
post.published = parse_ld_date(data.get("published"))
|
||||
post.edited = parse_ld_date(data.get("updated"))
|
||||
post.in_reply_to = data.get("inReplyTo")
|
||||
# Mentions and hashtags
|
||||
post.hashtags = []
|
||||
for tag in get_list(data, "tag"):
|
||||
if tag["type"].lower() == "mention":
|
||||
tag_type = tag["type"].lower()
|
||||
if tag_type == "mention":
|
||||
mention_identity = Identity.by_actor_uri(tag["href"], create=True)
|
||||
post.mentions.add(mention_identity)
|
||||
elif tag["type"].lower() in ["_:hashtag", "hashtag"]:
|
||||
elif tag_type in ["_:hashtag", "hashtag"]:
|
||||
# kbin produces tags with 'tag' instead of 'name'
|
||||
if "tag" in tag and "name" not in tag:
|
||||
name = get_value_or_map(tag, "tag", "tagMap")
|
||||
else:
|
||||
name = get_value_or_map(tag, "name", "nameMap")
|
||||
post.hashtags.append(
|
||||
get_value_or_map(tag, "name", "nameMap").lower().lstrip("#")
|
||||
name.lower().lstrip("#")[: Hashtag.MAXIMUM_LENGTH]
|
||||
)
|
||||
elif tag["type"].lower() in ["toot:emoji", "emoji"]:
|
||||
elif tag_type in ["toot:emoji", "emoji"]:
|
||||
emoji = Emoji.by_ap_tag(post.author.domain, tag, create=True)
|
||||
post.emojis.add(emoji)
|
||||
else:
|
||||
raise ValueError(f"Unknown tag type {tag['type']}")
|
||||
# Various ActivityPub implementations and proposals introduced tag
|
||||
# types, e.g. Edition in Bookwyrm and Link in fep-e232 Object Links
|
||||
# it should be safe to ignore (and log) them before a full support
|
||||
pass
|
||||
# Visibility and to
|
||||
# (a post is public if it's to:public, otherwise it's unlisted if
|
||||
# it's cc:public, otherwise it's more limited)
|
||||
|
@ -739,17 +947,34 @@ class Post(StatorModel):
|
|||
post.visibility = Post.Visibilities.public
|
||||
elif "public" in cc or "as:public" in cc:
|
||||
post.visibility = Post.Visibilities.unlisted
|
||||
elif post.author.followers_uri in to:
|
||||
post.visibility = Post.Visibilities.followers
|
||||
# Attachments
|
||||
# These have no IDs, so we have to wipe them each time
|
||||
post.attachments.all().delete()
|
||||
for attachment in get_list(data, "attachment"):
|
||||
if "url" not in attachment and "href" in attachment:
|
||||
# Links have hrefs, while other Objects have urls
|
||||
attachment["url"] = attachment["href"]
|
||||
if "focalPoint" in attachment:
|
||||
focal_x, focal_y = attachment["focalPoint"]
|
||||
try:
|
||||
focal_x, focal_y = attachment["focalPoint"]
|
||||
except (ValueError, TypeError):
|
||||
focal_x, focal_y = None, None
|
||||
else:
|
||||
focal_x, focal_y = None, None
|
||||
mimetype = attachment.get("mediaType")
|
||||
if not mimetype or not isinstance(mimetype, str):
|
||||
if "url" not in attachment:
|
||||
raise ActivityPubFormatError(
|
||||
f"No URL present on attachment in {post.url}"
|
||||
)
|
||||
mimetype, _ = mimetypes.guess_type(attachment["url"])
|
||||
if not mimetype:
|
||||
mimetype = "application/octet-stream"
|
||||
post.attachments.create(
|
||||
remote_url=attachment["url"],
|
||||
mimetype=attachment["mediaType"],
|
||||
mimetype=mimetype,
|
||||
name=attachment.get("name"),
|
||||
width=attachment.get("width"),
|
||||
height=attachment.get("height"),
|
||||
|
@ -757,10 +982,29 @@ class Post(StatorModel):
|
|||
focal_x=focal_x,
|
||||
focal_y=focal_y,
|
||||
)
|
||||
post.save()
|
||||
# Potentially schedule a fetch of the reply parent
|
||||
# Calculate stats in case we have existing replies
|
||||
post.calculate_stats(save=False)
|
||||
with transaction.atomic():
|
||||
# if we don't commit the transaction here, there's a chance
|
||||
# the parent fetch below goes into an infinite loop
|
||||
post.save()
|
||||
|
||||
# Potentially schedule a fetch of the reply parent, and recalculate
|
||||
# its stats if it's here already.
|
||||
if post.in_reply_to:
|
||||
cls.ensure_object_uri(post.in_reply_to)
|
||||
try:
|
||||
parent = cls.by_object_uri(post.in_reply_to)
|
||||
except cls.DoesNotExist:
|
||||
try:
|
||||
cls.ensure_object_uri(post.in_reply_to, reason=post.object_uri)
|
||||
except ValueError:
|
||||
logger.warning(
|
||||
"Cannot fetch ancestor of Post=%s, ancestor_uri=%s",
|
||||
post.pk,
|
||||
post.in_reply_to,
|
||||
)
|
||||
else:
|
||||
parent.calculate_stats()
|
||||
return post
|
||||
|
||||
@classmethod
|
||||
|
@ -774,10 +1018,10 @@ class Post(StatorModel):
|
|||
except cls.DoesNotExist:
|
||||
if fetch:
|
||||
try:
|
||||
response = async_to_sync(SystemActor().signed_request)(
|
||||
response = SystemActor().signed_request(
|
||||
method="get", uri=object_uri
|
||||
)
|
||||
except httpx.RequestError:
|
||||
except (httpx.HTTPError, ssl.SSLCertVerificationError, ValueError):
|
||||
raise cls.DoesNotExist(f"Could not fetch {object_uri}")
|
||||
if response.status_code in [404, 410]:
|
||||
raise cls.DoesNotExist(f"No post at {object_uri}")
|
||||
|
@ -788,37 +1032,40 @@ class Post(StatorModel):
|
|||
f"Error fetching post from {object_uri}: {response.status_code}",
|
||||
{response.content},
|
||||
)
|
||||
post = cls.by_ap(
|
||||
canonicalise(response.json(), include_security=True),
|
||||
create=True,
|
||||
update=True,
|
||||
fetch_author=True,
|
||||
)
|
||||
try:
|
||||
post = cls.by_ap(
|
||||
canonicalise(response.json(), include_security=True),
|
||||
create=True,
|
||||
update=True,
|
||||
fetch_author=True,
|
||||
)
|
||||
except (json.JSONDecodeError, ValueError, JsonLdError) as err:
|
||||
raise cls.DoesNotExist(
|
||||
f"Invalid ld+json response for {object_uri}"
|
||||
) from err
|
||||
# We may need to fetch the author too
|
||||
if post.author.state == IdentityStates.outdated:
|
||||
async_to_sync(post.author.fetch_actor)()
|
||||
post.author.fetch_actor()
|
||||
return post
|
||||
else:
|
||||
raise cls.DoesNotExist(f"Cannot find Post with URI {object_uri}")
|
||||
|
||||
@classmethod
|
||||
def ensure_object_uri(cls, object_uri: str):
|
||||
def ensure_object_uri(cls, object_uri: str, reason: str | None = None):
|
||||
"""
|
||||
Sees if the post is in our local set, and if not, schedules a fetch
|
||||
for it (in the background)
|
||||
"""
|
||||
if not object_uri:
|
||||
raise ValueError("No URI provided!")
|
||||
if not object_uri or "://" not in object_uri:
|
||||
raise ValueError("URI missing or invalid")
|
||||
try:
|
||||
cls.by_object_uri(object_uri)
|
||||
except cls.DoesNotExist:
|
||||
InboxMessage.objects.create(
|
||||
message={
|
||||
"type": "__internal__",
|
||||
"object": {
|
||||
"type": "FetchPost",
|
||||
"object": object_uri,
|
||||
},
|
||||
InboxMessage.create_internal(
|
||||
{
|
||||
"type": "FetchPost",
|
||||
"object": object_uri,
|
||||
"reason": reason,
|
||||
}
|
||||
)
|
||||
|
||||
|
@ -832,7 +1079,7 @@ class Post(StatorModel):
|
|||
if data["actor"] != data["object"]["attributedTo"]:
|
||||
raise ValueError("Create actor does not match its Post object", data)
|
||||
# Create it, stator will fan it out locally
|
||||
cls.by_ap(data["object"], create=True, update=True)
|
||||
cls.by_ap(data["object"], create=True, update=True, fetch_author=True)
|
||||
|
||||
@classmethod
|
||||
def handle_update_ap(cls, data):
|
||||
|
@ -878,8 +1125,10 @@ class Post(StatorModel):
|
|||
Handles an internal fetch-request inbox message
|
||||
"""
|
||||
try:
|
||||
cls.by_object_uri(data["object"]["object"], fetch=True)
|
||||
except cls.DoesNotExist:
|
||||
uri = data["object"]
|
||||
if "://" in uri:
|
||||
cls.by_object_uri(uri, fetch=True)
|
||||
except (cls.DoesNotExist, KeyError):
|
||||
pass
|
||||
|
||||
### OpenGraph API ###
|
||||
|
@ -900,10 +1149,15 @@ class Post(StatorModel):
|
|||
|
||||
### Mastodon API ###
|
||||
|
||||
def to_mastodon_json(self, interactions=None):
|
||||
def to_mastodon_json(self, interactions=None, bookmarks=None, identity=None):
|
||||
reply_parent = None
|
||||
if self.in_reply_to:
|
||||
reply_parent = Post.objects.filter(object_uri=self.in_reply_to).first()
|
||||
# Load the PK and author.id explicitly to prevent a SELECT on the entire author Identity
|
||||
reply_parent = (
|
||||
Post.objects.filter(object_uri=self.in_reply_to)
|
||||
.only("pk", "author_id")
|
||||
.first()
|
||||
)
|
||||
visibility_mapping = {
|
||||
self.Visibilities.public: "public",
|
||||
self.Visibilities.unlisted: "unlisted",
|
||||
|
@ -915,7 +1169,7 @@ class Post(StatorModel):
|
|||
"id": self.pk,
|
||||
"uri": self.object_uri,
|
||||
"created_at": format_ld_date(self.published),
|
||||
"account": self.author.to_mastodon_json(),
|
||||
"account": self.author.to_mastodon_json(include_counts=False),
|
||||
"content": self.safe_content_remote(),
|
||||
"visibility": visibility_mapping[self.visibility],
|
||||
"sensitive": self.sensitive,
|
||||
|
@ -924,14 +1178,7 @@ class Post(StatorModel):
|
|||
attachment.to_mastodon_json() for attachment in self.attachments.all()
|
||||
],
|
||||
"mentions": [
|
||||
{
|
||||
"id": mention.id,
|
||||
"username": mention.username or "",
|
||||
"url": mention.absolute_profile_uri() or "",
|
||||
"acct": mention.handle or "",
|
||||
}
|
||||
for mention in self.mentions.all()
|
||||
if mention.username
|
||||
mention.to_mastodon_mention_json() for mention in self.mentions.all()
|
||||
],
|
||||
"tags": (
|
||||
[
|
||||
|
@ -944,15 +1191,25 @@ class Post(StatorModel):
|
|||
if self.hashtags
|
||||
else []
|
||||
),
|
||||
"emojis": [emoji.to_mastodon_json() for emoji in self.emojis.usable()],
|
||||
"reblogs_count": self.interactions.filter(type="boost").count(),
|
||||
"favourites_count": self.interactions.filter(type="like").count(),
|
||||
"replies_count": 0,
|
||||
# Filter in the list comp rather than query because the common case is no emoji in the resultset
|
||||
# When filter is on emojis like `emojis.usable()` it causes a query that is not cached by prefetch_related
|
||||
"emojis": [
|
||||
emoji.to_mastodon_json()
|
||||
for emoji in self.emojis.all()
|
||||
if emoji.is_usable
|
||||
],
|
||||
"reblogs_count": self.stats_with_defaults["boosts"],
|
||||
"favourites_count": self.stats_with_defaults["likes"],
|
||||
"replies_count": self.stats_with_defaults["replies"],
|
||||
"url": self.absolute_object_uri(),
|
||||
"in_reply_to_id": reply_parent.pk if reply_parent else None,
|
||||
"in_reply_to_account_id": reply_parent.author.pk if reply_parent else None,
|
||||
"in_reply_to_account_id": (
|
||||
reply_parent.author_id if reply_parent else None
|
||||
),
|
||||
"reblog": None,
|
||||
"poll": None,
|
||||
"poll": self.type_data.to_mastodon_json(self, identity)
|
||||
if isinstance(self.type_data, QuestionData)
|
||||
else None,
|
||||
"card": None,
|
||||
"language": None,
|
||||
"text": self.safe_content_remote(),
|
||||
|
@ -961,4 +1218,7 @@ class Post(StatorModel):
|
|||
if interactions:
|
||||
value["favourited"] = self.pk in interactions.get("like", [])
|
||||
value["reblogged"] = self.pk in interactions.get("boost", [])
|
||||
value["pinned"] = self.pk in interactions.get("pin", [])
|
||||
if bookmarks:
|
||||
value["bookmarked"] = self.pk in bookmarks
|
||||
return value
|
||||
|
|
|
@ -8,16 +8,11 @@ from stator.models import State, StateField, StateGraph, StatorModel
|
|||
|
||||
|
||||
class PostAttachmentStates(StateGraph):
|
||||
new = State(try_interval=30000)
|
||||
new = State(externally_progressed=True)
|
||||
fetched = State()
|
||||
|
||||
new.transitions_to(fetched)
|
||||
|
||||
@classmethod
|
||||
async def handle_new(cls, instance):
|
||||
# TODO: Fetch images to our own media storage
|
||||
pass
|
||||
|
||||
|
||||
class PostAttachment(StatorModel):
|
||||
"""
|
||||
|
@ -31,6 +26,13 @@ class PostAttachment(StatorModel):
|
|||
blank=True,
|
||||
null=True,
|
||||
)
|
||||
author = models.ForeignKey(
|
||||
"users.Identity",
|
||||
on_delete=models.CASCADE,
|
||||
related_name="attachments",
|
||||
blank=True,
|
||||
null=True,
|
||||
)
|
||||
|
||||
state = StateField(graph=PostAttachmentStates)
|
||||
|
||||
|
@ -55,8 +57,8 @@ class PostAttachment(StatorModel):
|
|||
|
||||
width = models.IntegerField(null=True, blank=True)
|
||||
height = models.IntegerField(null=True, blank=True)
|
||||
focal_x = models.IntegerField(null=True, blank=True)
|
||||
focal_y = models.IntegerField(null=True, blank=True)
|
||||
focal_x = models.FloatField(null=True, blank=True)
|
||||
focal_y = models.FloatField(null=True, blank=True)
|
||||
blurhash = models.TextField(null=True, blank=True)
|
||||
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
|
@ -73,7 +75,11 @@ class PostAttachment(StatorModel):
|
|||
]
|
||||
|
||||
def is_video(self):
|
||||
return self.mimetype in ["video/webm"]
|
||||
return self.mimetype in [
|
||||
"video/mp4",
|
||||
"video/ogg",
|
||||
"video/webm",
|
||||
]
|
||||
|
||||
def thumbnail_url(self) -> RelativeAbsoluteUrl:
|
||||
if self.thumbnail:
|
||||
|
@ -89,16 +95,25 @@ class PostAttachment(StatorModel):
|
|||
def full_url(self):
|
||||
if self.file:
|
||||
return RelativeAbsoluteUrl(self.file.url)
|
||||
else:
|
||||
if self.is_image():
|
||||
return ProxyAbsoluteUrl(
|
||||
f"/proxy/post_attachment/{self.pk}/",
|
||||
remote_url=self.remote_url,
|
||||
)
|
||||
return RelativeAbsoluteUrl(self.remote_url)
|
||||
|
||||
@property
|
||||
def file_display_name(self):
|
||||
if self.remote_url:
|
||||
return self.remote_url.rsplit("/", 1)[-1]
|
||||
if self.file:
|
||||
return self.file.name
|
||||
return f"attachment ({self.mimetype})"
|
||||
|
||||
### ActivityPub ###
|
||||
|
||||
def to_ap(self):
|
||||
return {
|
||||
ap = {
|
||||
"url": self.file.url,
|
||||
"name": self.name,
|
||||
"type": "Document",
|
||||
|
@ -107,13 +122,22 @@ class PostAttachment(StatorModel):
|
|||
"mediaType": self.mimetype,
|
||||
"blurhash": self.blurhash,
|
||||
}
|
||||
if self.is_image() and self.focal_x and self.focal_y:
|
||||
ap["type"] = "Image"
|
||||
ap["focalPoint"] = [self.focal_x, self.focal_y]
|
||||
return ap
|
||||
|
||||
### Mastodon Client API ###
|
||||
|
||||
def to_mastodon_json(self):
|
||||
type_ = "unknown"
|
||||
if self.is_image():
|
||||
type_ = "image"
|
||||
elif self.is_video():
|
||||
type_ = "video"
|
||||
value = {
|
||||
"id": self.pk,
|
||||
"type": "image" if self.is_image() else "unknown",
|
||||
"type": type_,
|
||||
"url": self.full_url().absolute,
|
||||
"preview_url": self.thumbnail_url().absolute,
|
||||
"remote_url": None,
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
from collections.abc import Iterable
|
||||
|
||||
from django.db import models, transaction
|
||||
from django.utils import timezone
|
||||
|
||||
from activities.models.fan_out import FanOut
|
||||
from activities.models.post import Post
|
||||
from activities.models.timeline_event import TimelineEvent
|
||||
from activities.models.post_types import QuestionData
|
||||
from core.ld import format_ld_date, get_str_or_id, parse_ld_date
|
||||
from core.snowflake import Snowflake
|
||||
from stator.models import State, StateField, StateGraph, StatorModel
|
||||
from users.models.follow import Follow
|
||||
from users.models.identity import Identity
|
||||
|
||||
|
||||
|
@ -14,7 +16,7 @@ class PostInteractionStates(StateGraph):
|
|||
new = State(try_interval=300)
|
||||
fanned_out = State(externally_progressed=True)
|
||||
undone = State(try_interval=300)
|
||||
undone_fanned_out = State()
|
||||
undone_fanned_out = State(delete_after=24 * 60 * 60)
|
||||
|
||||
new.transitions_to(fanned_out)
|
||||
fanned_out.transitions_to(undone)
|
||||
|
@ -25,91 +27,89 @@ class PostInteractionStates(StateGraph):
|
|||
return [cls.new, cls.fanned_out]
|
||||
|
||||
@classmethod
|
||||
async def handle_new(cls, instance: "PostInteraction"):
|
||||
def handle_new(cls, instance: "PostInteraction"):
|
||||
"""
|
||||
Creates all needed fan-out objects for a new PostInteraction.
|
||||
"""
|
||||
interaction = await instance.afetch_full()
|
||||
# Boost: send a copy to all people who follow this user
|
||||
if interaction.type == interaction.Types.boost:
|
||||
async for follow in interaction.identity.inbound_follows.select_related(
|
||||
"source", "target"
|
||||
):
|
||||
if follow.source.local or follow.target.local:
|
||||
await FanOut.objects.acreate(
|
||||
type=FanOut.Types.interaction,
|
||||
identity_id=follow.source_id,
|
||||
subject_post=interaction.post,
|
||||
subject_post_interaction=interaction,
|
||||
)
|
||||
# And one to the post's author
|
||||
await FanOut.objects.acreate(
|
||||
type=FanOut.Types.interaction,
|
||||
identity_id=interaction.post.author_id,
|
||||
subject_post=interaction.post,
|
||||
subject_post_interaction=interaction,
|
||||
)
|
||||
# Like: send a copy to the original post author only
|
||||
elif interaction.type == interaction.Types.like:
|
||||
await FanOut.objects.acreate(
|
||||
type=FanOut.Types.interaction,
|
||||
identity_id=interaction.post.author_id,
|
||||
subject_post=interaction.post,
|
||||
subject_post_interaction=interaction,
|
||||
)
|
||||
# Boost: send a copy to all people who follow this user (limiting
|
||||
# to just local follows if it's a remote boost)
|
||||
# Pin: send Add activity to all people who follow this user
|
||||
if instance.type == instance.Types.boost or instance.type == instance.Types.pin:
|
||||
for target in instance.get_targets():
|
||||
FanOut.objects.create(
|
||||
type=FanOut.Types.interaction,
|
||||
identity=target,
|
||||
subject_post=instance.post,
|
||||
subject_post_interaction=instance,
|
||||
)
|
||||
# Like: send a copy to the original post author only,
|
||||
# if the liker is local or they are
|
||||
elif instance.type == instance.Types.like:
|
||||
if instance.identity.local or instance.post.local:
|
||||
FanOut.objects.create(
|
||||
type=FanOut.Types.interaction,
|
||||
identity_id=instance.post.author_id,
|
||||
subject_post=instance.post,
|
||||
subject_post_interaction=instance,
|
||||
)
|
||||
# Vote: send a copy of the vote to the original
|
||||
# post author only if it's a local interaction
|
||||
# to a non local post
|
||||
elif instance.type == instance.Types.vote:
|
||||
if instance.identity.local and not instance.post.local:
|
||||
FanOut.objects.create(
|
||||
type=FanOut.Types.interaction,
|
||||
identity_id=instance.post.author_id,
|
||||
subject_post=instance.post,
|
||||
subject_post_interaction=instance,
|
||||
)
|
||||
else:
|
||||
raise ValueError("Cannot fan out unknown type")
|
||||
# And one for themselves if they're local and it's a boost
|
||||
if (
|
||||
interaction.type == PostInteraction.Types.boost
|
||||
and interaction.identity.local
|
||||
):
|
||||
await FanOut.objects.acreate(
|
||||
identity_id=interaction.identity_id,
|
||||
if instance.type == PostInteraction.Types.boost and instance.identity.local:
|
||||
FanOut.objects.create(
|
||||
identity_id=instance.identity_id,
|
||||
type=FanOut.Types.interaction,
|
||||
subject_post=interaction.post,
|
||||
subject_post_interaction=interaction,
|
||||
subject_post=instance.post,
|
||||
subject_post_interaction=instance,
|
||||
)
|
||||
return cls.fanned_out
|
||||
|
||||
@classmethod
|
||||
async def handle_undone(cls, instance: "PostInteraction"):
|
||||
def handle_undone(cls, instance: "PostInteraction"):
|
||||
"""
|
||||
Creates all needed fan-out objects to undo a PostInteraction.
|
||||
"""
|
||||
interaction = await instance.afetch_full()
|
||||
# Undo Boost: send a copy to all people who follow this user
|
||||
if interaction.type == interaction.Types.boost:
|
||||
async for follow in interaction.identity.inbound_follows.select_related(
|
||||
# Undo Pin: send a Remove activity to all people who follow this user
|
||||
if instance.type == instance.Types.boost or instance.type == instance.Types.pin:
|
||||
for follow in instance.identity.inbound_follows.select_related(
|
||||
"source", "target"
|
||||
):
|
||||
if follow.source.local or follow.target.local:
|
||||
await FanOut.objects.acreate(
|
||||
FanOut.objects.create(
|
||||
type=FanOut.Types.undo_interaction,
|
||||
identity_id=follow.source_id,
|
||||
subject_post=interaction.post,
|
||||
subject_post_interaction=interaction,
|
||||
subject_post=instance.post,
|
||||
subject_post_interaction=instance,
|
||||
)
|
||||
# Undo Like: send a copy to the original post author only
|
||||
elif interaction.type == interaction.Types.like:
|
||||
await FanOut.objects.acreate(
|
||||
elif instance.type == instance.Types.like:
|
||||
FanOut.objects.create(
|
||||
type=FanOut.Types.undo_interaction,
|
||||
identity_id=interaction.post.author_id,
|
||||
subject_post=interaction.post,
|
||||
subject_post_interaction=interaction,
|
||||
identity_id=instance.post.author_id,
|
||||
subject_post=instance.post,
|
||||
subject_post_interaction=instance,
|
||||
)
|
||||
else:
|
||||
raise ValueError("Cannot fan out unknown type")
|
||||
# And one for themselves if they're local and it's a boost
|
||||
if (
|
||||
interaction.type == PostInteraction.Types.boost
|
||||
and interaction.identity.local
|
||||
):
|
||||
await FanOut.objects.acreate(
|
||||
identity_id=interaction.identity_id,
|
||||
if instance.type == PostInteraction.Types.boost and instance.identity.local:
|
||||
FanOut.objects.create(
|
||||
identity_id=instance.identity_id,
|
||||
type=FanOut.Types.undo_interaction,
|
||||
subject_post=interaction.post,
|
||||
subject_post_interaction=interaction,
|
||||
subject_post=instance.post,
|
||||
subject_post_interaction=instance,
|
||||
)
|
||||
return cls.undone_fanned_out
|
||||
|
||||
|
@ -122,6 +122,13 @@ class PostInteraction(StatorModel):
|
|||
class Types(models.TextChoices):
|
||||
like = "like"
|
||||
boost = "boost"
|
||||
vote = "vote"
|
||||
pin = "pin"
|
||||
|
||||
id = models.BigIntegerField(
|
||||
primary_key=True,
|
||||
default=Snowflake.generate_post_interaction,
|
||||
)
|
||||
|
||||
# The state the boost is in
|
||||
state = StateField(PostInteractionStates)
|
||||
|
@ -146,6 +153,10 @@ class PostInteraction(StatorModel):
|
|||
related_name="interactions",
|
||||
)
|
||||
|
||||
# Used to store any interaction extra text value like the vote
|
||||
# in the question/poll case
|
||||
value = models.CharField(max_length=50, blank=True, null=True)
|
||||
|
||||
# When the activity was originally created (as opposed to when we received it)
|
||||
# Mastodon only seems to send this for boosts, not likes
|
||||
published = models.DateTimeField(default=timezone.now)
|
||||
|
@ -154,7 +165,7 @@ class PostInteraction(StatorModel):
|
|||
updated = models.DateTimeField(auto_now=True)
|
||||
|
||||
class Meta:
|
||||
index_together = [["type", "identity", "post"]]
|
||||
indexes = [models.Index(fields=["type", "identity", "post"])]
|
||||
|
||||
### Display helpers ###
|
||||
|
||||
|
@ -168,7 +179,7 @@ class PostInteraction(StatorModel):
|
|||
ids_with_interaction_type = cls.objects.filter(
|
||||
identity=identity,
|
||||
post_id__in=[post.pk for post in posts],
|
||||
type__in=[cls.Types.like, cls.Types.boost],
|
||||
type__in=[cls.Types.like, cls.Types.boost, cls.Types.pin],
|
||||
state__in=[PostInteractionStates.new, PostInteractionStates.fanned_out],
|
||||
).values_list("post_id", "type")
|
||||
# Make it into the return dict
|
||||
|
@ -178,7 +189,7 @@ class PostInteraction(StatorModel):
|
|||
return result
|
||||
|
||||
@classmethod
|
||||
def get_event_interactions(cls, events, identity):
|
||||
def get_event_interactions(cls, events, identity) -> dict[str, set[str]]:
|
||||
"""
|
||||
Returns a dict of {interaction_type: set(post_ids)} for all the posts
|
||||
within the events and the given identity, for use in templates.
|
||||
|
@ -187,15 +198,87 @@ class PostInteraction(StatorModel):
|
|||
[e.subject_post for e in events if e.subject_post], identity
|
||||
)
|
||||
|
||||
### Async helpers ###
|
||||
def get_targets(self) -> Iterable[Identity]:
|
||||
"""
|
||||
Returns an iterable with Identities of followers that have unique
|
||||
shared_inbox among each other to be used as target.
|
||||
|
||||
async def afetch_full(self):
|
||||
When interaction is boost, only boost follows are considered,
|
||||
for pins all followers are considered.
|
||||
"""
|
||||
Returns a version of the object with all relations pre-loaded
|
||||
"""
|
||||
return await PostInteraction.objects.select_related("identity", "post").aget(
|
||||
pk=self.pk
|
||||
)
|
||||
# Start including the post author
|
||||
targets = {self.post.author}
|
||||
|
||||
query = self.identity.inbound_follows.active()
|
||||
# Include all followers that are following the boosts
|
||||
if self.type == self.Types.boost:
|
||||
query = query.filter(boosts=True)
|
||||
for follow in query.select_related("source"):
|
||||
targets.add(follow.source)
|
||||
|
||||
# Fetch the full blocks and remove them as targets
|
||||
for block in (
|
||||
self.identity.outbound_blocks.active()
|
||||
.filter(mute=False)
|
||||
.select_related("target")
|
||||
):
|
||||
try:
|
||||
targets.remove(block.target)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
deduped_targets = set()
|
||||
shared_inboxes = set()
|
||||
for target in targets:
|
||||
if target.local:
|
||||
# Local targets always gets the boosts
|
||||
# despite its creator locality
|
||||
deduped_targets.add(target)
|
||||
elif self.identity.local:
|
||||
# Dedupe the targets based on shared inboxes
|
||||
# (we only keep one per shared inbox)
|
||||
if not target.shared_inbox_uri:
|
||||
deduped_targets.add(target)
|
||||
elif target.shared_inbox_uri not in shared_inboxes:
|
||||
shared_inboxes.add(target.shared_inbox_uri)
|
||||
deduped_targets.add(target)
|
||||
|
||||
return deduped_targets
|
||||
|
||||
### Create helpers ###
|
||||
|
||||
@classmethod
|
||||
def create_votes(cls, post, identity, choices) -> list["PostInteraction"]:
|
||||
question = post.type_data
|
||||
|
||||
if question.end_time and timezone.now() > question.end_time:
|
||||
raise ValueError("Validation failed: The poll has already ended")
|
||||
|
||||
if post.interactions.filter(identity=identity, type=cls.Types.vote).exists():
|
||||
raise ValueError("Validation failed: You have already voted on this poll")
|
||||
|
||||
votes = []
|
||||
with transaction.atomic():
|
||||
for choice in set(choices):
|
||||
vote = cls.objects.create(
|
||||
identity=identity,
|
||||
post=post,
|
||||
type=PostInteraction.Types.vote,
|
||||
value=question.options[choice].name,
|
||||
)
|
||||
vote.object_uri = f"{identity.actor_uri}#votes/{vote.id}"
|
||||
vote.save()
|
||||
votes.append(vote)
|
||||
|
||||
if not post.local:
|
||||
question.options[choice].votes += 1
|
||||
|
||||
if not post.local:
|
||||
question.voter_count += 1
|
||||
|
||||
post.calculate_type_data()
|
||||
|
||||
return votes
|
||||
|
||||
### ActivityPub (outbound) ###
|
||||
|
||||
|
@ -223,10 +306,33 @@ class PostInteraction(StatorModel):
|
|||
"actor": self.identity.actor_uri,
|
||||
"object": self.post.object_uri,
|
||||
}
|
||||
else:
|
||||
elif self.type == self.Types.vote:
|
||||
value = {
|
||||
"type": "Note",
|
||||
"id": self.object_uri,
|
||||
"to": self.post.author.actor_uri,
|
||||
"name": self.value,
|
||||
"inReplyTo": self.post.object_uri,
|
||||
"attributedTo": self.identity.actor_uri,
|
||||
}
|
||||
elif self.type == self.Types.pin:
|
||||
raise ValueError("Cannot turn into AP")
|
||||
return value
|
||||
|
||||
def to_create_ap(self):
|
||||
"""
|
||||
Returns the AP JSON to create this object
|
||||
"""
|
||||
object = self.to_ap()
|
||||
return {
|
||||
"to": object.get("to", []),
|
||||
"cc": object.get("cc", []),
|
||||
"type": "Create",
|
||||
"id": self.object_uri,
|
||||
"actor": self.identity.actor_uri,
|
||||
"object": object,
|
||||
}
|
||||
|
||||
def to_undo_ap(self) -> dict:
|
||||
"""
|
||||
Returns the AP JSON to undo this object
|
||||
|
@ -239,6 +345,28 @@ class PostInteraction(StatorModel):
|
|||
"object": object,
|
||||
}
|
||||
|
||||
def to_add_ap(self):
|
||||
"""
|
||||
Returns the AP JSON to add a pin interaction to the featured collection
|
||||
"""
|
||||
return {
|
||||
"type": "Add",
|
||||
"actor": self.identity.actor_uri,
|
||||
"object": self.post.object_uri,
|
||||
"target": self.identity.actor_uri + "collections/featured/",
|
||||
}
|
||||
|
||||
def to_remove_ap(self):
|
||||
"""
|
||||
Returns the AP JSON to remove a pin interaction from the featured collection
|
||||
"""
|
||||
return {
|
||||
"type": "Remove",
|
||||
"actor": self.identity.actor_uri,
|
||||
"object": self.post.object_uri,
|
||||
"target": self.identity.actor_uri + "collections/featured/",
|
||||
}
|
||||
|
||||
### ActivityPub (inbound) ###
|
||||
|
||||
@classmethod
|
||||
|
@ -257,12 +385,40 @@ class PostInteraction(StatorModel):
|
|||
# Resolve the author
|
||||
identity = Identity.by_actor_uri(data["actor"], create=True)
|
||||
# Resolve the post
|
||||
post = Post.by_object_uri(get_str_or_id(data["object"]), fetch=True)
|
||||
object = data["object"]
|
||||
target = get_str_or_id(object, "inReplyTo") or get_str_or_id(object)
|
||||
post = Post.by_object_uri(target, fetch=True)
|
||||
value = None
|
||||
# Get the right type
|
||||
if data["type"].lower() == "like":
|
||||
type = cls.Types.like
|
||||
elif data["type"].lower() == "announce":
|
||||
type = cls.Types.boost
|
||||
elif (
|
||||
data["type"].lower() == "create"
|
||||
and object["type"].lower() == "note"
|
||||
and isinstance(post.type_data, QuestionData)
|
||||
):
|
||||
type = cls.Types.vote
|
||||
question = post.type_data
|
||||
value = object["name"]
|
||||
if question.end_time and timezone.now() > question.end_time:
|
||||
# TODO: Maybe create an expecific expired exception?
|
||||
raise cls.DoesNotExist(
|
||||
f"Cannot create a vote to the expired question {post.id}"
|
||||
)
|
||||
|
||||
already_voted = (
|
||||
post.type_data.mode == "oneOf"
|
||||
and post.interactions.filter(
|
||||
type=cls.Types.vote, identity=identity
|
||||
).exists()
|
||||
)
|
||||
if already_voted:
|
||||
raise cls.DoesNotExist(
|
||||
f"The identity {identity.handle} already voted in question {post.id}"
|
||||
)
|
||||
|
||||
else:
|
||||
raise ValueError(f"Cannot handle AP type {data['type']}")
|
||||
# Make the actual interaction
|
||||
|
@ -273,6 +429,7 @@ class PostInteraction(StatorModel):
|
|||
published=parse_ld_date(data.get("published", None))
|
||||
or timezone.now(),
|
||||
type=type,
|
||||
value=value,
|
||||
)
|
||||
else:
|
||||
raise cls.DoesNotExist(f"No interaction with ID {data['id']}", data)
|
||||
|
@ -291,17 +448,10 @@ class PostInteraction(StatorModel):
|
|||
# That post is gone, boss
|
||||
# TODO: Limited retry state?
|
||||
return
|
||||
# Boosts (announces) go to everyone who follows locally
|
||||
if interaction.type == cls.Types.boost:
|
||||
for follow in Follow.objects.filter(
|
||||
target=interaction.identity, source__local=True
|
||||
):
|
||||
TimelineEvent.add_post_interaction(follow.source, interaction)
|
||||
# Likes go to just the author of the post
|
||||
elif interaction.type == cls.Types.like:
|
||||
TimelineEvent.add_post_interaction(interaction.post.author, interaction)
|
||||
# Force it into fanned_out as it's not ours
|
||||
interaction.transition_perform(PostInteractionStates.fanned_out)
|
||||
|
||||
if interaction and interaction.post:
|
||||
interaction.post.calculate_stats()
|
||||
interaction.post.calculate_type_data()
|
||||
|
||||
@classmethod
|
||||
def handle_undo_ap(cls, data):
|
||||
|
@ -322,10 +472,83 @@ class PostInteraction(StatorModel):
|
|||
interaction.timeline_events.all().delete()
|
||||
# Force it into undone_fanned_out as it's not ours
|
||||
interaction.transition_perform(PostInteractionStates.undone_fanned_out)
|
||||
# Recalculate post stats
|
||||
interaction.post.calculate_stats()
|
||||
interaction.post.calculate_type_data()
|
||||
|
||||
@classmethod
|
||||
def handle_add_ap(cls, data):
|
||||
"""
|
||||
Handles an incoming Add activity which is a pin
|
||||
"""
|
||||
target = data.get("target", None)
|
||||
if not target:
|
||||
return
|
||||
|
||||
# we only care about pinned posts, not hashtags
|
||||
object = data.get("object", {})
|
||||
if isinstance(object, dict) and object.get("type") == "Hashtag":
|
||||
return
|
||||
|
||||
with transaction.atomic():
|
||||
identity = Identity.by_actor_uri(data["actor"], create=True)
|
||||
# it's only a pin if the target is the identity's featured collection URI
|
||||
if identity.featured_collection_uri != target:
|
||||
return
|
||||
|
||||
object_uri = get_str_or_id(object)
|
||||
if not object_uri:
|
||||
return
|
||||
post = Post.by_object_uri(object_uri, fetch=True)
|
||||
|
||||
return PostInteraction.objects.get_or_create(
|
||||
type=cls.Types.pin,
|
||||
identity=identity,
|
||||
post=post,
|
||||
state__in=PostInteractionStates.group_active(),
|
||||
)[0]
|
||||
|
||||
@classmethod
|
||||
def handle_remove_ap(cls, data):
|
||||
"""
|
||||
Handles an incoming Remove activity which is an unpin
|
||||
"""
|
||||
target = data.get("target", None)
|
||||
if not target:
|
||||
return
|
||||
|
||||
# we only care about pinned posts, not hashtags
|
||||
object = data.get("object", {})
|
||||
if isinstance(object, dict) and object.get("type") == "Hashtag":
|
||||
return
|
||||
|
||||
with transaction.atomic():
|
||||
identity = Identity.by_actor_uri(data["actor"], create=True)
|
||||
# it's only an unpin if the target is the identity's featured collection URI
|
||||
if identity.featured_collection_uri != target:
|
||||
return
|
||||
|
||||
try:
|
||||
object_uri = get_str_or_id(object)
|
||||
if not object_uri:
|
||||
return
|
||||
post = Post.by_object_uri(object_uri, fetch=False)
|
||||
for interaction in cls.objects.filter(
|
||||
type=cls.Types.pin,
|
||||
identity=identity,
|
||||
post=post,
|
||||
state__in=PostInteractionStates.group_active(),
|
||||
):
|
||||
# Force it into undone_fanned_out as it's not ours
|
||||
interaction.transition_perform(
|
||||
PostInteractionStates.undone_fanned_out
|
||||
)
|
||||
except (cls.DoesNotExist, Post.DoesNotExist):
|
||||
return
|
||||
|
||||
### Mastodon API ###
|
||||
|
||||
def to_mastodon_status_json(self, interactions=None):
|
||||
def to_mastodon_status_json(self, interactions=None, identity=None):
|
||||
"""
|
||||
This wraps Posts in a fake Status for boost interactions.
|
||||
"""
|
||||
|
@ -333,13 +556,15 @@ class PostInteraction(StatorModel):
|
|||
raise ValueError(
|
||||
f"Cannot make status JSON for interaction of type {self.type}"
|
||||
)
|
||||
# Grab our subject post JSON, and just return it if we're a post
|
||||
post_json = self.post.to_mastodon_json(interactions=interactions)
|
||||
# Make a fake post for this boost (because mastodon treats boosts as posts)
|
||||
post_json = self.post.to_mastodon_json(
|
||||
interactions=interactions, identity=identity
|
||||
)
|
||||
return {
|
||||
"id": f"interaction-{self.pk}",
|
||||
"id": f"{self.pk}",
|
||||
"uri": post_json["uri"],
|
||||
"created_at": format_ld_date(self.published),
|
||||
"account": self.identity.to_mastodon_json(),
|
||||
"account": self.identity.to_mastodon_json(include_counts=False),
|
||||
"content": "",
|
||||
"visibility": post_json["visibility"],
|
||||
"sensitive": post_json["sensitive"],
|
||||
|
@ -354,7 +579,7 @@ class PostInteraction(StatorModel):
|
|||
"url": post_json["url"],
|
||||
"in_reply_to_id": None,
|
||||
"in_reply_to_account_id": None,
|
||||
"poll": None,
|
||||
"poll": post_json["poll"],
|
||||
"card": None,
|
||||
"language": None,
|
||||
"text": "",
|
||||
|
|
|
@ -2,8 +2,11 @@ import json
|
|||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from django.utils import timezone
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from core.ld import format_ld_date
|
||||
|
||||
|
||||
class BasePostDataType(BaseModel):
|
||||
pass
|
||||
|
@ -14,6 +17,11 @@ class QuestionOption(BaseModel):
|
|||
type: Literal["Note"] = "Note"
|
||||
votes: int = 0
|
||||
|
||||
def __init__(self, **data) -> None:
|
||||
data["votes"] = data.get("votes", data.get("replies", {}).get("totalItems", 0))
|
||||
|
||||
super().__init__(**data)
|
||||
|
||||
|
||||
class QuestionData(BasePostDataType):
|
||||
type: Literal["Question"]
|
||||
|
@ -27,6 +35,10 @@ class QuestionData(BasePostDataType):
|
|||
allow_population_by_field_name = True
|
||||
|
||||
def __init__(self, **data) -> None:
|
||||
data["voter_count"] = data.get(
|
||||
"voter_count", data.get("votersCount", data.get("toot:votersCount", 0))
|
||||
)
|
||||
|
||||
if "mode" not in data:
|
||||
data["mode"] = "anyOf" if "anyOf" in data else "oneOf"
|
||||
if "options" not in data:
|
||||
|
@ -36,6 +48,51 @@ class QuestionData(BasePostDataType):
|
|||
data["options"] = options
|
||||
super().__init__(**data)
|
||||
|
||||
def to_mastodon_json(self, post, identity=None):
|
||||
from activities.models import PostInteraction
|
||||
|
||||
multiple = self.mode == "anyOf"
|
||||
value = {
|
||||
"id": post.id,
|
||||
"expires_at": None,
|
||||
"expired": False,
|
||||
"multiple": multiple,
|
||||
"votes_count": 0,
|
||||
"voters_count": self.voter_count,
|
||||
"voted": False,
|
||||
"own_votes": [],
|
||||
"options": [],
|
||||
"emojis": [],
|
||||
}
|
||||
|
||||
if self.end_time:
|
||||
value["expires_at"] = format_ld_date(self.end_time)
|
||||
value["expired"] = timezone.now() >= self.end_time
|
||||
|
||||
options = self.options or []
|
||||
option_map = {}
|
||||
for index, option in enumerate(options):
|
||||
value["options"].append(
|
||||
{
|
||||
"title": option.name,
|
||||
"votes_count": option.votes,
|
||||
}
|
||||
)
|
||||
value["votes_count"] += option.votes
|
||||
option_map[option.name] = index
|
||||
|
||||
if identity:
|
||||
votes = post.interactions.filter(
|
||||
identity=identity,
|
||||
type=PostInteraction.Types.vote,
|
||||
)
|
||||
value["voted"] = post.author == identity or votes.exists()
|
||||
value["own_votes"] = [
|
||||
option_map[vote.value] for vote in votes if vote.value in option_map
|
||||
]
|
||||
|
||||
return value
|
||||
|
||||
|
||||
class ArticleData(BasePostDataType):
|
||||
type: Literal["Article"]
|
||||
|
|
|
@ -16,8 +16,10 @@ class TimelineEvent(models.Model):
|
|||
mentioned = "mentioned"
|
||||
liked = "liked" # Someone liking one of our posts
|
||||
followed = "followed"
|
||||
follow_requested = "follow_requested"
|
||||
boosted = "boosted" # Someone boosting one of our posts
|
||||
announcement = "announcement" # Server announcement
|
||||
identity_created = "identity_created" # New identity created
|
||||
|
||||
# The user this event is for
|
||||
identity = models.ForeignKey(
|
||||
|
@ -54,14 +56,18 @@ class TimelineEvent(models.Model):
|
|||
|
||||
published = models.DateTimeField(default=timezone.now)
|
||||
seen = models.BooleanField(default=False)
|
||||
dismissed = models.BooleanField(default=False)
|
||||
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
class Meta:
|
||||
index_together = [
|
||||
indexes = [
|
||||
# This relies on a DB that can use left subsets of indexes
|
||||
("identity", "type", "subject_post", "subject_identity"),
|
||||
("identity", "type", "subject_identity"),
|
||||
models.Index(
|
||||
fields=["identity", "type", "subject_post", "subject_identity"]
|
||||
),
|
||||
models.Index(fields=["identity", "type", "subject_identity"]),
|
||||
models.Index(fields=["identity", "created"]),
|
||||
]
|
||||
|
||||
### Alternate constructors ###
|
||||
|
@ -69,14 +75,30 @@ class TimelineEvent(models.Model):
|
|||
@classmethod
|
||||
def add_follow(cls, identity, source_identity):
|
||||
"""
|
||||
Adds a follow to the timeline if it's not there already
|
||||
Adds a follow to the timeline if it's not there already, remove follow request if any
|
||||
"""
|
||||
cls.objects.filter(
|
||||
type=cls.Types.follow_requested,
|
||||
identity=identity,
|
||||
subject_identity=source_identity,
|
||||
).delete()
|
||||
return cls.objects.get_or_create(
|
||||
identity=identity,
|
||||
type=cls.Types.followed,
|
||||
subject_identity=source_identity,
|
||||
)[0]
|
||||
|
||||
@classmethod
|
||||
def add_follow_request(cls, identity, source_identity):
|
||||
"""
|
||||
Adds a follow request to the timeline if it's not there already
|
||||
"""
|
||||
return cls.objects.get_or_create(
|
||||
identity=identity,
|
||||
type=cls.Types.follow_requested,
|
||||
subject_identity=source_identity,
|
||||
)[0]
|
||||
|
||||
@classmethod
|
||||
def add_post(cls, identity, post):
|
||||
"""
|
||||
|
@ -102,6 +124,17 @@ class TimelineEvent(models.Model):
|
|||
defaults={"published": post.published or post.created},
|
||||
)[0]
|
||||
|
||||
@classmethod
|
||||
def add_identity_created(cls, identity, new_identity):
|
||||
"""
|
||||
Adds a new identity item
|
||||
"""
|
||||
return cls.objects.get_or_create(
|
||||
identity=identity,
|
||||
type=cls.Types.identity_created,
|
||||
subject_identity=new_identity,
|
||||
)[0]
|
||||
|
||||
@classmethod
|
||||
def add_post_interaction(cls, identity, interaction):
|
||||
"""
|
||||
|
@ -153,6 +186,38 @@ class TimelineEvent(models.Model):
|
|||
subject_identity_id=interaction.identity_id,
|
||||
).delete()
|
||||
|
||||
@classmethod
|
||||
def delete_follow(cls, target, source):
|
||||
TimelineEvent.objects.filter(
|
||||
type__in=[cls.Types.followed, cls.Types.follow_requested],
|
||||
identity=target,
|
||||
subject_identity=source,
|
||||
).delete()
|
||||
|
||||
### Background tasks ###
|
||||
|
||||
@classmethod
|
||||
def handle_clear_timeline(cls, message):
|
||||
"""
|
||||
Internal stator handler for clearing all events by a user off another
|
||||
user's timeline.
|
||||
"""
|
||||
actor_id = message["actor"]
|
||||
object_id = message["object"]
|
||||
full_erase = message.get("fullErase", False)
|
||||
|
||||
if full_erase:
|
||||
q = (
|
||||
models.Q(subject_post__author_id=object_id)
|
||||
| models.Q(subject_post_interaction__identity_id=object_id)
|
||||
| models.Q(subject_identity_id=object_id)
|
||||
)
|
||||
else:
|
||||
q = models.Q(
|
||||
type=cls.Types.post, subject_post__author_id=object_id
|
||||
) | models.Q(type=cls.Types.boost, subject_identity_id=object_id)
|
||||
TimelineEvent.objects.filter(q, identity_id=actor_id).delete()
|
||||
|
||||
### Mastodon Client API ###
|
||||
|
||||
def to_mastodon_notification_json(self, interactions=None):
|
||||
|
@ -178,16 +243,22 @@ class TimelineEvent(models.Model):
|
|||
)
|
||||
elif self.type == self.Types.followed:
|
||||
result["type"] = "follow"
|
||||
elif self.type == self.Types.follow_requested:
|
||||
result["type"] = "follow_request"
|
||||
elif self.type == self.Types.identity_created:
|
||||
result["type"] = "admin.sign_up"
|
||||
else:
|
||||
raise ValueError(f"Cannot convert {self.type} to notification JSON")
|
||||
return result
|
||||
|
||||
def to_mastodon_status_json(self, interactions=None):
|
||||
def to_mastodon_status_json(self, interactions=None, bookmarks=None, identity=None):
|
||||
if self.type == self.Types.post:
|
||||
return self.subject_post.to_mastodon_json(interactions=interactions)
|
||||
return self.subject_post.to_mastodon_json(
|
||||
interactions=interactions, bookmarks=bookmarks, identity=identity
|
||||
)
|
||||
elif self.type == self.Types.boost:
|
||||
return self.subject_post_interaction.to_mastodon_status_json(
|
||||
interactions=interactions
|
||||
interactions=interactions, identity=identity
|
||||
)
|
||||
else:
|
||||
raise ValueError(f"Cannot make status JSON for type {self.type}")
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
from django.db import models
|
||||
import logging
|
||||
|
||||
from activities.models import (
|
||||
Post,
|
||||
|
@ -9,6 +9,8 @@ from activities.models import (
|
|||
)
|
||||
from users.models import Identity
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PostService:
|
||||
"""
|
||||
|
@ -31,22 +33,6 @@ class PostService:
|
|||
"author",
|
||||
"author__domain",
|
||||
)
|
||||
.annotate(
|
||||
like_count=models.Count(
|
||||
"interactions",
|
||||
filter=models.Q(
|
||||
interactions__type=PostInteraction.Types.like,
|
||||
interactions__state__in=PostInteractionStates.group_active(),
|
||||
),
|
||||
),
|
||||
boost_count=models.Count(
|
||||
"interactions",
|
||||
filter=models.Q(
|
||||
interactions__type=PostInteraction.Types.boost,
|
||||
interactions__state__in=PostInteractionStates.group_active(),
|
||||
),
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
def __init__(self, post: Post):
|
||||
|
@ -63,6 +49,7 @@ class PostService:
|
|||
)[0]
|
||||
if interaction.state not in PostInteractionStates.group_active():
|
||||
interaction.transition_perform(PostInteractionStates.new)
|
||||
self.post.calculate_stats()
|
||||
|
||||
def uninteract_as(self, identity, type):
|
||||
"""
|
||||
|
@ -74,6 +61,7 @@ class PostService:
|
|||
post=self.post,
|
||||
):
|
||||
interaction.transition_perform(PostInteractionStates.undone)
|
||||
self.post.calculate_stats()
|
||||
|
||||
def like_as(self, identity: Identity):
|
||||
self.interact_as(identity, PostInteraction.Types.like)
|
||||
|
@ -87,7 +75,12 @@ class PostService:
|
|||
def unboost_as(self, identity: Identity):
|
||||
self.uninteract_as(identity, PostInteraction.Types.boost)
|
||||
|
||||
def context(self, identity: Identity | None) -> tuple[list[Post], list[Post]]:
|
||||
def context(
|
||||
self,
|
||||
identity: Identity | None,
|
||||
num_ancestors: int = 10,
|
||||
num_descendants: int = 50,
|
||||
) -> tuple[list[Post], list[Post]]:
|
||||
"""
|
||||
Returns ancestor/descendant information.
|
||||
|
||||
|
@ -97,16 +90,20 @@ class PostService:
|
|||
If identity is provided, includes mentions/followers-only posts they
|
||||
can see. Otherwise, shows unlisted and above only.
|
||||
"""
|
||||
num_ancestors = 10
|
||||
num_descendants = 50
|
||||
# Retrieve ancestors via parent walk
|
||||
ancestors: list[Post] = []
|
||||
ancestor = self.post
|
||||
while ancestor.in_reply_to and len(ancestors) < num_ancestors:
|
||||
object_uri = ancestor.in_reply_to
|
||||
reason = ancestor.object_uri
|
||||
ancestor = self.queryset().filter(object_uri=object_uri).first()
|
||||
if ancestor is None:
|
||||
Post.ensure_object_uri(object_uri)
|
||||
try:
|
||||
Post.ensure_object_uri(object_uri, reason=reason)
|
||||
except ValueError:
|
||||
logger.error(
|
||||
f"Cannot fetch ancestor Post={self.post.pk}, ancestor_uri={object_uri}"
|
||||
)
|
||||
break
|
||||
if ancestor.state in [PostStates.deleted, PostStates.deleted_fanned_out]:
|
||||
break
|
||||
|
@ -114,6 +111,7 @@ class PostService:
|
|||
# Retrieve descendants via breadth-first-search
|
||||
descendants: list[Post] = []
|
||||
queue = [self.post]
|
||||
seen: set[str] = set()
|
||||
while queue and len(descendants) < num_descendants:
|
||||
node = queue.pop()
|
||||
child_queryset = (
|
||||
|
@ -128,8 +126,10 @@ class PostService:
|
|||
else:
|
||||
child_queryset = child_queryset.unlisted(include_replies=True)
|
||||
for child in child_queryset:
|
||||
descendants.append(child)
|
||||
queue.append(child)
|
||||
if child.pk not in seen:
|
||||
descendants.append(child)
|
||||
queue.append(child)
|
||||
seen.add(child.pk)
|
||||
return ancestors, descendants
|
||||
|
||||
def delete(self):
|
||||
|
@ -145,3 +145,22 @@ class PostService:
|
|||
),
|
||||
PostInteractionStates.undone,
|
||||
)
|
||||
|
||||
def pin_as(self, identity: Identity):
|
||||
if identity != self.post.author:
|
||||
raise ValueError("Not the author of this post")
|
||||
if self.post.visibility == Post.Visibilities.mentioned:
|
||||
raise ValueError("Cannot pin a mentioned-only post")
|
||||
if (
|
||||
PostInteraction.objects.filter(
|
||||
type=PostInteraction.Types.pin,
|
||||
identity=identity,
|
||||
).count()
|
||||
>= 5
|
||||
):
|
||||
raise ValueError("Maximum number of pins already reached")
|
||||
|
||||
self.interact_as(identity, PostInteraction.Types.pin)
|
||||
|
||||
def unpin_as(self, identity: Identity):
|
||||
self.uninteract_as(identity, PostInteraction.Types.pin)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import httpx
|
||||
from asgiref.sync import async_to_sync
|
||||
|
||||
from activities.models import Hashtag, Post
|
||||
from core.json import json_from_response
|
||||
from core.ld import canonicalise
|
||||
from users.models import Domain, Identity, IdentityStates
|
||||
from users.models.system_actor import SystemActor
|
||||
|
@ -49,7 +49,7 @@ class SearchService:
|
|||
username, domain_instance or domain, fetch=True
|
||||
)
|
||||
if identity and identity.state == IdentityStates.outdated:
|
||||
async_to_sync(identity.fetch_actor)()
|
||||
identity.fetch_actor()
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
|
@ -59,7 +59,7 @@ class SearchService:
|
|||
else:
|
||||
for identity in Identity.objects.filter(username=handle)[:20]:
|
||||
results.add(identity)
|
||||
for identity in Identity.objects.filter(username__startswith=handle)[:20]:
|
||||
for identity in Identity.objects.filter(username__istartswith=handle)[:20]:
|
||||
results.add(identity)
|
||||
return results
|
||||
|
||||
|
@ -74,7 +74,7 @@ class SearchService:
|
|||
|
||||
# Fetch the provided URL as the system actor to retrieve the AP JSON
|
||||
try:
|
||||
response = async_to_sync(SystemActor().signed_request)(
|
||||
response = SystemActor().signed_request(
|
||||
method="get",
|
||||
uri=self.query,
|
||||
)
|
||||
|
@ -82,7 +82,12 @@ class SearchService:
|
|||
return None
|
||||
if response.status_code >= 400:
|
||||
return None
|
||||
document = canonicalise(response.json(), include_security=True)
|
||||
|
||||
json_data = json_from_response(response)
|
||||
if not json_data:
|
||||
return None
|
||||
|
||||
document = canonicalise(json_data, include_security=True)
|
||||
type = document.get("type", "unknown").lower()
|
||||
|
||||
# Is it an identity?
|
||||
|
@ -90,11 +95,11 @@ class SearchService:
|
|||
# Try and retrieve the profile by actor URI
|
||||
identity = Identity.by_actor_uri(document["id"], create=True)
|
||||
if identity and identity.state == IdentityStates.outdated:
|
||||
async_to_sync(identity.fetch_actor)()
|
||||
identity.fetch_actor()
|
||||
return identity
|
||||
|
||||
# Is it a post?
|
||||
elif type == "note":
|
||||
elif type in [value.lower() for value in Post.Types.values]:
|
||||
# Try and retrieve the post by URI
|
||||
# (we do not trust the JSON we just got - fetch from source!)
|
||||
try:
|
||||
|
@ -123,6 +128,14 @@ class SearchService:
|
|||
results.add(hashtag)
|
||||
return results
|
||||
|
||||
def search_post_content(self):
|
||||
"""
|
||||
Searches for posts on an identity via full text search
|
||||
"""
|
||||
return self.identity.posts.unlisted(include_replies=True).filter(
|
||||
content__search=self.query
|
||||
)[:50]
|
||||
|
||||
def search_all(self):
|
||||
"""
|
||||
Returns all possible results for a search
|
||||
|
|
|
@ -21,38 +21,19 @@ class TimelineService:
|
|||
|
||||
@classmethod
|
||||
def event_queryset(cls):
|
||||
return (
|
||||
TimelineEvent.objects.select_related(
|
||||
"subject_post",
|
||||
"subject_post__author",
|
||||
"subject_post__author__domain",
|
||||
"subject_identity",
|
||||
"subject_identity__domain",
|
||||
"subject_post_interaction",
|
||||
"subject_post_interaction__identity",
|
||||
"subject_post_interaction__identity__domain",
|
||||
)
|
||||
.prefetch_related(
|
||||
"subject_post__attachments",
|
||||
"subject_post__mentions",
|
||||
"subject_post__emojis",
|
||||
)
|
||||
.annotate(
|
||||
like_count=models.Count(
|
||||
"subject_post__interactions",
|
||||
filter=models.Q(
|
||||
subject_post__interactions__type=PostInteraction.Types.like,
|
||||
subject_post__interactions__state__in=PostInteractionStates.group_active(),
|
||||
),
|
||||
),
|
||||
boost_count=models.Count(
|
||||
"subject_post__interactions",
|
||||
filter=models.Q(
|
||||
subject_post__interactions__type=PostInteraction.Types.boost,
|
||||
subject_post__interactions__state__in=PostInteractionStates.group_active(),
|
||||
),
|
||||
),
|
||||
)
|
||||
return TimelineEvent.objects.select_related(
|
||||
"subject_post",
|
||||
"subject_post__author",
|
||||
"subject_post__author__domain",
|
||||
"subject_identity",
|
||||
"subject_identity__domain",
|
||||
"subject_post_interaction",
|
||||
"subject_post_interaction__identity",
|
||||
"subject_post_interaction__identity__domain",
|
||||
).prefetch_related(
|
||||
"subject_post__attachments",
|
||||
"subject_post__mentions",
|
||||
"subject_post__emojis",
|
||||
)
|
||||
|
||||
def home(self) -> models.QuerySet[TimelineEvent]:
|
||||
|
@ -62,23 +43,26 @@ class TimelineService:
|
|||
identity=self.identity,
|
||||
type__in=[TimelineEvent.Types.post, TimelineEvent.Types.boost],
|
||||
)
|
||||
.order_by("-published")
|
||||
.order_by("-created")
|
||||
)
|
||||
|
||||
def local(self) -> models.QuerySet[Post]:
|
||||
return (
|
||||
queryset = (
|
||||
PostService.queryset()
|
||||
.local_public()
|
||||
.filter(author__restriction=Identity.Restriction.none)
|
||||
.order_by("-published")
|
||||
.order_by("-id")
|
||||
)
|
||||
if self.identity is not None:
|
||||
queryset = queryset.filter(author__domain=self.identity.domain)
|
||||
return queryset
|
||||
|
||||
def federated(self) -> models.QuerySet[Post]:
|
||||
return (
|
||||
PostService.queryset()
|
||||
.public()
|
||||
.filter(author__restriction=Identity.Restriction.none)
|
||||
.order_by("-published")
|
||||
.order_by("-id")
|
||||
)
|
||||
|
||||
def hashtag(self, hashtag: str | Hashtag) -> models.QuerySet[Post]:
|
||||
|
@ -87,23 +71,84 @@ class TimelineService:
|
|||
.public()
|
||||
.filter(author__restriction=Identity.Restriction.none)
|
||||
.tagged_with(hashtag)
|
||||
.order_by("-published")
|
||||
.order_by("-id")
|
||||
)
|
||||
|
||||
def notifications(self, types: list[str]) -> models.QuerySet[TimelineEvent]:
|
||||
return (
|
||||
self.event_queryset()
|
||||
.filter(identity=self.identity, type__in=types)
|
||||
.order_by("-published")
|
||||
.filter(identity=self.identity, type__in=types, dismissed=False)
|
||||
.order_by("-created")
|
||||
)
|
||||
|
||||
def identity_public(self, identity: Identity):
|
||||
def identity_public(
|
||||
self,
|
||||
identity: Identity,
|
||||
include_boosts: bool = True,
|
||||
include_replies: bool = True,
|
||||
):
|
||||
"""
|
||||
Returns all publically visible posts for an identity
|
||||
Returns timeline events with all of an identity's publicly visible posts
|
||||
and their boosts
|
||||
"""
|
||||
filter = models.Q(
|
||||
type=TimelineEvent.Types.post,
|
||||
subject_post__author=identity,
|
||||
subject_post__visibility__in=[
|
||||
Post.Visibilities.public,
|
||||
Post.Visibilities.local_only,
|
||||
Post.Visibilities.unlisted,
|
||||
],
|
||||
)
|
||||
if include_boosts:
|
||||
filter = filter | models.Q(
|
||||
type=TimelineEvent.Types.boost, subject_identity=identity
|
||||
)
|
||||
if not include_replies:
|
||||
filter = filter & models.Q(subject_post__in_reply_to__isnull=True)
|
||||
return (
|
||||
self.event_queryset()
|
||||
.filter(
|
||||
filter,
|
||||
identity=identity,
|
||||
)
|
||||
.order_by("-created")
|
||||
)
|
||||
|
||||
def identity_pinned(self) -> models.QuerySet[Post]:
|
||||
"""
|
||||
Return all pinned posts that are publicly visible for an identity
|
||||
"""
|
||||
return (
|
||||
PostService.queryset()
|
||||
.filter(author=identity)
|
||||
.unlisted(include_replies=True)
|
||||
.order_by("-created")
|
||||
.public()
|
||||
.filter(
|
||||
interactions__identity=self.identity,
|
||||
interactions__type=PostInteraction.Types.pin,
|
||||
interactions__state__in=PostInteractionStates.group_active(),
|
||||
)
|
||||
)
|
||||
|
||||
def likes(self) -> models.QuerySet[Post]:
|
||||
"""
|
||||
Return all liked posts for an identity
|
||||
"""
|
||||
return (
|
||||
PostService.queryset()
|
||||
.filter(
|
||||
interactions__identity=self.identity,
|
||||
interactions__type=PostInteraction.Types.like,
|
||||
interactions__state__in=PostInteractionStates.group_active(),
|
||||
)
|
||||
.order_by("-id")
|
||||
)
|
||||
|
||||
def bookmarks(self) -> models.QuerySet[Post]:
|
||||
"""
|
||||
Return all bookmarked posts for an identity
|
||||
"""
|
||||
return (
|
||||
PostService.queryset()
|
||||
.filter(bookmarks__identity=self.identity)
|
||||
.order_by("-id")
|
||||
)
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import datetime
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from django import template
|
||||
from django.utils import timezone
|
||||
|
@ -13,10 +14,11 @@ def timedeltashort(value: datetime.datetime):
|
|||
"""
|
||||
if not value:
|
||||
return ""
|
||||
# TODO: Handle things in the future properly
|
||||
delta = timezone.now() - value
|
||||
seconds = int(delta.total_seconds())
|
||||
days = delta.days
|
||||
sign = "-" if seconds < 0 else ""
|
||||
seconds = abs(seconds)
|
||||
days = abs(delta.days)
|
||||
if seconds < 60:
|
||||
text = f"{seconds:0n}s"
|
||||
elif seconds < 60 * 60:
|
||||
|
@ -30,4 +32,32 @@ def timedeltashort(value: datetime.datetime):
|
|||
else:
|
||||
years = max(days // 365.25, 1)
|
||||
text = f"{years:0n}y"
|
||||
return text
|
||||
return sign + text
|
||||
|
||||
|
||||
@register.filter
|
||||
def timedeltashortenddate(value: datetime.datetime):
|
||||
"""
|
||||
Formatter for end dates - timedeltashort but it adds "ended ... ago" or
|
||||
"left" depending on the direction.
|
||||
"""
|
||||
output = timedeltashort(value)
|
||||
if output.startswith("-"):
|
||||
return f"{output[1:]} left"
|
||||
else:
|
||||
return f"Ended {output} ago"
|
||||
|
||||
|
||||
@register.simple_tag(takes_context=True)
|
||||
def urlparams(context, **kwargs):
|
||||
"""
|
||||
Generates a URL parameter string the same as the current page but with
|
||||
the given items changed.
|
||||
"""
|
||||
params = dict(context["request"].GET.items())
|
||||
for name, value in kwargs.items():
|
||||
if value:
|
||||
params[name] = value
|
||||
elif name in params:
|
||||
del params[name]
|
||||
return urlencode(params)
|
||||
|
|
|
@ -1,27 +1,17 @@
|
|||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import PermissionDenied
|
||||
from django.shortcuts import get_object_or_404, redirect, render
|
||||
from django.contrib import messages
|
||||
from django.shortcuts import redirect
|
||||
from django.utils import timezone
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.generic import FormView
|
||||
|
||||
from activities.models import (
|
||||
Post,
|
||||
PostAttachment,
|
||||
PostAttachmentStates,
|
||||
PostStates,
|
||||
TimelineEvent,
|
||||
)
|
||||
from activities.models import Post, PostAttachment, PostAttachmentStates, TimelineEvent
|
||||
from core.files import blurhash_image, resize_image
|
||||
from core.html import html_to_plaintext
|
||||
from core.models import Config
|
||||
from users.decorators import identity_required
|
||||
from users.views.base import IdentityViewMixin
|
||||
|
||||
|
||||
@method_decorator(identity_required, name="dispatch")
|
||||
class Compose(FormView):
|
||||
|
||||
class Compose(IdentityViewMixin, FormView):
|
||||
template_name = "activities/compose.html"
|
||||
|
||||
class form_class(forms.Form):
|
||||
|
@ -29,11 +19,11 @@ class Compose(FormView):
|
|||
widget=forms.Textarea(
|
||||
attrs={
|
||||
"autofocus": "autofocus",
|
||||
"maxlength": Config.lazy_system_value("post_length"),
|
||||
"placeholder": "What's on your mind?",
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
visibility = forms.ChoiceField(
|
||||
choices=[
|
||||
(Post.Visibilities.public, "Public"),
|
||||
|
@ -43,6 +33,7 @@ class Compose(FormView):
|
|||
(Post.Visibilities.mentioned, "Mentioned Only"),
|
||||
],
|
||||
)
|
||||
|
||||
content_warning = forms.CharField(
|
||||
required=False,
|
||||
label=Config.lazy_system_value("content_warning_text"),
|
||||
|
@ -53,16 +44,54 @@ class Compose(FormView):
|
|||
),
|
||||
help_text="Optional - Post will be hidden behind this text until clicked",
|
||||
)
|
||||
reply_to = forms.CharField(widget=forms.HiddenInput(), required=False)
|
||||
|
||||
def __init__(self, request, *args, **kwargs):
|
||||
image = forms.ImageField(
|
||||
required=False,
|
||||
help_text="Optional - For multiple image uploads and cropping, please use an app",
|
||||
widget=forms.FileInput(
|
||||
attrs={
|
||||
"_": f"""
|
||||
on change
|
||||
if me.files[0].size > {settings.SETUP.MEDIA_MAX_IMAGE_FILESIZE_MB * 1024 ** 2}
|
||||
add [@disabled=] to #upload
|
||||
|
||||
remove <ul.errorlist/>
|
||||
make <ul.errorlist/> called errorlist
|
||||
make <li/> called error
|
||||
set size_in_mb to (me.files[0].size / 1024 / 1024).toFixed(2)
|
||||
put 'File must be {settings.SETUP.MEDIA_MAX_IMAGE_FILESIZE_MB}MB or less (actual: ' + size_in_mb + 'MB)' into error
|
||||
put error into errorlist
|
||||
put errorlist before me
|
||||
else
|
||||
remove @disabled from #upload
|
||||
remove <ul.errorlist/>
|
||||
end
|
||||
end
|
||||
"""
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
image_caption = forms.CharField(
|
||||
required=False,
|
||||
help_text="Provide an image caption for the visually impaired",
|
||||
)
|
||||
|
||||
def __init__(self, identity, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.request = request
|
||||
self.identity = identity
|
||||
self.fields["text"].widget.attrs[
|
||||
"_"
|
||||
] = f"""
|
||||
] = rf"""
|
||||
init
|
||||
-- Move cursor to the end of existing text
|
||||
set my.selectionStart to my.value.length
|
||||
end
|
||||
|
||||
on load or input
|
||||
set characters to my.value.trim().length
|
||||
-- Unicode-aware counting to match Python
|
||||
-- <LF> will be normalized as <CR><LF> in Django
|
||||
set characters to Array.from(my.value.replaceAll('\n','\r\n').trim()).length
|
||||
put {Config.system.post_length} - characters into #character-counter
|
||||
|
||||
if characters > {Config.system.post_length} then
|
||||
|
@ -77,7 +106,7 @@ class Compose(FormView):
|
|||
def clean_text(self):
|
||||
text = self.cleaned_data.get("text")
|
||||
# Check minimum interval
|
||||
last_post = self.request.identity.posts.order_by("-created").first()
|
||||
last_post = self.identity.posts.order_by("-created").first()
|
||||
if (
|
||||
last_post
|
||||
and (timezone.now() - last_post.created).total_seconds()
|
||||
|
@ -96,156 +125,75 @@ class Compose(FormView):
|
|||
)
|
||||
return text
|
||||
|
||||
def clean_image(self):
|
||||
value = self.cleaned_data.get("image")
|
||||
if value:
|
||||
max_mb = settings.SETUP.MEDIA_MAX_IMAGE_FILESIZE_MB
|
||||
max_bytes = max_mb * 1024 * 1024
|
||||
if value.size > max_bytes:
|
||||
# Erase the file from our data to stop trying to show it again
|
||||
self.files = {}
|
||||
raise forms.ValidationError(
|
||||
f"File must be {max_mb}MB or less (actual: {value.size / 1024 ** 2:.2f})"
|
||||
)
|
||||
return value
|
||||
|
||||
def get_form(self, form_class=None):
|
||||
return self.form_class(request=self.request, **self.get_form_kwargs())
|
||||
return self.form_class(identity=self.identity, **self.get_form_kwargs())
|
||||
|
||||
def get_initial(self):
|
||||
initial = super().get_initial()
|
||||
if self.post_obj:
|
||||
initial.update(
|
||||
{
|
||||
"reply_to": self.reply_to.pk if self.reply_to else "",
|
||||
"visibility": self.post_obj.visibility,
|
||||
"text": html_to_plaintext(self.post_obj.content),
|
||||
"content_warning": self.post_obj.summary,
|
||||
}
|
||||
)
|
||||
else:
|
||||
initial[
|
||||
"visibility"
|
||||
] = self.request.identity.config_identity.default_post_visibility
|
||||
if self.reply_to:
|
||||
initial["reply_to"] = self.reply_to.pk
|
||||
if self.reply_to.visibility == Post.Visibilities.public:
|
||||
initial["visibility"] = Post.Visibilities.unlisted
|
||||
else:
|
||||
initial["visibility"] = self.reply_to.visibility
|
||||
initial["content_warning"] = self.reply_to.summary
|
||||
# Build a set of mentions for the content to start as
|
||||
mentioned = {self.reply_to.author}
|
||||
mentioned.update(self.reply_to.mentions.all())
|
||||
mentioned.discard(self.request.identity)
|
||||
initial["text"] = "".join(
|
||||
f"@{identity.handle} "
|
||||
for identity in mentioned
|
||||
if identity.username
|
||||
)
|
||||
initial["visibility"] = self.identity.config_identity.default_post_visibility
|
||||
return initial
|
||||
|
||||
def form_valid(self, form):
|
||||
# Gather any attachment objects now, they're not in the form proper
|
||||
# See if we need to make an image attachment
|
||||
attachments = []
|
||||
if "attachment" in self.request.POST:
|
||||
attachments = PostAttachment.objects.filter(
|
||||
pk__in=self.request.POST.getlist("attachment", [])
|
||||
if form.cleaned_data.get("image"):
|
||||
main_file = resize_image(
|
||||
form.cleaned_data["image"],
|
||||
size=(2000, 2000),
|
||||
cover=False,
|
||||
)
|
||||
# Dispatch based on edit or not
|
||||
if self.post_obj:
|
||||
self.post_obj.edit_local(
|
||||
content=form.cleaned_data["text"],
|
||||
summary=form.cleaned_data.get("content_warning"),
|
||||
visibility=form.cleaned_data["visibility"],
|
||||
attachments=attachments,
|
||||
thumbnail_file = resize_image(
|
||||
form.cleaned_data["image"],
|
||||
size=(400, 225),
|
||||
cover=True,
|
||||
)
|
||||
self.post_obj.transition_perform(PostStates.edited)
|
||||
else:
|
||||
post = Post.create_local(
|
||||
author=self.request.identity,
|
||||
content=form.cleaned_data["text"],
|
||||
summary=form.cleaned_data.get("content_warning"),
|
||||
visibility=form.cleaned_data["visibility"],
|
||||
reply_to=self.reply_to,
|
||||
attachments=attachments,
|
||||
attachment = PostAttachment.objects.create(
|
||||
blurhash=blurhash_image(thumbnail_file),
|
||||
mimetype="image/webp",
|
||||
width=main_file.image.width,
|
||||
height=main_file.image.height,
|
||||
name=form.cleaned_data.get("image_caption"),
|
||||
state=PostAttachmentStates.fetched,
|
||||
author=self.identity,
|
||||
)
|
||||
# Add their own timeline event for immediate visibility
|
||||
TimelineEvent.add_post(self.request.identity, post)
|
||||
return redirect("/")
|
||||
|
||||
def dispatch(self, request, handle=None, post_id=None, *args, **kwargs):
|
||||
self.post_obj = None
|
||||
if handle and post_id:
|
||||
# Make sure the request identity owns the post!
|
||||
if handle != request.identity.handle:
|
||||
raise PermissionDenied("Post author is not requestor")
|
||||
|
||||
self.post_obj = get_object_or_404(request.identity.posts, pk=post_id)
|
||||
|
||||
# Grab the reply-to post info now
|
||||
self.reply_to = None
|
||||
reply_to_id = request.POST.get("reply_to") or request.GET.get("reply_to")
|
||||
if reply_to_id:
|
||||
try:
|
||||
self.reply_to = Post.objects.get(pk=reply_to_id)
|
||||
except Post.DoesNotExist:
|
||||
pass
|
||||
# Keep going with normal rendering
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
attachment.file.save(
|
||||
main_file.name,
|
||||
main_file,
|
||||
)
|
||||
attachment.thumbnail.save(
|
||||
thumbnail_file.name,
|
||||
thumbnail_file,
|
||||
)
|
||||
attachment.save()
|
||||
attachments.append(attachment)
|
||||
# Create the post
|
||||
post = Post.create_local(
|
||||
author=self.identity,
|
||||
content=form.cleaned_data["text"],
|
||||
summary=form.cleaned_data.get("content_warning"),
|
||||
visibility=form.cleaned_data["visibility"],
|
||||
attachments=attachments,
|
||||
)
|
||||
# Add their own timeline event for immediate visibility
|
||||
TimelineEvent.add_post(self.identity, post)
|
||||
messages.success(self.request, "Your post was created.")
|
||||
return redirect(".")
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
context = super().get_context_data(**kwargs)
|
||||
context["reply_to"] = self.reply_to
|
||||
if self.post_obj:
|
||||
context["post"] = self.post_obj
|
||||
context["identity"] = self.identity
|
||||
context["section"] = "compose"
|
||||
return context
|
||||
|
||||
|
||||
@method_decorator(identity_required, name="dispatch")
|
||||
class ImageUpload(FormView):
|
||||
"""
|
||||
Handles image upload - returns a new input type hidden to embed in
|
||||
the main form that references an orphaned PostAttachment
|
||||
"""
|
||||
|
||||
template_name = "activities/_image_upload.html"
|
||||
|
||||
class form_class(forms.Form):
|
||||
image = forms.ImageField()
|
||||
description = forms.CharField(required=False)
|
||||
|
||||
def clean_image(self):
|
||||
value = self.cleaned_data["image"]
|
||||
max_mb = settings.SETUP.MEDIA_MAX_IMAGE_FILESIZE_MB
|
||||
max_bytes = max_mb * 1024 * 1024
|
||||
if value.size > max_bytes:
|
||||
# Erase the file from our data to stop trying to show it again
|
||||
self.files = {}
|
||||
raise forms.ValidationError(f"File must be {max_mb}MB or less")
|
||||
return value
|
||||
|
||||
def form_invalid(self, form):
|
||||
return super().form_invalid(form)
|
||||
|
||||
def form_valid(self, form):
|
||||
# Make a PostAttachment
|
||||
main_file = resize_image(
|
||||
form.cleaned_data["image"],
|
||||
size=(2000, 2000),
|
||||
cover=False,
|
||||
)
|
||||
thumbnail_file = resize_image(
|
||||
form.cleaned_data["image"],
|
||||
size=(400, 225),
|
||||
cover=True,
|
||||
)
|
||||
attachment = PostAttachment.objects.create(
|
||||
blurhash=blurhash_image(thumbnail_file),
|
||||
mimetype="image/webp",
|
||||
width=main_file.image.width,
|
||||
height=main_file.image.height,
|
||||
name=form.cleaned_data.get("description"),
|
||||
state=PostAttachmentStates.fetched,
|
||||
)
|
||||
|
||||
attachment.file.save(
|
||||
main_file.name,
|
||||
main_file,
|
||||
)
|
||||
attachment.thumbnail.save(
|
||||
thumbnail_file.name,
|
||||
thumbnail_file,
|
||||
)
|
||||
attachment.save()
|
||||
# Return the response, with a hidden input plus a note
|
||||
return render(
|
||||
self.request, "activities/_image_uploaded.html", {"attachment": attachment}
|
||||
)
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
import json
|
||||
|
||||
import httpx
|
||||
from asgiref.sync import async_to_sync
|
||||
from django import forms
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.generic import FormView, TemplateView
|
||||
|
@ -13,7 +12,6 @@ from users.models import SystemActor
|
|||
|
||||
@method_decorator(admin_required, name="dispatch")
|
||||
class JsonViewer(FormView):
|
||||
|
||||
template_name = "activities/debug_json.html"
|
||||
|
||||
class form_class(forms.Form):
|
||||
|
@ -31,7 +29,7 @@ class JsonViewer(FormView):
|
|||
context = self.get_context_data(form=form)
|
||||
|
||||
try:
|
||||
response = async_to_sync(SystemActor().signed_request)(
|
||||
response = SystemActor().signed_request(
|
||||
method="get",
|
||||
uri=uri,
|
||||
)
|
||||
|
@ -56,25 +54,23 @@ class JsonViewer(FormView):
|
|||
except json.JSONDecodeError as ex:
|
||||
result = str(ex)
|
||||
else:
|
||||
result = json.dumps(document, indent=4, sort_keys=True)
|
||||
context["raw_result"] = json.dumps(response.json(), indent=2)
|
||||
result = json.dumps(document, indent=2, sort_keys=True)
|
||||
# result = pprint.pformat(document)
|
||||
context["result"] = result
|
||||
return self.render_to_response(context)
|
||||
|
||||
|
||||
class NotFound(TemplateView):
|
||||
|
||||
template_name = "404.html"
|
||||
|
||||
|
||||
class ServerError(TemplateView):
|
||||
|
||||
template_name = "500.html"
|
||||
|
||||
|
||||
@method_decorator(admin_required, name="dispatch")
|
||||
class OauthAuthorize(TemplateView):
|
||||
|
||||
template_name = "api/oauth_authorize.html"
|
||||
|
||||
def get_context_data(self):
|
||||
|
|
|
@ -1,26 +0,0 @@
|
|||
from django.views.generic import ListView
|
||||
|
||||
from activities.models import Hashtag
|
||||
|
||||
|
||||
class ExploreTag(ListView):
|
||||
|
||||
template_name = "activities/explore_tag.html"
|
||||
extra_context = {
|
||||
"current_page": "explore",
|
||||
"allows_refresh": True,
|
||||
}
|
||||
paginate_by = 20
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
Hashtag.objects.public()
|
||||
.filter(
|
||||
stats__total__gt=0,
|
||||
)
|
||||
.order_by("-stats__total")
|
||||
)[:20]
|
||||
|
||||
|
||||
class Explore(ExploreTag):
|
||||
pass
|
|
@ -1,15 +1,13 @@
|
|||
from django.core.exceptions import PermissionDenied
|
||||
from django.http import Http404, JsonResponse
|
||||
from django.shortcuts import get_object_or_404, redirect, render
|
||||
from django.shortcuts import get_object_or_404, redirect
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.vary import vary_on_headers
|
||||
from django.views.generic import TemplateView, View
|
||||
from django.views.generic import TemplateView
|
||||
|
||||
from activities.models import Post, PostInteraction, PostStates
|
||||
from activities.models import Post, PostStates
|
||||
from activities.services import PostService
|
||||
from core.decorators import cache_page_by_ap_json
|
||||
from core.ld import canonicalise
|
||||
from users.decorators import identity_required
|
||||
from users.models import Identity
|
||||
from users.shortcuts import by_handle_or_404
|
||||
|
||||
|
@ -19,7 +17,6 @@ from users.shortcuts import by_handle_or_404
|
|||
)
|
||||
@method_decorator(vary_on_headers("Accept"), name="dispatch")
|
||||
class Individual(TemplateView):
|
||||
|
||||
template_name = "activities/post.html"
|
||||
|
||||
identity: Identity
|
||||
|
@ -30,7 +27,9 @@ class Individual(TemplateView):
|
|||
if self.identity.blocked:
|
||||
raise Http404("Blocked user")
|
||||
self.post_obj = get_object_or_404(
|
||||
PostService.queryset().filter(author=self.identity),
|
||||
PostService.queryset()
|
||||
.filter(author=self.identity)
|
||||
.unlisted(include_replies=True),
|
||||
pk=post_id,
|
||||
)
|
||||
if self.post_obj.state in [PostStates.deleted, PostStates.deleted_fanned_out]:
|
||||
|
@ -47,20 +46,17 @@ class Individual(TemplateView):
|
|||
context = super().get_context_data(**kwargs)
|
||||
|
||||
ancestors, descendants = PostService(self.post_obj).context(
|
||||
self.request.identity
|
||||
identity=None, num_ancestors=2
|
||||
)
|
||||
|
||||
context.update(
|
||||
{
|
||||
"identity": self.identity,
|
||||
"post": self.post_obj,
|
||||
"interactions": PostInteraction.get_post_interactions(
|
||||
[self.post_obj] + ancestors + descendants,
|
||||
self.request.identity,
|
||||
),
|
||||
"link_original": True,
|
||||
"ancestors": ancestors,
|
||||
"descendants": descendants,
|
||||
"public_styling": True,
|
||||
}
|
||||
)
|
||||
|
||||
|
@ -74,95 +70,3 @@ class Individual(TemplateView):
|
|||
canonicalise(self.post_obj.to_ap(), include_security=True),
|
||||
content_type="application/activity+json",
|
||||
)
|
||||
|
||||
|
||||
@method_decorator(identity_required, name="dispatch")
|
||||
class Like(View):
|
||||
"""
|
||||
Adds/removes a like from the current identity to the post
|
||||
"""
|
||||
|
||||
undo = False
|
||||
|
||||
def post(self, request, handle, post_id):
|
||||
identity = by_handle_or_404(self.request, handle, local=False)
|
||||
post = get_object_or_404(
|
||||
PostService.queryset().filter(author=identity),
|
||||
pk=post_id,
|
||||
)
|
||||
service = PostService(post)
|
||||
if self.undo:
|
||||
service.unlike_as(request.identity)
|
||||
post.like_count = max(0, post.like_count - 1)
|
||||
else:
|
||||
service.like_as(request.identity)
|
||||
post.like_count += 1
|
||||
# Return either a redirect or a HTMX snippet
|
||||
if request.htmx:
|
||||
return render(
|
||||
request,
|
||||
"activities/_like.html",
|
||||
{
|
||||
"post": post,
|
||||
"interactions": {"like": set() if self.undo else {post.pk}},
|
||||
},
|
||||
)
|
||||
return redirect(post.urls.view)
|
||||
|
||||
|
||||
@method_decorator(identity_required, name="dispatch")
|
||||
class Boost(View):
|
||||
"""
|
||||
Adds/removes a boost from the current identity to the post
|
||||
"""
|
||||
|
||||
undo = False
|
||||
|
||||
def post(self, request, handle, post_id):
|
||||
identity = by_handle_or_404(self.request, handle, local=False)
|
||||
post = get_object_or_404(
|
||||
PostService.queryset().filter(author=identity),
|
||||
pk=post_id,
|
||||
)
|
||||
service = PostService(post)
|
||||
if self.undo:
|
||||
service.unboost_as(request.identity)
|
||||
post.boost_count = max(0, post.boost_count - 1)
|
||||
else:
|
||||
service.boost_as(request.identity)
|
||||
post.boost_count += 1
|
||||
# Return either a redirect or a HTMX snippet
|
||||
if request.htmx:
|
||||
return render(
|
||||
request,
|
||||
"activities/_boost.html",
|
||||
{
|
||||
"post": post,
|
||||
"interactions": {"boost": set() if self.undo else {post.pk}},
|
||||
},
|
||||
)
|
||||
return redirect(post.urls.view)
|
||||
|
||||
|
||||
@method_decorator(identity_required, name="dispatch")
|
||||
class Delete(TemplateView):
|
||||
"""
|
||||
Deletes a post
|
||||
"""
|
||||
|
||||
template_name = "activities/post_delete.html"
|
||||
|
||||
def dispatch(self, request, handle, post_id):
|
||||
# Make sure the request identity owns the post!
|
||||
if handle != request.identity.handle:
|
||||
raise PermissionDenied("Post author is not requestor")
|
||||
self.identity = by_handle_or_404(self.request, handle, local=False)
|
||||
self.post_obj = get_object_or_404(self.identity.posts, pk=post_id)
|
||||
return super().dispatch(request)
|
||||
|
||||
def get_context_data(self):
|
||||
return {"post": self.post_obj}
|
||||
|
||||
def post(self, request):
|
||||
PostService(self.post_obj).delete()
|
||||
return redirect("/")
|
||||
|
|
|
@ -1,22 +0,0 @@
|
|||
from django import forms
|
||||
from django.views.generic import FormView
|
||||
|
||||
from activities.services import SearchService
|
||||
|
||||
|
||||
class Search(FormView):
|
||||
|
||||
template_name = "activities/search.html"
|
||||
|
||||
class form_class(forms.Form):
|
||||
query = forms.CharField(
|
||||
help_text="Search for:\nA user by @username@domain or their profile URL\nA hashtag by #tagname\nA post by its URL",
|
||||
widget=forms.TextInput(attrs={"type": "search", "autofocus": "autofocus"}),
|
||||
)
|
||||
|
||||
def form_valid(self, form):
|
||||
searcher = SearchService(form.cleaned_data["query"], self.request.identity)
|
||||
# Render results
|
||||
context = self.get_context_data(form=form)
|
||||
context["results"] = searcher.search_all()
|
||||
return self.render_to_response(context)
|
|
@ -1,48 +1,35 @@
|
|||
from django.core.paginator import Paginator
|
||||
from django.contrib.auth.decorators import login_required
|
||||
from django.shortcuts import get_object_or_404, redirect
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.generic import ListView, TemplateView
|
||||
|
||||
from activities.models import Hashtag, PostInteraction, TimelineEvent
|
||||
from activities.models import Hashtag, TimelineEvent
|
||||
from activities.services import TimelineService
|
||||
from core.decorators import cache_page
|
||||
from users.decorators import identity_required
|
||||
|
||||
from .compose import Compose
|
||||
from users.models import Identity
|
||||
from users.views.base import IdentityViewMixin
|
||||
|
||||
|
||||
@method_decorator(identity_required, name="dispatch")
|
||||
@method_decorator(login_required, name="dispatch")
|
||||
class Home(TemplateView):
|
||||
"""
|
||||
Homepage for logged-in users - shows identities primarily.
|
||||
"""
|
||||
|
||||
template_name = "activities/home.html"
|
||||
|
||||
form_class = Compose.form_class
|
||||
|
||||
def get_form(self, form_class=None):
|
||||
return self.form_class(request=self.request, **self.get_form_kwargs())
|
||||
|
||||
def get_context_data(self):
|
||||
events = TimelineService(self.request.identity).home()
|
||||
paginator = Paginator(events, 25)
|
||||
page_number = self.request.GET.get("page")
|
||||
context = {
|
||||
"interactions": PostInteraction.get_event_interactions(
|
||||
events,
|
||||
self.request.identity,
|
||||
),
|
||||
"current_page": "home",
|
||||
"allows_refresh": True,
|
||||
"page_obj": paginator.get_page(page_number),
|
||||
"form": self.form_class(request=self.request),
|
||||
return {
|
||||
"identities": Identity.objects.filter(
|
||||
users__pk=self.request.user.pk
|
||||
).order_by("created"),
|
||||
}
|
||||
return context
|
||||
|
||||
|
||||
@method_decorator(
|
||||
cache_page("cache_timeout_page_timeline", public_only=True), name="dispatch"
|
||||
)
|
||||
class Tag(ListView):
|
||||
|
||||
template_name = "activities/tag.html"
|
||||
extra_context = {
|
||||
"current_page": "tag",
|
||||
|
@ -59,64 +46,15 @@ class Tag(ListView):
|
|||
return super().get(request, *args, **kwargs)
|
||||
|
||||
def get_queryset(self):
|
||||
return TimelineService(self.request.identity).hashtag(self.hashtag)
|
||||
return TimelineService(None).hashtag(self.hashtag)
|
||||
|
||||
def get_context_data(self):
|
||||
context = super().get_context_data()
|
||||
context["hashtag"] = self.hashtag
|
||||
context["interactions"] = PostInteraction.get_post_interactions(
|
||||
context["page_obj"], self.request.identity
|
||||
)
|
||||
return context
|
||||
|
||||
|
||||
@method_decorator(
|
||||
cache_page("cache_timeout_page_timeline", public_only=True), name="dispatch"
|
||||
)
|
||||
class Local(ListView):
|
||||
|
||||
template_name = "activities/local.html"
|
||||
extra_context = {
|
||||
"current_page": "local",
|
||||
"allows_refresh": True,
|
||||
}
|
||||
paginate_by = 25
|
||||
|
||||
def get_queryset(self):
|
||||
return TimelineService(self.request.identity).local()
|
||||
|
||||
def get_context_data(self):
|
||||
context = super().get_context_data()
|
||||
context["interactions"] = PostInteraction.get_post_interactions(
|
||||
context["page_obj"], self.request.identity
|
||||
)
|
||||
return context
|
||||
|
||||
|
||||
@method_decorator(identity_required, name="dispatch")
|
||||
class Federated(ListView):
|
||||
|
||||
template_name = "activities/federated.html"
|
||||
extra_context = {
|
||||
"current_page": "federated",
|
||||
"allows_refresh": True,
|
||||
}
|
||||
paginate_by = 25
|
||||
|
||||
def get_queryset(self):
|
||||
return TimelineService(self.request.identity).federated()
|
||||
|
||||
def get_context_data(self):
|
||||
context = super().get_context_data()
|
||||
context["interactions"] = PostInteraction.get_post_interactions(
|
||||
context["page_obj"], self.request.identity
|
||||
)
|
||||
return context
|
||||
|
||||
|
||||
@method_decorator(identity_required, name="dispatch")
|
||||
class Notifications(ListView):
|
||||
|
||||
class Notifications(IdentityViewMixin, ListView):
|
||||
template_name = "activities/notifications.html"
|
||||
extra_context = {
|
||||
"current_page": "notifications",
|
||||
|
@ -145,7 +83,7 @@ class Notifications(ListView):
|
|||
for type_name, type in self.notification_types.items():
|
||||
if notification_options.get(type_name, True):
|
||||
types.append(type)
|
||||
return TimelineService(self.request.identity).notifications(types)
|
||||
return TimelineService(self.identity).notifications(types)
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
context = super().get_context_data(**kwargs)
|
||||
|
@ -166,9 +104,6 @@ class Notifications(ListView):
|
|||
events.append(event)
|
||||
# Retrieve what kinds of things to show
|
||||
context["events"] = events
|
||||
context["identity"] = self.identity
|
||||
context["notification_options"] = self.request.session["notification_options"]
|
||||
context["interactions"] = PostInteraction.get_event_interactions(
|
||||
context["page_obj"],
|
||||
self.request.identity,
|
||||
)
|
||||
return context
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
from collections.abc import Callable
|
||||
from functools import wraps
|
||||
|
||||
from django.http import JsonResponse
|
||||
|
@ -5,15 +6,46 @@ from django.http import JsonResponse
|
|||
|
||||
def identity_required(function):
|
||||
"""
|
||||
API version of the identity_required decorator that just makes sure the
|
||||
token is tied to one, not an app only.
|
||||
Makes sure the token is tied to an identity, not an app only.
|
||||
"""
|
||||
|
||||
@wraps(function)
|
||||
def inner(request, *args, **kwargs):
|
||||
# They need an identity
|
||||
if not request.identity:
|
||||
return JsonResponse({"error": "identity_token_required"}, status=400)
|
||||
return JsonResponse({"error": "identity_token_required"}, status=401)
|
||||
return function(request, *args, **kwargs)
|
||||
|
||||
# This is for the API only
|
||||
inner.csrf_exempt = True
|
||||
|
||||
return inner
|
||||
|
||||
|
||||
def scope_required(scope: str, requires_identity=True):
|
||||
"""
|
||||
Asserts that the token we're using has the provided scope
|
||||
"""
|
||||
|
||||
def decorator(function: Callable):
|
||||
@wraps(function)
|
||||
def inner(request, *args, **kwargs):
|
||||
if not request.token:
|
||||
if request.identity:
|
||||
# They're just logged in via cookie - give full access
|
||||
pass
|
||||
else:
|
||||
return JsonResponse(
|
||||
{"error": "identity_token_required"}, status=401
|
||||
)
|
||||
elif not request.token.has_scope(scope):
|
||||
return JsonResponse({"error": "out_of_scope_for_token"}, status=403)
|
||||
# They need an identity
|
||||
if not request.identity and requires_identity:
|
||||
return JsonResponse({"error": "identity_token_required"}, status=401)
|
||||
return function(request, *args, **kwargs)
|
||||
|
||||
inner.csrf_exempt = True # type:ignore
|
||||
return inner
|
||||
|
||||
return decorator
|
||||
|
|
|
@ -14,14 +14,21 @@ class ApiTokenMiddleware:
|
|||
|
||||
def __call__(self, request):
|
||||
auth_header = request.headers.get("authorization", None)
|
||||
request.token = None
|
||||
request.identity = None
|
||||
if auth_header and auth_header.startswith("Bearer "):
|
||||
token_value = auth_header[7:]
|
||||
try:
|
||||
token = Token.objects.get(token=token_value)
|
||||
except Token.DoesNotExist:
|
||||
return HttpResponse("Invalid Bearer token", status=400)
|
||||
request.user = token.user
|
||||
request.identity = token.identity
|
||||
if token_value == "__app__":
|
||||
# Special client app token value
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
token = Token.objects.get(token=token_value, revoked=None)
|
||||
except Token.DoesNotExist:
|
||||
return HttpResponse("Invalid Bearer token", status=400)
|
||||
request.user = token.user
|
||||
request.identity = token.identity
|
||||
request.token = token
|
||||
request.session = None
|
||||
response = self.get_response(request)
|
||||
return response
|
||||
|
|
|
@ -6,7 +6,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
|
|
|
@ -0,0 +1,92 @@
|
|||
# Generated by Django 4.1.4 on 2023-01-01 00:38
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("users", "0008_follow_boosts"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
("api", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="token",
|
||||
name="code",
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="token",
|
||||
name="revoked",
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="token",
|
||||
name="token",
|
||||
field=models.CharField(max_length=500, unique=True),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Authorization",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
(
|
||||
"code",
|
||||
models.CharField(
|
||||
blank=True, max_length=128, null=True, unique=True
|
||||
),
|
||||
),
|
||||
("scopes", models.JSONField()),
|
||||
("redirect_uri", models.TextField(blank=True, null=True)),
|
||||
("valid_for_seconds", models.IntegerField(default=60)),
|
||||
("created", models.DateTimeField(auto_now_add=True)),
|
||||
("updated", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"application",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="authorizations",
|
||||
to="api.application",
|
||||
),
|
||||
),
|
||||
(
|
||||
"identity",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="authorizations",
|
||||
to="users.identity",
|
||||
),
|
||||
),
|
||||
(
|
||||
"token",
|
||||
models.OneToOneField(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="api.token",
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="authorizations",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
]
|
17
api/migrations/0003_token_push_subscription.py
Normal file
17
api/migrations/0003_token_push_subscription.py
Normal file
|
@ -0,0 +1,17 @@
|
|||
# Generated by Django 4.2.1 on 2023-07-15 17:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0002_remove_token_code_token_revoked_alter_token_token_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="token",
|
||||
name="push_subscription",
|
||||
field=models.JSONField(blank=True, null=True),
|
||||
),
|
||||
]
|
|
@ -1,2 +1,3 @@
|
|||
from .application import Application # noqa
|
||||
from .authorization import Authorization # noqa
|
||||
from .token import Token # noqa
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
import secrets
|
||||
|
||||
from django.db import models
|
||||
|
||||
|
||||
|
@ -17,3 +19,23 @@ class Application(models.Model):
|
|||
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
updated = models.DateTimeField(auto_now=True)
|
||||
|
||||
@classmethod
|
||||
def create(
|
||||
cls,
|
||||
client_name: str,
|
||||
redirect_uris: str,
|
||||
website: str | None,
|
||||
scopes: str | None = None,
|
||||
):
|
||||
client_id = "tk-" + secrets.token_urlsafe(16)
|
||||
client_secret = secrets.token_urlsafe(40)
|
||||
|
||||
return cls.objects.create(
|
||||
name=client_name,
|
||||
website=website,
|
||||
client_id=client_id,
|
||||
client_secret=client_secret,
|
||||
redirect_uris=redirect_uris,
|
||||
scopes=scopes or "read",
|
||||
)
|
||||
|
|
44
api/models/authorization.py
Normal file
44
api/models/authorization.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
from django.db import models
|
||||
|
||||
|
||||
class Authorization(models.Model):
|
||||
"""
|
||||
An authorization code as part of the OAuth flow
|
||||
"""
|
||||
|
||||
application = models.ForeignKey(
|
||||
"api.Application",
|
||||
on_delete=models.CASCADE,
|
||||
related_name="authorizations",
|
||||
)
|
||||
|
||||
user = models.ForeignKey(
|
||||
"users.User",
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="authorizations",
|
||||
)
|
||||
|
||||
identity = models.ForeignKey(
|
||||
"users.Identity",
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="authorizations",
|
||||
)
|
||||
|
||||
code = models.CharField(max_length=128, blank=True, null=True, unique=True)
|
||||
token = models.OneToOneField(
|
||||
"api.Token",
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
scopes = models.JSONField()
|
||||
redirect_uri = models.TextField(blank=True, null=True)
|
||||
valid_for_seconds = models.IntegerField(default=60)
|
||||
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
updated = models.DateTimeField(auto_now=True)
|
|
@ -1,4 +1,21 @@
|
|||
import urlman
|
||||
from django.db import models
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class PushSubscriptionSchema(BaseModel):
|
||||
"""
|
||||
Basic validating schema for push data
|
||||
"""
|
||||
|
||||
class Keys(BaseModel):
|
||||
p256dh: str
|
||||
auth: str
|
||||
|
||||
endpoint: str
|
||||
keys: Keys
|
||||
alerts: dict[str, bool]
|
||||
policy: str
|
||||
|
||||
|
||||
class Token(models.Model):
|
||||
|
@ -30,10 +47,28 @@ class Token(models.Model):
|
|||
related_name="tokens",
|
||||
)
|
||||
|
||||
token = models.CharField(max_length=500)
|
||||
code = models.CharField(max_length=100, blank=True, null=True)
|
||||
|
||||
token = models.CharField(max_length=500, unique=True)
|
||||
scopes = models.JSONField()
|
||||
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
updated = models.DateTimeField(auto_now=True)
|
||||
revoked = models.DateTimeField(blank=True, null=True)
|
||||
|
||||
push_subscription = models.JSONField(blank=True, null=True)
|
||||
|
||||
class urls(urlman.Urls):
|
||||
edit = "/@{self.identity.handle}/settings/tokens/{self.id}/"
|
||||
|
||||
def has_scope(self, scope: str):
|
||||
"""
|
||||
Returns if this token has the given scope.
|
||||
It's a function so we can do mapping/reduction if needed
|
||||
"""
|
||||
# TODO: Support granular scopes the other way?
|
||||
scope_prefix = scope.split(":")[0]
|
||||
return (scope in self.scopes) or (scope_prefix in self.scopes)
|
||||
|
||||
def set_push_subscription(self, data: dict):
|
||||
# Validate schema and assign
|
||||
self.push_subscription = PushSubscriptionSchema(**data).dict()
|
||||
self.save()
|
||||
|
|
|
@ -1,24 +1,106 @@
|
|||
import dataclasses
|
||||
import urllib.parse
|
||||
from collections.abc import Callable
|
||||
from typing import Any, Generic, Protocol, TypeVar
|
||||
|
||||
from django.db import models
|
||||
from django.db.models.expressions import Case, F, When
|
||||
from django.http import HttpRequest
|
||||
from hatchway.http import ApiResponse
|
||||
|
||||
from activities.models import PostInteraction
|
||||
from activities.models import PostInteraction, TimelineEvent
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
class SchemaWithId(Protocol):
|
||||
"""
|
||||
Little protocol type to represent schemas that have an ID attribute
|
||||
"""
|
||||
|
||||
id: str
|
||||
|
||||
|
||||
TI = TypeVar("TI", bound=SchemaWithId)
|
||||
TM = TypeVar("TM", bound=models.Model)
|
||||
|
||||
|
||||
class PaginatingApiResponse(ApiResponse[list[TI]]):
|
||||
"""
|
||||
An ApiResponse subclass that also handles pagination link headers
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
data: list[TI],
|
||||
request: HttpRequest,
|
||||
include_params: list[str],
|
||||
**kwargs,
|
||||
):
|
||||
# Call superclass
|
||||
super().__init__(data, **kwargs)
|
||||
# Figure out if we need link headers
|
||||
self._request = request
|
||||
self.extra_params = self.filter_params(self._request, include_params)
|
||||
link_header = self.build_link_header()
|
||||
if link_header:
|
||||
self.headers["link"] = link_header
|
||||
|
||||
@staticmethod
|
||||
def filter_params(request: HttpRequest, allowed_params: list[str]):
|
||||
params = {}
|
||||
for key in allowed_params:
|
||||
value = request.GET.get(key, None)
|
||||
if value:
|
||||
params[key] = value
|
||||
return params
|
||||
|
||||
def get_part(self, data_index: int, param_name: str, rel: str) -> str | None:
|
||||
"""
|
||||
Used to get next/prev URLs
|
||||
"""
|
||||
if not self.data:
|
||||
return None
|
||||
# Use the ID of the last object for the next page start
|
||||
params = dict(self.extra_params)
|
||||
params[param_name] = self.data[data_index].id
|
||||
return (
|
||||
"<"
|
||||
+ self._request.build_absolute_uri(self._request.path)
|
||||
+ "?"
|
||||
+ urllib.parse.urlencode(params)
|
||||
+ f'>; rel="{rel}"'
|
||||
)
|
||||
|
||||
def build_link_header(self):
|
||||
parts = [
|
||||
entry
|
||||
for entry in [
|
||||
self.get_part(-1, "max_id", "next"),
|
||||
self.get_part(0, "min_id", "prev"),
|
||||
]
|
||||
if entry
|
||||
]
|
||||
if not parts:
|
||||
return None
|
||||
return ", ".join(parts)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class PaginationResult:
|
||||
class PaginationResult(Generic[T]):
|
||||
"""
|
||||
Represents a pagination result for Mastodon (it does Link header stuff)
|
||||
"""
|
||||
|
||||
#: A list of objects that matched the pagination query.
|
||||
results: list[models.Model]
|
||||
results: list[T]
|
||||
|
||||
#: The actual applied limit, which may be different from what was requested.
|
||||
limit: int
|
||||
|
||||
#: A list of transformed JSON objects
|
||||
json_results: list[dict] | None = None
|
||||
|
||||
@classmethod
|
||||
def empty(cls):
|
||||
return cls(results=[], limit=20)
|
||||
|
@ -29,9 +111,10 @@ class PaginationResult:
|
|||
"""
|
||||
if not self.results:
|
||||
return None
|
||||
|
||||
if self.json_results is None:
|
||||
raise ValueError("You must JSONify the results first")
|
||||
params = self.filter_params(request, allowed_params)
|
||||
params["max_id"] = self.results[-1].pk
|
||||
params["max_id"] = self.json_results[-1]["id"]
|
||||
|
||||
return f"{request.build_absolute_uri(request.path)}?{urllib.parse.urlencode(params)}"
|
||||
|
||||
|
@ -41,9 +124,10 @@ class PaginationResult:
|
|||
"""
|
||||
if not self.results:
|
||||
return None
|
||||
|
||||
if self.json_results is None:
|
||||
raise ValueError("You must JSONify the results first")
|
||||
params = self.filter_params(request, allowed_params)
|
||||
params["min_id"] = self.results[0].pk
|
||||
params["min_id"] = self.json_results[0]["id"]
|
||||
|
||||
return f"{request.build_absolute_uri(request.path)}?{urllib.parse.urlencode(params)}"
|
||||
|
||||
|
@ -58,6 +142,49 @@ class PaginationResult:
|
|||
)
|
||||
)
|
||||
|
||||
def jsonify_results(self, map_function: Callable[[Any], Any]):
|
||||
"""
|
||||
Replaces our results with ones transformed via map_function
|
||||
"""
|
||||
self.json_results = [map_function(result) for result in self.results]
|
||||
|
||||
def jsonify_posts(self, identity):
|
||||
"""
|
||||
Predefined way of JSON-ifying Post objects
|
||||
"""
|
||||
interactions = PostInteraction.get_post_interactions(self.results, identity)
|
||||
self.jsonify_results(
|
||||
lambda post: post.to_mastodon_json(
|
||||
interactions=interactions, identity=identity
|
||||
)
|
||||
)
|
||||
|
||||
def jsonify_status_events(self, identity):
|
||||
"""
|
||||
Predefined way of JSON-ifying TimelineEvent objects representing statuses
|
||||
"""
|
||||
interactions = PostInteraction.get_event_interactions(self.results, identity)
|
||||
self.jsonify_results(
|
||||
lambda event: event.to_mastodon_status_json(
|
||||
interactions=interactions, identity=identity
|
||||
)
|
||||
)
|
||||
|
||||
def jsonify_notification_events(self, identity):
|
||||
"""
|
||||
Predefined way of JSON-ifying TimelineEvent objects representing notifications
|
||||
"""
|
||||
interactions = PostInteraction.get_event_interactions(self.results, identity)
|
||||
self.jsonify_results(
|
||||
lambda event: event.to_mastodon_notification_json(interactions=interactions)
|
||||
)
|
||||
|
||||
def jsonify_identities(self):
|
||||
"""
|
||||
Predefined way of JSON-ifying Identity objects
|
||||
"""
|
||||
self.jsonify_results(lambda identity: identity.to_mastodon_json())
|
||||
|
||||
@staticmethod
|
||||
def filter_params(request: HttpRequest, allowed_params: list[str]):
|
||||
params = {}
|
||||
|
@ -71,68 +198,63 @@ class PaginationResult:
|
|||
class MastodonPaginator:
|
||||
"""
|
||||
Paginates in the Mastodon style (max_id, min_id, etc).
|
||||
Note that this basically _requires_ us to always do it on IDs, so we do.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
anchor_model: type[models.Model],
|
||||
sort_attribute: str = "created",
|
||||
default_limit: int = 20,
|
||||
max_limit: int = 40,
|
||||
):
|
||||
self.anchor_model = anchor_model
|
||||
self.sort_attribute = sort_attribute
|
||||
self.default_limit = default_limit
|
||||
self.max_limit = max_limit
|
||||
|
||||
def get_anchor(self, anchor_id: str):
|
||||
"""
|
||||
Gets an anchor object by ID.
|
||||
It's possible that the anchor object might be an interaction, in which
|
||||
case we recurse down to its post.
|
||||
"""
|
||||
if anchor_id.startswith("interaction-"):
|
||||
try:
|
||||
return PostInteraction.objects.get(pk=anchor_id[12:])
|
||||
except PostInteraction.DoesNotExist:
|
||||
return PaginationResult.empty()
|
||||
try:
|
||||
return self.anchor_model.objects.get(pk=anchor_id)
|
||||
except self.anchor_model.DoesNotExist:
|
||||
return PaginationResult.empty()
|
||||
|
||||
def paginate(
|
||||
self,
|
||||
queryset,
|
||||
queryset: models.QuerySet[TM],
|
||||
min_id: str | None,
|
||||
max_id: str | None,
|
||||
since_id: str | None,
|
||||
limit: int | None,
|
||||
) -> PaginationResult:
|
||||
if max_id:
|
||||
anchor = self.get_anchor(max_id)
|
||||
queryset = queryset.filter(
|
||||
**{self.sort_attribute + "__lt": getattr(anchor, self.sort_attribute)}
|
||||
home: bool = False,
|
||||
) -> PaginationResult[TM]:
|
||||
limit = min(limit or self.default_limit, self.max_limit)
|
||||
filters = {}
|
||||
id_field = "id"
|
||||
reverse = False
|
||||
if home:
|
||||
# The home timeline interleaves Post IDs and PostInteraction IDs in an
|
||||
# annotated field called "subject_id".
|
||||
id_field = "subject_id"
|
||||
queryset = queryset.annotate(
|
||||
subject_id=Case(
|
||||
When(type=TimelineEvent.Types.post, then=F("subject_post_id")),
|
||||
default=F("subject_post_interaction"),
|
||||
)
|
||||
)
|
||||
|
||||
if since_id:
|
||||
anchor = self.get_anchor(since_id)
|
||||
queryset = queryset.filter(
|
||||
**{self.sort_attribute + "__gt": getattr(anchor, self.sort_attribute)}
|
||||
)
|
||||
|
||||
if min_id:
|
||||
# These "does not start with interaction" checks can be removed after a
|
||||
# couple months, when clients have flushed them out.
|
||||
if max_id and not max_id.startswith("interaction"):
|
||||
filters[f"{id_field}__lt"] = max_id
|
||||
if since_id and not since_id.startswith("interaction"):
|
||||
filters[f"{id_field}__gt"] = since_id
|
||||
if min_id and not min_id.startswith("interaction"):
|
||||
# Min ID requires items _immediately_ newer than specified, so we
|
||||
# invert the ordering to accommodate
|
||||
anchor = self.get_anchor(min_id)
|
||||
queryset = queryset.filter(
|
||||
**{self.sort_attribute + "__gt": getattr(anchor, self.sort_attribute)}
|
||||
).order_by(self.sort_attribute)
|
||||
else:
|
||||
queryset = queryset.order_by("-" + self.sort_attribute)
|
||||
filters[f"{id_field}__gt"] = min_id
|
||||
reverse = True
|
||||
|
||||
# Default is to order by ID descending (newest first), except for min_id
|
||||
# queries, which should order by ID for limiting, then reverse the results to be
|
||||
# consistent. The clearest explanation of this I've found so far is this:
|
||||
# https://mastodon.social/@Gargron/100846335353411164
|
||||
ordering = id_field if reverse else f"-{id_field}"
|
||||
results = list(queryset.filter(**filters).order_by(ordering)[:limit])
|
||||
if reverse:
|
||||
results.reverse()
|
||||
|
||||
limit = min(limit or self.default_limit, self.max_limit)
|
||||
return PaginationResult(
|
||||
results=list(queryset[:limit]),
|
||||
results=results,
|
||||
limit=limit,
|
||||
)
|
||||
|
|
|
@ -1,20 +0,0 @@
|
|||
import json
|
||||
|
||||
from ninja.parser import Parser
|
||||
|
||||
|
||||
class FormOrJsonParser(Parser):
|
||||
"""
|
||||
If there's form data in a request, makes it into a JSON dict.
|
||||
This is needed as the Mastodon API allows form data OR json body as input.
|
||||
"""
|
||||
|
||||
def parse_body(self, request):
|
||||
# Did they submit JSON?
|
||||
if request.content_type == "application/json" and request.body.strip():
|
||||
return json.loads(request.body)
|
||||
# Fall back to form data
|
||||
value = {}
|
||||
for key, item in request.POST.items():
|
||||
value[key] = item
|
||||
return value
|
349
api/schemas.py
349
api/schemas.py
|
@ -1,6 +1,13 @@
|
|||
from typing import Literal, Optional, Union
|
||||
|
||||
from ninja import Field, Schema
|
||||
from django.conf import settings
|
||||
from hatchway import Field, Schema
|
||||
|
||||
from activities import models as activities_models
|
||||
from api import models as api_models
|
||||
from core.html import FediverseHtmlParser
|
||||
from users import models as users_models
|
||||
from users.services import IdentityService
|
||||
|
||||
|
||||
class Application(Schema):
|
||||
|
@ -10,6 +17,23 @@ class Application(Schema):
|
|||
client_id: str
|
||||
client_secret: str
|
||||
redirect_uri: str = Field(alias="redirect_uris")
|
||||
vapid_key: str | None
|
||||
|
||||
@classmethod
|
||||
def from_application(cls, application: api_models.Application) -> "Application":
|
||||
instance = cls.from_orm(application)
|
||||
instance.vapid_key = settings.SETUP.VAPID_PUBLIC_KEY
|
||||
return instance
|
||||
|
||||
@classmethod
|
||||
def from_application_no_keys(
|
||||
cls, application: api_models.Application
|
||||
) -> "Application":
|
||||
instance = cls.from_orm(application)
|
||||
instance.vapid_key = settings.SETUP.VAPID_PUBLIC_KEY
|
||||
instance.client_id = ""
|
||||
instance.client_secret = ""
|
||||
return instance
|
||||
|
||||
|
||||
class CustomEmoji(Schema):
|
||||
|
@ -19,6 +43,10 @@ class CustomEmoji(Schema):
|
|||
visible_in_picker: bool
|
||||
category: str
|
||||
|
||||
@classmethod
|
||||
def from_emoji(cls, emoji: activities_models.Emoji) -> "CustomEmoji":
|
||||
return cls(**emoji.to_mastodon_json())
|
||||
|
||||
|
||||
class AccountField(Schema):
|
||||
name: str
|
||||
|
@ -51,6 +79,18 @@ class Account(Schema):
|
|||
statuses_count: int
|
||||
followers_count: int
|
||||
following_count: int
|
||||
source: dict | None
|
||||
|
||||
@classmethod
|
||||
def from_identity(
|
||||
cls,
|
||||
identity: users_models.Identity,
|
||||
include_counts: bool = True,
|
||||
source=False,
|
||||
) -> "Account":
|
||||
return cls(
|
||||
**identity.to_mastodon_json(include_counts=include_counts, source=source)
|
||||
)
|
||||
|
||||
|
||||
class MediaAttachment(Schema):
|
||||
|
@ -63,6 +103,38 @@ class MediaAttachment(Schema):
|
|||
description: str | None
|
||||
blurhash: str | None
|
||||
|
||||
@classmethod
|
||||
def from_post_attachment(
|
||||
cls, attachment: activities_models.PostAttachment
|
||||
) -> "MediaAttachment":
|
||||
return cls(**attachment.to_mastodon_json())
|
||||
|
||||
|
||||
class PollOptions(Schema):
|
||||
title: str
|
||||
votes_count: int | None
|
||||
|
||||
|
||||
class Poll(Schema):
|
||||
id: str
|
||||
expires_at: str | None
|
||||
expired: bool
|
||||
multiple: bool
|
||||
votes_count: int
|
||||
voters_count: int | None
|
||||
voted: bool
|
||||
own_votes: list[int]
|
||||
options: list[PollOptions]
|
||||
emojis: list[CustomEmoji]
|
||||
|
||||
@classmethod
|
||||
def from_post(
|
||||
cls,
|
||||
post: activities_models.Post,
|
||||
identity: users_models.Identity | None = None,
|
||||
) -> "Poll":
|
||||
return cls(**post.type_data.to_mastodon_json(post, identity=identity))
|
||||
|
||||
|
||||
class StatusMention(Schema):
|
||||
id: str
|
||||
|
@ -96,16 +168,99 @@ class Status(Schema):
|
|||
in_reply_to_id: str | None = Field(...)
|
||||
in_reply_to_account_id: str | None = Field(...)
|
||||
reblog: Optional["Status"] = Field(...)
|
||||
poll: None = Field(...)
|
||||
poll: Poll | None = Field(...)
|
||||
card: None = Field(...)
|
||||
language: None = Field(...)
|
||||
text: str | None = Field(...)
|
||||
edited_at: str | None
|
||||
favourited: bool | None
|
||||
reblogged: bool | None
|
||||
muted: bool | None
|
||||
bookmarked: bool | None
|
||||
pinned: bool | None
|
||||
favourited: bool = False
|
||||
reblogged: bool = False
|
||||
muted: bool = False
|
||||
bookmarked: bool = False
|
||||
pinned: bool = False
|
||||
|
||||
@classmethod
|
||||
def from_post(
|
||||
cls,
|
||||
post: activities_models.Post,
|
||||
interactions: dict[str, set[str]] | None = None,
|
||||
bookmarks: set[str] | None = None,
|
||||
identity: users_models.Identity | None = None,
|
||||
) -> "Status":
|
||||
return cls(
|
||||
**post.to_mastodon_json(
|
||||
interactions=interactions,
|
||||
bookmarks=bookmarks,
|
||||
identity=identity,
|
||||
)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def map_from_post(
|
||||
cls,
|
||||
posts: list[activities_models.Post],
|
||||
identity: users_models.Identity,
|
||||
) -> list["Status"]:
|
||||
interactions = activities_models.PostInteraction.get_post_interactions(
|
||||
posts, identity
|
||||
)
|
||||
bookmarks = users_models.Bookmark.for_identity(identity, posts)
|
||||
return [
|
||||
cls.from_post(
|
||||
post,
|
||||
interactions=interactions,
|
||||
bookmarks=bookmarks,
|
||||
identity=identity,
|
||||
)
|
||||
for post in posts
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def from_timeline_event(
|
||||
cls,
|
||||
timeline_event: activities_models.TimelineEvent,
|
||||
interactions: dict[str, set[str]] | None = None,
|
||||
bookmarks: set[str] | None = None,
|
||||
identity: users_models.Identity | None = None,
|
||||
) -> "Status":
|
||||
return cls(
|
||||
**timeline_event.to_mastodon_status_json(
|
||||
interactions=interactions, bookmarks=bookmarks, identity=identity
|
||||
)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def map_from_timeline_event(
|
||||
cls,
|
||||
events: list[activities_models.TimelineEvent],
|
||||
identity: users_models.Identity,
|
||||
) -> list["Status"]:
|
||||
interactions = activities_models.PostInteraction.get_event_interactions(
|
||||
events, identity
|
||||
)
|
||||
bookmarks = users_models.Bookmark.for_identity(
|
||||
identity, events, "subject_post_id"
|
||||
)
|
||||
return [
|
||||
cls.from_timeline_event(
|
||||
event, interactions=interactions, bookmarks=bookmarks, identity=identity
|
||||
)
|
||||
for event in events
|
||||
]
|
||||
|
||||
|
||||
class StatusSource(Schema):
|
||||
id: str
|
||||
text: str
|
||||
spoiler_text: str
|
||||
|
||||
@classmethod
|
||||
def from_post(cls, post: activities_models.Post):
|
||||
return cls(
|
||||
id=post.id,
|
||||
text=FediverseHtmlParser(post.content).plain_text,
|
||||
spoiler_text=post.summary or "",
|
||||
)
|
||||
|
||||
|
||||
class Conversation(Schema):
|
||||
|
@ -133,11 +288,54 @@ class Notification(Schema):
|
|||
account: Account
|
||||
status: Status | None
|
||||
|
||||
@classmethod
|
||||
def from_timeline_event(
|
||||
cls,
|
||||
event: activities_models.TimelineEvent,
|
||||
interactions=None,
|
||||
) -> "Notification":
|
||||
return cls(**event.to_mastodon_notification_json(interactions=interactions))
|
||||
|
||||
|
||||
class Tag(Schema):
|
||||
name: str
|
||||
url: str
|
||||
history: dict
|
||||
history: list
|
||||
following: bool | None
|
||||
|
||||
@classmethod
|
||||
def from_hashtag(
|
||||
cls,
|
||||
hashtag: activities_models.Hashtag,
|
||||
following: bool | None = None,
|
||||
) -> "Tag":
|
||||
return cls(**hashtag.to_mastodon_json(following=following))
|
||||
|
||||
|
||||
class FollowedTag(Tag):
|
||||
id: str
|
||||
|
||||
@classmethod
|
||||
def from_follow(
|
||||
cls,
|
||||
follow: users_models.HashtagFollow,
|
||||
) -> "FollowedTag":
|
||||
return cls(id=follow.id, **follow.hashtag.to_mastodon_json(following=True))
|
||||
|
||||
@classmethod
|
||||
def map_from_follows(
|
||||
cls,
|
||||
hashtag_follows: list[users_models.HashtagFollow],
|
||||
) -> list["Tag"]:
|
||||
return [cls.from_follow(follow) for follow in hashtag_follows]
|
||||
|
||||
|
||||
class FeaturedTag(Schema):
|
||||
id: str
|
||||
name: str
|
||||
url: str
|
||||
statuses_count: int
|
||||
last_status_at: str
|
||||
|
||||
|
||||
class Search(Schema):
|
||||
|
@ -161,6 +359,16 @@ class Relationship(Schema):
|
|||
endorsed: bool
|
||||
note: str
|
||||
|
||||
@classmethod
|
||||
def from_identity_pair(
|
||||
cls,
|
||||
identity: users_models.Identity,
|
||||
from_identity: users_models.Identity,
|
||||
) -> "Relationship":
|
||||
return cls(
|
||||
**IdentityService(identity).mastodon_json_relationship(from_identity)
|
||||
)
|
||||
|
||||
|
||||
class Context(Schema):
|
||||
ancestors: list[Status]
|
||||
|
@ -170,3 +378,128 @@ class Context(Schema):
|
|||
class FamiliarFollowers(Schema):
|
||||
id: str
|
||||
accounts: list[Account]
|
||||
|
||||
|
||||
class Announcement(Schema):
|
||||
id: str
|
||||
content: str
|
||||
starts_at: str | None = Field(...)
|
||||
ends_at: str | None = Field(...)
|
||||
all_day: bool
|
||||
published_at: str
|
||||
updated_at: str
|
||||
read: bool | None # Only missing for anonymous responses
|
||||
mentions: list[Account]
|
||||
statuses: list[Status]
|
||||
tags: list[Tag]
|
||||
emojis: list[CustomEmoji]
|
||||
reactions: list
|
||||
|
||||
@classmethod
|
||||
def from_announcement(
|
||||
cls,
|
||||
announcement: users_models.Announcement,
|
||||
user: users_models.User,
|
||||
) -> "Announcement":
|
||||
return cls(**announcement.to_mastodon_json(user=user))
|
||||
|
||||
|
||||
class List(Schema):
|
||||
id: str
|
||||
title: str
|
||||
replies_policy: Literal[
|
||||
"followed",
|
||||
"list",
|
||||
"none",
|
||||
]
|
||||
|
||||
|
||||
class Preferences(Schema):
|
||||
posting_default_visibility: Literal[
|
||||
"public",
|
||||
"unlisted",
|
||||
"private",
|
||||
"direct",
|
||||
] = Field(alias="posting:default:visibility")
|
||||
posting_default_sensitive: bool = Field(alias="posting:default:sensitive")
|
||||
posting_default_language: str | None = Field(alias="posting:default:language")
|
||||
reading_expand_media: Literal[
|
||||
"default",
|
||||
"show_all",
|
||||
"hide_all",
|
||||
] = Field(alias="reading:expand:media")
|
||||
reading_expand_spoilers: bool = Field(alias="reading:expand:spoilers")
|
||||
|
||||
@classmethod
|
||||
def from_identity(
|
||||
cls,
|
||||
identity: users_models.Identity,
|
||||
) -> "Preferences":
|
||||
visibility_mapping = {
|
||||
activities_models.Post.Visibilities.public: "public",
|
||||
activities_models.Post.Visibilities.unlisted: "unlisted",
|
||||
activities_models.Post.Visibilities.followers: "private",
|
||||
activities_models.Post.Visibilities.mentioned: "direct",
|
||||
activities_models.Post.Visibilities.local_only: "public",
|
||||
}
|
||||
return cls.parse_obj(
|
||||
{
|
||||
"posting:default:visibility": visibility_mapping[
|
||||
identity.config_identity.default_post_visibility
|
||||
],
|
||||
"posting:default:sensitive": False,
|
||||
"posting:default:language": None,
|
||||
"reading:expand:media": "default",
|
||||
"reading:expand:spoilers": identity.config_identity.expand_content_warnings,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class PushSubscriptionKeys(Schema):
|
||||
p256dh: str
|
||||
auth: str
|
||||
|
||||
|
||||
class PushSubscriptionCreation(Schema):
|
||||
endpoint: str
|
||||
keys: PushSubscriptionKeys
|
||||
|
||||
|
||||
class PushDataAlerts(Schema):
|
||||
mention: bool = False
|
||||
status: bool = False
|
||||
reblog: bool = False
|
||||
follow: bool = False
|
||||
follow_request: bool = False
|
||||
favourite: bool = False
|
||||
poll: bool = False
|
||||
update: bool = False
|
||||
admin_sign_up: bool = Field(False, alias="admin.sign_up")
|
||||
admin_report: bool = Field(False, alias="admin.report")
|
||||
|
||||
|
||||
class PushData(Schema):
|
||||
alerts: PushDataAlerts
|
||||
policy: Literal["all", "followed", "follower", "none"] = "all"
|
||||
|
||||
|
||||
class PushSubscription(Schema):
|
||||
id: str
|
||||
endpoint: str
|
||||
alerts: PushDataAlerts
|
||||
policy: str
|
||||
server_key: str
|
||||
|
||||
@classmethod
|
||||
def from_token(
|
||||
cls,
|
||||
token: api_models.Token,
|
||||
) -> Optional["PushSubscription"]:
|
||||
value = token.push_subscription
|
||||
if value:
|
||||
value["id"] = "1"
|
||||
value["server_key"] = settings.SETUP.VAPID_PUBLIC_KEY
|
||||
del value["keys"]
|
||||
return value
|
||||
else:
|
||||
return None
|
||||
|
|
136
api/urls.py
Normal file
136
api/urls.py
Normal file
|
@ -0,0 +1,136 @@
|
|||
from django.urls import path
|
||||
from hatchway import methods
|
||||
|
||||
from api.views import (
|
||||
accounts,
|
||||
announcements,
|
||||
apps,
|
||||
bookmarks,
|
||||
emoji,
|
||||
filters,
|
||||
follow_requests,
|
||||
instance,
|
||||
lists,
|
||||
media,
|
||||
notifications,
|
||||
polls,
|
||||
preferences,
|
||||
push,
|
||||
search,
|
||||
statuses,
|
||||
suggestions,
|
||||
tags,
|
||||
timelines,
|
||||
trends,
|
||||
)
|
||||
|
||||
urlpatterns = [
|
||||
# Accounts
|
||||
path("v1/accounts/verify_credentials", accounts.verify_credentials),
|
||||
path("v1/accounts/update_credentials", accounts.update_credentials),
|
||||
path("v1/accounts/relationships", accounts.account_relationships),
|
||||
path("v1/accounts/familiar_followers", accounts.familiar_followers),
|
||||
path("v1/accounts/search", accounts.accounts_search),
|
||||
path("v1/accounts/lookup", accounts.lookup),
|
||||
path("v1/accounts/<id>", accounts.account),
|
||||
path("v1/accounts/<id>/statuses", accounts.account_statuses),
|
||||
path("v1/accounts/<id>/follow", accounts.account_follow),
|
||||
path("v1/accounts/<id>/unfollow", accounts.account_unfollow),
|
||||
path("v1/accounts/<id>/block", accounts.account_block),
|
||||
path("v1/accounts/<id>/unblock", accounts.account_unblock),
|
||||
path("v1/accounts/<id>/mute", accounts.account_mute),
|
||||
path("v1/accounts/<id>/unmute", accounts.account_unmute),
|
||||
path("v1/accounts/<id>/following", accounts.account_following),
|
||||
path("v1/accounts/<id>/followers", accounts.account_followers),
|
||||
path("v1/accounts/<id>/featured_tags", accounts.account_featured_tags),
|
||||
# Announcements
|
||||
path("v1/announcements", announcements.announcement_list),
|
||||
path("v1/announcements/<pk>/dismiss", announcements.announcement_dismiss),
|
||||
# Apps
|
||||
path("v1/apps", apps.add_app),
|
||||
path("v1/apps/verify_credentials", apps.verify_credentials),
|
||||
# Bookmarks
|
||||
path("v1/bookmarks", bookmarks.bookmarks),
|
||||
# Emoji
|
||||
path("v1/custom_emojis", emoji.emojis),
|
||||
# Filters
|
||||
path("v2/filters", filters.list_filters),
|
||||
path("v1/filters", filters.list_filters),
|
||||
# Follow requests
|
||||
path("v1/follow_requests", follow_requests.follow_requests),
|
||||
path("v1/follow_requests/<id>/authorize", follow_requests.accept_follow_request),
|
||||
path("v1/follow_requests/<id>/reject", follow_requests.reject_follow_request),
|
||||
# Instance
|
||||
path("v1/instance", instance.instance_info_v1),
|
||||
path("v1/instance/activity", instance.activity),
|
||||
path("v1/instance/peers", instance.peers),
|
||||
path("v2/instance", instance.instance_info_v2),
|
||||
# Lists
|
||||
path("v1/lists", lists.get_lists),
|
||||
# Media
|
||||
path("v1/media", media.upload_media),
|
||||
path("v2/media", media.upload_media),
|
||||
path("v1/media/<id>", methods(get=media.get_media, put=media.update_media)),
|
||||
path(
|
||||
"v1/statuses/<id>",
|
||||
methods(
|
||||
get=statuses.status,
|
||||
put=statuses.edit_status,
|
||||
delete=statuses.delete_status,
|
||||
),
|
||||
),
|
||||
path("v1/statuses/<id>/source", statuses.status_source),
|
||||
# Notifications
|
||||
path("v1/notifications", notifications.notifications),
|
||||
path("v1/notifications/clear", notifications.dismiss_notifications),
|
||||
path("v1/notifications/<id>", notifications.get_notification),
|
||||
path("v1/notifications/<id>/dismiss", notifications.dismiss_notification),
|
||||
# Polls
|
||||
path("v1/polls/<id>", polls.get_poll),
|
||||
path("v1/polls/<id>/votes", polls.vote_poll),
|
||||
# Preferences
|
||||
path("v1/preferences", preferences.preferences),
|
||||
# Push
|
||||
path(
|
||||
"v1/push/subscription",
|
||||
methods(
|
||||
get=push.get_subscription,
|
||||
post=push.create_subscription,
|
||||
put=push.update_subscription,
|
||||
delete=push.delete_subscription,
|
||||
),
|
||||
),
|
||||
# Search
|
||||
path("v1/search", search.search),
|
||||
path("v2/search", search.search),
|
||||
# Statuses
|
||||
path("v1/statuses", statuses.post_status),
|
||||
path("v1/statuses/<id>/context", statuses.status_context),
|
||||
path("v1/statuses/<id>/favourite", statuses.favourite_status),
|
||||
path("v1/statuses/<id>/unfavourite", statuses.unfavourite_status),
|
||||
path("v1/statuses/<id>/favourited_by", statuses.favourited_by),
|
||||
path("v1/statuses/<id>/reblog", statuses.reblog_status),
|
||||
path("v1/statuses/<id>/unreblog", statuses.unreblog_status),
|
||||
path("v1/statuses/<id>/reblogged_by", statuses.reblogged_by),
|
||||
path("v1/statuses/<id>/bookmark", statuses.bookmark_status),
|
||||
path("v1/statuses/<id>/unbookmark", statuses.unbookmark_status),
|
||||
path("v1/statuses/<id>/pin", statuses.pin_status),
|
||||
path("v1/statuses/<id>/unpin", statuses.unpin_status),
|
||||
# Tags
|
||||
path("v1/followed_tags", tags.followed_tags),
|
||||
path("v1/tags/<hashtag>", tags.hashtag),
|
||||
path("v1/tags/<id>/follow", tags.follow),
|
||||
path("v1/tags/<id>/unfollow", tags.unfollow),
|
||||
# Timelines
|
||||
path("v1/timelines/home", timelines.home),
|
||||
path("v1/timelines/public", timelines.public),
|
||||
path("v1/timelines/tag/<hashtag>", timelines.hashtag),
|
||||
path("v1/conversations", timelines.conversations),
|
||||
path("v1/favourites", timelines.favourites),
|
||||
# Trends
|
||||
path("v1/trends/tags", trends.trends_tags),
|
||||
path("v1/trends/statuses", trends.trends_statuses),
|
||||
path("v1/trends/links", trends.trends_links),
|
||||
# Suggestions
|
||||
path("v2/suggestions", suggestions.suggested_users),
|
||||
]
|
|
@ -1,11 +0,0 @@
|
|||
from .accounts import * # noqa
|
||||
from .apps import * # noqa
|
||||
from .emoji import * # noqa
|
||||
from .filters import * # noqa
|
||||
from .instance import * # noqa
|
||||
from .media import * # noqa
|
||||
from .notifications import * # noqa
|
||||
from .oauth import * # noqa
|
||||
from .search import * # noqa
|
||||
from .statuses import * # noqa
|
||||
from .timelines import * # noqa
|
|
@ -1,74 +1,144 @@
|
|||
from django.db.models import Q
|
||||
from django.http import HttpRequest, HttpResponse, JsonResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from ninja import Field
|
||||
from typing import Any
|
||||
|
||||
from activities.models import Post, PostInteraction
|
||||
from django.core.files import File
|
||||
from django.http import HttpRequest
|
||||
from django.shortcuts import get_object_or_404
|
||||
from hatchway import ApiResponse, QueryOrBody, api_view
|
||||
|
||||
from activities.models import Post, PostInteraction, PostInteractionStates
|
||||
from activities.services import SearchService
|
||||
from api import schemas
|
||||
from api.decorators import identity_required
|
||||
from api.pagination import MastodonPaginator
|
||||
from api.views.base import api_router
|
||||
from users.models import Identity
|
||||
from api.decorators import scope_required
|
||||
from api.pagination import MastodonPaginator, PaginatingApiResponse, PaginationResult
|
||||
from core.models import Config
|
||||
from users.models import Identity, IdentityStates
|
||||
from users.services import IdentityService
|
||||
from users.shortcuts import by_handle_or_404
|
||||
|
||||
|
||||
@api_router.get("/v1/accounts/verify_credentials", response=schemas.Account)
|
||||
@identity_required
|
||||
def verify_credentials(request):
|
||||
return request.identity.to_mastodon_json()
|
||||
@scope_required("read")
|
||||
@api_view.get
|
||||
def verify_credentials(request) -> schemas.Account:
|
||||
return schemas.Account.from_identity(request.identity, source=True)
|
||||
|
||||
|
||||
@api_router.get("/v1/accounts/relationships", response=list[schemas.Relationship])
|
||||
@identity_required
|
||||
def account_relationships(request):
|
||||
ids = request.GET.getlist("id[]")
|
||||
@scope_required("write")
|
||||
@api_view.patch
|
||||
def update_credentials(
|
||||
request,
|
||||
display_name: QueryOrBody[str | None] = None,
|
||||
note: QueryOrBody[str | None] = None,
|
||||
discoverable: QueryOrBody[bool | None] = None,
|
||||
locked: QueryOrBody[bool | None] = None,
|
||||
source: QueryOrBody[dict[str, Any] | None] = None,
|
||||
fields_attributes: QueryOrBody[dict[str, dict[str, str]] | None] = None,
|
||||
avatar: File | None = None,
|
||||
header: File | None = None,
|
||||
) -> schemas.Account:
|
||||
identity = request.identity
|
||||
service = IdentityService(identity)
|
||||
if display_name is not None:
|
||||
identity.name = display_name
|
||||
if note is not None:
|
||||
service.set_summary(note)
|
||||
if discoverable is not None:
|
||||
identity.discoverable = discoverable
|
||||
if locked is not None:
|
||||
identity.manually_approves_followers = locked
|
||||
if source:
|
||||
if "privacy" in source:
|
||||
privacy_map = {
|
||||
"public": Post.Visibilities.public,
|
||||
"unlisted": Post.Visibilities.unlisted,
|
||||
"private": Post.Visibilities.followers,
|
||||
"direct": Post.Visibilities.mentioned,
|
||||
}
|
||||
Config.set_identity(
|
||||
identity,
|
||||
"default_post_visibility",
|
||||
privacy_map[source["privacy"]],
|
||||
)
|
||||
if fields_attributes:
|
||||
identity.metadata = []
|
||||
for attribute in fields_attributes.values():
|
||||
attr_name = attribute.get("name", None)
|
||||
attr_value = attribute.get("value", None)
|
||||
if attr_name:
|
||||
# Empty value means delete this item
|
||||
if not attr_value:
|
||||
break
|
||||
identity.metadata.append({"name": attr_name, "value": attr_value})
|
||||
if avatar:
|
||||
service.set_icon(avatar)
|
||||
if header:
|
||||
service.set_image(header)
|
||||
identity.save()
|
||||
identity.transition_perform(IdentityStates.edited)
|
||||
return schemas.Account.from_identity(identity, source=True)
|
||||
|
||||
|
||||
@scope_required("read")
|
||||
@api_view.get
|
||||
def account_relationships(
|
||||
request, id: list[str] | str | None
|
||||
) -> list[schemas.Relationship]:
|
||||
result = []
|
||||
for id in ids:
|
||||
identity = get_object_or_404(Identity, pk=id)
|
||||
if isinstance(id, str):
|
||||
ids = [id]
|
||||
elif id is None:
|
||||
ids = []
|
||||
else:
|
||||
ids = id
|
||||
for actual_id in ids:
|
||||
identity = get_object_or_404(Identity, pk=actual_id)
|
||||
result.append(
|
||||
IdentityService(identity).mastodon_json_relationship(request.identity)
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
@api_router.get(
|
||||
"/v1/accounts/familiar_followers", response=list[schemas.FamiliarFollowers]
|
||||
)
|
||||
@identity_required
|
||||
def familiar_followers(request):
|
||||
@scope_required("read")
|
||||
@api_view.get
|
||||
def familiar_followers(
|
||||
request, id: list[str] | str | None
|
||||
) -> list[schemas.FamiliarFollowers]:
|
||||
"""
|
||||
Returns people you follow that also follow given account IDs
|
||||
"""
|
||||
ids = request.GET.getlist("id[]")
|
||||
if isinstance(id, str):
|
||||
ids = [id]
|
||||
elif id is None:
|
||||
ids = []
|
||||
else:
|
||||
ids = id
|
||||
result = []
|
||||
for id in ids:
|
||||
target_identity = get_object_or_404(Identity, pk=id)
|
||||
for actual_id in ids:
|
||||
target_identity = get_object_or_404(Identity, pk=actual_id)
|
||||
result.append(
|
||||
{
|
||||
"id": id,
|
||||
"accounts": [
|
||||
identity.to_mastodon_json()
|
||||
schemas.FamiliarFollowers(
|
||||
id=actual_id,
|
||||
accounts=[
|
||||
schemas.Account.from_identity(identity)
|
||||
for identity in Identity.objects.filter(
|
||||
inbound_follows__source=request.identity,
|
||||
outbound_follows__target=target_identity,
|
||||
)[:20]
|
||||
],
|
||||
}
|
||||
)
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
@api_router.get("/v1/accounts/search", response=list[schemas.Account])
|
||||
@identity_required
|
||||
def search(
|
||||
@scope_required("read")
|
||||
@api_view.get
|
||||
def accounts_search(
|
||||
request,
|
||||
q: str,
|
||||
fetch_identities: bool = Field(False, alias="resolve"),
|
||||
resolve: bool = False,
|
||||
following: bool = False,
|
||||
limit: int = 20,
|
||||
offset: int = 0,
|
||||
):
|
||||
) -> list[schemas.Account]:
|
||||
"""
|
||||
Handles searching for accounts by username or handle
|
||||
"""
|
||||
|
@ -78,44 +148,33 @@ def search(
|
|||
return []
|
||||
searcher = SearchService(q, request.identity)
|
||||
search_result = searcher.search_identities_handle()
|
||||
return [i.to_mastodon_json() for i in search_result]
|
||||
return [schemas.Account.from_identity(i) for i in search_result]
|
||||
|
||||
|
||||
@api_router.get("/v1/accounts/lookup", response=schemas.Account)
|
||||
def lookup(request: HttpRequest, acct: str):
|
||||
@api_view.get
|
||||
def lookup(request: HttpRequest, acct: str) -> schemas.Account:
|
||||
"""
|
||||
Quickly lookup a username to see if it is available, skipping WebFinger
|
||||
resolution.
|
||||
"""
|
||||
acct = acct.lstrip("@")
|
||||
host = request.get_host()
|
||||
|
||||
identity = Identity.objects.filter(
|
||||
Q(domain__service_domain__iexact=host) | Q(domain__domain__iexact=host),
|
||||
local=True,
|
||||
username__iexact=acct,
|
||||
).first()
|
||||
|
||||
if not identity:
|
||||
return JsonResponse({"error": "Record not found"}, status=404)
|
||||
|
||||
return identity.to_mastodon_json()
|
||||
identity = by_handle_or_404(request, handle=acct, local=False)
|
||||
return schemas.Account.from_identity(identity)
|
||||
|
||||
|
||||
@api_router.get("/v1/accounts/{id}", response=schemas.Account)
|
||||
@identity_required
|
||||
def account(request, id: str):
|
||||
@scope_required("read:accounts")
|
||||
@api_view.get
|
||||
def account(request, id: str) -> schemas.Account:
|
||||
identity = get_object_or_404(
|
||||
Identity.objects.exclude(restriction=Identity.Restriction.blocked), pk=id
|
||||
Identity.objects.exclude(restriction=Identity.Restriction.blocked),
|
||||
pk=id,
|
||||
)
|
||||
return identity.to_mastodon_json()
|
||||
return schemas.Account.from_identity(identity)
|
||||
|
||||
|
||||
@api_router.get("/v1/accounts/{id}/statuses", response=list[schemas.Status])
|
||||
@identity_required
|
||||
@scope_required("read:statuses")
|
||||
@api_view.get
|
||||
def account_statuses(
|
||||
request: HttpRequest,
|
||||
response: HttpResponse,
|
||||
id: str,
|
||||
exclude_reblogs: bool = False,
|
||||
exclude_replies: bool = False,
|
||||
|
@ -126,107 +185,191 @@ def account_statuses(
|
|||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 20,
|
||||
):
|
||||
) -> ApiResponse[list[schemas.Status]]:
|
||||
identity = get_object_or_404(
|
||||
Identity.objects.exclude(restriction=Identity.Restriction.blocked), pk=id
|
||||
)
|
||||
queryset = (
|
||||
identity.posts.not_hidden()
|
||||
.unlisted(include_replies=not exclude_replies)
|
||||
.select_related("author")
|
||||
.prefetch_related("attachments")
|
||||
.select_related("author", "author__domain")
|
||||
.prefetch_related(
|
||||
"attachments",
|
||||
"mentions__domain",
|
||||
"emojis",
|
||||
"author__inbound_follows",
|
||||
"author__outbound_follows",
|
||||
"author__posts",
|
||||
)
|
||||
.order_by("-created")
|
||||
)
|
||||
if pinned:
|
||||
return []
|
||||
queryset = queryset.filter(
|
||||
interactions__type=PostInteraction.Types.pin,
|
||||
interactions__state__in=PostInteractionStates.group_active(),
|
||||
)
|
||||
if only_media:
|
||||
queryset = queryset.filter(attachments__pk__isnull=False)
|
||||
if tagged:
|
||||
queryset = queryset.tagged_with(tagged)
|
||||
|
||||
paginator = MastodonPaginator(Post, sort_attribute="published")
|
||||
pager = paginator.paginate(
|
||||
# Get user posts with pagination
|
||||
paginator = MastodonPaginator()
|
||||
pager: PaginationResult[Post] = paginator.paginate(
|
||||
queryset,
|
||||
min_id=min_id,
|
||||
max_id=max_id,
|
||||
since_id=since_id,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
if pager.results:
|
||||
response.headers["Link"] = pager.link_header(
|
||||
request,
|
||||
[
|
||||
"limit",
|
||||
"id",
|
||||
"exclude_reblogs",
|
||||
"exclude_replies",
|
||||
"only_media",
|
||||
"pinned",
|
||||
"tagged",
|
||||
],
|
||||
)
|
||||
|
||||
interactions = PostInteraction.get_post_interactions(
|
||||
pager.results, request.identity
|
||||
return PaginatingApiResponse(
|
||||
schemas.Status.map_from_post(pager.results, request.identity),
|
||||
request=request,
|
||||
include_params=[
|
||||
"limit",
|
||||
"id",
|
||||
"exclude_reblogs",
|
||||
"exclude_replies",
|
||||
"only_media",
|
||||
"pinned",
|
||||
"tagged",
|
||||
],
|
||||
)
|
||||
return [post.to_mastodon_json(interactions=interactions) for post in pager.results]
|
||||
|
||||
|
||||
@api_router.post("/v1/accounts/{id}/follow", response=schemas.Relationship)
|
||||
@identity_required
|
||||
def account_follow(request, id: str):
|
||||
@scope_required("write:follows")
|
||||
@api_view.post
|
||||
def account_follow(request, id: str, reblogs: bool = True) -> schemas.Relationship:
|
||||
identity = get_object_or_404(
|
||||
Identity.objects.exclude(restriction=Identity.Restriction.blocked), pk=id
|
||||
)
|
||||
service = IdentityService(identity)
|
||||
service.follow_from(request.identity)
|
||||
return service.mastodon_json_relationship(request.identity)
|
||||
service = IdentityService(request.identity)
|
||||
service.follow(identity, boosts=reblogs)
|
||||
return schemas.Relationship.from_identity_pair(identity, request.identity)
|
||||
|
||||
|
||||
@api_router.post("/v1/accounts/{id}/unfollow", response=schemas.Relationship)
|
||||
@identity_required
|
||||
def account_unfollow(request, id: str):
|
||||
@scope_required("write:follows")
|
||||
@api_view.post
|
||||
def account_unfollow(request, id: str) -> schemas.Relationship:
|
||||
identity = get_object_or_404(
|
||||
Identity.objects.exclude(restriction=Identity.Restriction.blocked), pk=id
|
||||
)
|
||||
service = IdentityService(identity)
|
||||
service.unfollow_from(request.identity)
|
||||
return service.mastodon_json_relationship(request.identity)
|
||||
service = IdentityService(request.identity)
|
||||
service.unfollow(identity)
|
||||
return schemas.Relationship.from_identity_pair(identity, request.identity)
|
||||
|
||||
|
||||
@api_router.get("/v1/accounts/{id}/following", response=list[schemas.Account])
|
||||
@scope_required("write:blocks")
|
||||
@api_view.post
|
||||
def account_block(request, id: str) -> schemas.Relationship:
|
||||
identity = get_object_or_404(Identity, pk=id)
|
||||
service = IdentityService(request.identity)
|
||||
service.block(identity)
|
||||
return schemas.Relationship.from_identity_pair(identity, request.identity)
|
||||
|
||||
|
||||
@scope_required("write:blocks")
|
||||
@api_view.post
|
||||
def account_unblock(request, id: str) -> schemas.Relationship:
|
||||
identity = get_object_or_404(Identity, pk=id)
|
||||
service = IdentityService(request.identity)
|
||||
service.unblock(identity)
|
||||
return schemas.Relationship.from_identity_pair(identity, request.identity)
|
||||
|
||||
|
||||
@scope_required("write:blocks")
|
||||
@api_view.post
|
||||
def account_mute(
|
||||
request,
|
||||
id: str,
|
||||
notifications: QueryOrBody[bool] = True,
|
||||
duration: QueryOrBody[int] = 0,
|
||||
) -> schemas.Relationship:
|
||||
identity = get_object_or_404(Identity, pk=id)
|
||||
service = IdentityService(request.identity)
|
||||
service.mute(
|
||||
identity,
|
||||
duration=duration,
|
||||
include_notifications=notifications,
|
||||
)
|
||||
return schemas.Relationship.from_identity_pair(identity, request.identity)
|
||||
|
||||
|
||||
@scope_required("write:blocks")
|
||||
@api_view.post
|
||||
def account_unmute(request, id: str) -> schemas.Relationship:
|
||||
identity = get_object_or_404(Identity, pk=id)
|
||||
service = IdentityService(request.identity)
|
||||
service.unmute(identity)
|
||||
return schemas.Relationship.from_identity_pair(identity, request.identity)
|
||||
|
||||
|
||||
@api_view.get
|
||||
def account_following(
|
||||
request: HttpRequest,
|
||||
response: HttpResponse,
|
||||
id: str,
|
||||
max_id: str | None = None,
|
||||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 40,
|
||||
):
|
||||
) -> ApiResponse[list[schemas.Account]]:
|
||||
identity = get_object_or_404(
|
||||
Identity.objects.exclude(restriction=Identity.Restriction.blocked), pk=id
|
||||
)
|
||||
|
||||
if not identity.config_identity.visible_follows and request.identity != identity:
|
||||
return []
|
||||
return ApiResponse([])
|
||||
|
||||
service = IdentityService(identity)
|
||||
|
||||
paginator = MastodonPaginator(Identity, max_limit=80, sort_attribute="username")
|
||||
pager = paginator.paginate(
|
||||
paginator = MastodonPaginator(max_limit=80)
|
||||
pager: PaginationResult[Identity] = paginator.paginate(
|
||||
service.following(),
|
||||
min_id=min_id,
|
||||
max_id=max_id,
|
||||
since_id=since_id,
|
||||
limit=limit,
|
||||
)
|
||||
return PaginatingApiResponse(
|
||||
[schemas.Account.from_identity(i) for i in pager.results],
|
||||
request=request,
|
||||
include_params=["limit"],
|
||||
)
|
||||
|
||||
if pager.results:
|
||||
response.headers["Link"] = pager.link_header(
|
||||
request,
|
||||
["limit"],
|
||||
)
|
||||
|
||||
return [result.to_mastodon_json() for result in pager.results]
|
||||
@api_view.get
|
||||
def account_followers(
|
||||
request: HttpRequest,
|
||||
id: str,
|
||||
max_id: str | None = None,
|
||||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 40,
|
||||
) -> ApiResponse[list[schemas.Account]]:
|
||||
identity = get_object_or_404(
|
||||
Identity.objects.exclude(restriction=Identity.Restriction.blocked), pk=id
|
||||
)
|
||||
|
||||
if not identity.config_identity.visible_follows and request.identity != identity:
|
||||
return ApiResponse([])
|
||||
|
||||
service = IdentityService(identity)
|
||||
|
||||
paginator = MastodonPaginator(max_limit=80)
|
||||
pager: PaginationResult[Identity] = paginator.paginate(
|
||||
service.followers(),
|
||||
min_id=min_id,
|
||||
max_id=max_id,
|
||||
since_id=since_id,
|
||||
limit=limit,
|
||||
)
|
||||
return PaginatingApiResponse(
|
||||
[schemas.Account.from_identity(i) for i in pager.results],
|
||||
request=request,
|
||||
include_params=["limit"],
|
||||
)
|
||||
|
||||
|
||||
@api_view.get
|
||||
def account_featured_tags(request: HttpRequest, id: str) -> list[schemas.FeaturedTag]:
|
||||
# Not implemented yet
|
||||
return []
|
||||
|
|
23
api/views/announcements.py
Normal file
23
api/views/announcements.py
Normal file
|
@ -0,0 +1,23 @@
|
|||
from django.shortcuts import get_object_or_404
|
||||
from hatchway import api_view
|
||||
|
||||
from api import schemas
|
||||
from api.decorators import scope_required
|
||||
from users.models import Announcement
|
||||
from users.services import AnnouncementService
|
||||
|
||||
|
||||
@scope_required("read:notifications")
|
||||
@api_view.get
|
||||
def announcement_list(request) -> list[schemas.Announcement]:
|
||||
return [
|
||||
schemas.Announcement.from_announcement(a, request.user)
|
||||
for a in AnnouncementService(request.user).visible()
|
||||
]
|
||||
|
||||
|
||||
@scope_required("write:notifications")
|
||||
@api_view.post
|
||||
def announcement_dismiss(request, pk: str):
|
||||
announcement = get_object_or_404(Announcement, pk=pk)
|
||||
AnnouncementService(request.user).mark_seen(announcement)
|
|
@ -1,29 +1,30 @@
|
|||
import secrets
|
||||
from hatchway import QueryOrBody, api_view
|
||||
|
||||
from ninja import Schema
|
||||
|
||||
from .. import schemas
|
||||
from ..models import Application
|
||||
from .base import api_router
|
||||
from api import schemas
|
||||
from api.decorators import scope_required
|
||||
from api.models import Application
|
||||
|
||||
|
||||
class CreateApplicationSchema(Schema):
|
||||
client_name: str
|
||||
redirect_uris: str
|
||||
scopes: None | str = None
|
||||
website: None | str = None
|
||||
|
||||
|
||||
@api_router.post("/v1/apps", response=schemas.Application)
|
||||
def add_app(request, details: CreateApplicationSchema):
|
||||
client_id = "tk-" + secrets.token_urlsafe(16)
|
||||
client_secret = secrets.token_urlsafe(40)
|
||||
application = Application.objects.create(
|
||||
name=details.client_name,
|
||||
website=details.website,
|
||||
client_id=client_id,
|
||||
client_secret=client_secret,
|
||||
redirect_uris=details.redirect_uris,
|
||||
scopes=details.scopes or "read",
|
||||
@api_view.post
|
||||
def add_app(
|
||||
request,
|
||||
client_name: QueryOrBody[str],
|
||||
redirect_uris: QueryOrBody[str],
|
||||
scopes: QueryOrBody[None | str] = None,
|
||||
website: QueryOrBody[None | str] = None,
|
||||
) -> schemas.Application:
|
||||
application = Application.create(
|
||||
client_name=client_name,
|
||||
website=website,
|
||||
redirect_uris=redirect_uris,
|
||||
scopes=scopes,
|
||||
)
|
||||
return application
|
||||
return schemas.Application.from_application(application)
|
||||
|
||||
|
||||
@scope_required("read")
|
||||
@api_view.get
|
||||
def verify_credentials(
|
||||
request,
|
||||
) -> schemas.Application:
|
||||
return schemas.Application.from_application_no_keys(request.token.application)
|
||||
|
|
|
@ -1,5 +0,0 @@
|
|||
from ninja import NinjaAPI
|
||||
|
||||
from api.parser import FormOrJsonParser
|
||||
|
||||
api_router = NinjaAPI(parser=FormOrJsonParser())
|
33
api/views/bookmarks.py
Normal file
33
api/views/bookmarks.py
Normal file
|
@ -0,0 +1,33 @@
|
|||
from django.http import HttpRequest
|
||||
from hatchway import api_view
|
||||
|
||||
from activities.models import Post
|
||||
from activities.services import TimelineService
|
||||
from api import schemas
|
||||
from api.decorators import scope_required
|
||||
from api.pagination import MastodonPaginator, PaginatingApiResponse, PaginationResult
|
||||
|
||||
|
||||
@scope_required("read:bookmarks")
|
||||
@api_view.get
|
||||
def bookmarks(
|
||||
request: HttpRequest,
|
||||
max_id: str | None = None,
|
||||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 20,
|
||||
) -> list[schemas.Status]:
|
||||
queryset = TimelineService(request.identity).bookmarks()
|
||||
paginator = MastodonPaginator()
|
||||
pager: PaginationResult[Post] = paginator.paginate(
|
||||
queryset,
|
||||
min_id=min_id,
|
||||
max_id=max_id,
|
||||
since_id=since_id,
|
||||
limit=limit,
|
||||
)
|
||||
return PaginatingApiResponse(
|
||||
schemas.Status.map_from_post(pager.results, request.identity),
|
||||
request=request,
|
||||
include_params=["limit"],
|
||||
)
|
|
@ -1,8 +1,11 @@
|
|||
from hatchway import api_view
|
||||
|
||||
from activities.models import Emoji
|
||||
from api.schemas import CustomEmoji
|
||||
from api.views.base import api_router
|
||||
|
||||
|
||||
@api_router.get("/v1/custom_emojis", response=list[CustomEmoji])
|
||||
def emojis(request):
|
||||
return [e.to_mastodon_json() for e in Emoji.objects.usable().filter(local=True)]
|
||||
@api_view.get
|
||||
def emojis(request) -> list[CustomEmoji]:
|
||||
return [
|
||||
CustomEmoji.from_emoji(e) for e in Emoji.objects.usable().filter(local=True)
|
||||
]
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
from hatchway import api_view
|
||||
|
||||
from api.decorators import identity_required
|
||||
from api.views.base import api_router
|
||||
|
||||
|
||||
@api_router.get("/v1/filters")
|
||||
@identity_required
|
||||
def status(request):
|
||||
@api_view.get
|
||||
def list_filters(request):
|
||||
return []
|
||||
|
|
60
api/views/follow_requests.py
Normal file
60
api/views/follow_requests.py
Normal file
|
@ -0,0 +1,60 @@
|
|||
from django.http import HttpRequest
|
||||
from django.shortcuts import get_object_or_404
|
||||
from hatchway import api_view
|
||||
|
||||
from api import schemas
|
||||
from api.decorators import scope_required
|
||||
from api.pagination import MastodonPaginator, PaginatingApiResponse, PaginationResult
|
||||
from users.models.identity import Identity
|
||||
from users.services.identity import IdentityService
|
||||
|
||||
|
||||
@scope_required("read:follows")
|
||||
@api_view.get
|
||||
def follow_requests(
|
||||
request: HttpRequest,
|
||||
max_id: str | None = None,
|
||||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 40,
|
||||
) -> list[schemas.Account]:
|
||||
service = IdentityService(request.identity)
|
||||
paginator = MastodonPaginator(max_limit=80)
|
||||
pager: PaginationResult[Identity] = paginator.paginate(
|
||||
service.follow_requests(),
|
||||
min_id=min_id,
|
||||
max_id=max_id,
|
||||
since_id=since_id,
|
||||
limit=limit,
|
||||
)
|
||||
return PaginatingApiResponse(
|
||||
[schemas.Account.from_identity(i) for i in pager.results],
|
||||
request=request,
|
||||
include_params=["limit"],
|
||||
)
|
||||
|
||||
|
||||
@scope_required("write:follows")
|
||||
@api_view.post
|
||||
def accept_follow_request(
|
||||
request: HttpRequest,
|
||||
id: str | None = None,
|
||||
) -> schemas.Relationship:
|
||||
source_identity = get_object_or_404(
|
||||
Identity.objects.exclude(restriction=Identity.Restriction.blocked), pk=id
|
||||
)
|
||||
IdentityService(request.identity).accept_follow_request(source_identity)
|
||||
return IdentityService(source_identity).mastodon_json_relationship(request.identity)
|
||||
|
||||
|
||||
@scope_required("write:follows")
|
||||
@api_view.post
|
||||
def reject_follow_request(
|
||||
request: HttpRequest,
|
||||
id: str | None = None,
|
||||
) -> schemas.Relationship:
|
||||
source_identity = get_object_or_404(
|
||||
Identity.objects.exclude(restriction=Identity.Restriction.blocked), pk=id
|
||||
)
|
||||
IdentityService(request.identity).reject_follow_request(source_identity)
|
||||
return IdentityService(source_identity).mastodon_json_relationship(request.identity)
|
|
@ -1,15 +1,28 @@
|
|||
import datetime
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.utils import timezone
|
||||
from hatchway import api_view
|
||||
|
||||
from activities.models import Post
|
||||
from api import schemas
|
||||
from core.models import Config
|
||||
from takahe import __version__
|
||||
from users.models import Domain, Identity
|
||||
|
||||
from .base import api_router
|
||||
|
||||
|
||||
@api_router.get("/v1/instance")
|
||||
def instance_info(request):
|
||||
@api_view.get
|
||||
def instance_info_v1(request):
|
||||
# The stats are expensive to calculate, so don't do it very often
|
||||
stats = cache.get("instance_info_stats")
|
||||
if stats is None:
|
||||
stats = {
|
||||
"user_count": Identity.objects.filter(local=True).count(),
|
||||
"status_count": Post.objects.filter(local=True).not_hidden().count(),
|
||||
"domain_count": Domain.objects.count(),
|
||||
}
|
||||
cache.set("instance_info_stats", stats, timeout=300)
|
||||
return {
|
||||
"uri": request.headers.get("host", settings.SETUP.MAIN_DOMAIN),
|
||||
"title": Config.system.site_name,
|
||||
|
@ -18,11 +31,7 @@ def instance_info(request):
|
|||
"email": "",
|
||||
"version": f"takahe/{__version__}",
|
||||
"urls": {},
|
||||
"stats": {
|
||||
"user_count": Identity.objects.filter(local=True).count(),
|
||||
"status_count": Post.objects.filter(local=True).count(),
|
||||
"domain_count": Domain.objects.count(),
|
||||
},
|
||||
"stats": stats,
|
||||
"thumbnail": Config.system.site_banner,
|
||||
"languages": ["en"],
|
||||
"registrations": (Config.system.signup_allowed),
|
||||
|
@ -32,7 +41,7 @@ def instance_info(request):
|
|||
"accounts": {},
|
||||
"statuses": {
|
||||
"max_characters": Config.system.post_length,
|
||||
"max_media_attachments": 4,
|
||||
"max_media_attachments": Config.system.max_media_attachments,
|
||||
"characters_reserved_per_url": 23,
|
||||
},
|
||||
"media_attachments": {
|
||||
|
@ -47,7 +56,129 @@ def instance_info(request):
|
|||
"image_size_limit": (1024**2) * 10,
|
||||
"image_matrix_limit": 2000 * 2000,
|
||||
},
|
||||
"polls": {
|
||||
"max_options": 4,
|
||||
"max_characters_per_option": 50,
|
||||
"min_expiration": 300,
|
||||
"max_expiration": 2629746,
|
||||
},
|
||||
},
|
||||
"contact_account": None,
|
||||
"rules": [],
|
||||
}
|
||||
|
||||
|
||||
@api_view.get
|
||||
def instance_info_v2(request) -> dict:
|
||||
current_domain = Domain.get_domain(
|
||||
request.headers.get("host", settings.SETUP.MAIN_DOMAIN)
|
||||
)
|
||||
if current_domain is None or not current_domain.local:
|
||||
current_domain = Domain.get_domain(settings.SETUP.MAIN_DOMAIN)
|
||||
if current_domain is None:
|
||||
raise ValueError("No domain set up for MAIN_DOMAIN")
|
||||
admin_identity = (
|
||||
Identity.objects.filter(users__admin=True).order_by("created").first()
|
||||
)
|
||||
return {
|
||||
"domain": current_domain.domain,
|
||||
"title": Config.system.site_name,
|
||||
"version": f"takahe/{__version__}",
|
||||
"source_url": "https://github.com/jointakahe/takahe",
|
||||
"description": "",
|
||||
"email": "",
|
||||
"urls": {},
|
||||
"usage": {
|
||||
"users": {
|
||||
"active_month": Identity.objects.filter(local=True).count(),
|
||||
}
|
||||
},
|
||||
"thumbnail": {
|
||||
"url": Config.system.site_banner,
|
||||
},
|
||||
"languages": ["en"],
|
||||
"configuration": {
|
||||
"urls": {},
|
||||
"accounts": {"max_featured_tags": 0},
|
||||
"statuses": {
|
||||
"max_characters": Config.system.post_length,
|
||||
"max_media_attachments": Config.system.max_media_attachments,
|
||||
"characters_reserved_per_url": 23,
|
||||
},
|
||||
"media_attachments": {
|
||||
"supported_mime_types": [
|
||||
"image/apng",
|
||||
"image/avif",
|
||||
"image/gif",
|
||||
"image/jpeg",
|
||||
"image/png",
|
||||
"image/webp",
|
||||
],
|
||||
"image_size_limit": (1024**2) * 10,
|
||||
"image_matrix_limit": 2000 * 2000,
|
||||
"video_size_limit": 0,
|
||||
"video_frame_rate_limit": 60,
|
||||
"video_matrix_limit": 2000 * 2000,
|
||||
},
|
||||
"polls": {
|
||||
"max_options": 4,
|
||||
"max_characters_per_option": 50,
|
||||
"min_expiration": 300,
|
||||
"max_expiration": 2629746,
|
||||
},
|
||||
"translation": {"enabled": False},
|
||||
},
|
||||
"registrations": {
|
||||
"enabled": Config.system.signup_allowed,
|
||||
"approval_required": False,
|
||||
"message": None,
|
||||
},
|
||||
"contact": {
|
||||
"email": "",
|
||||
"account": schemas.Account.from_identity(admin_identity),
|
||||
},
|
||||
"rules": [],
|
||||
}
|
||||
|
||||
|
||||
@api_view.get
|
||||
def peers(request) -> list[str]:
|
||||
return list(
|
||||
Domain.objects.filter(local=False, blocked=False).values_list(
|
||||
"domain", flat=True
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@api_view.get
|
||||
def activity(request) -> list:
|
||||
"""
|
||||
Weekly activity endpoint
|
||||
"""
|
||||
# The stats are expensive to calculate, so don't do it very often
|
||||
stats = cache.get("instance_activity_stats")
|
||||
if stats is None:
|
||||
stats = []
|
||||
# Work out our most recent week start
|
||||
now = timezone.now()
|
||||
week_start = now.replace(
|
||||
hour=0, minute=0, second=0, microsecond=0
|
||||
) - datetime.timedelta(now.weekday())
|
||||
for i in range(12):
|
||||
week_end = week_start + datetime.timedelta(days=7)
|
||||
stats.append(
|
||||
{
|
||||
"week": int(week_start.timestamp()),
|
||||
"statuses": Post.objects.filter(
|
||||
local=True, created__gte=week_start, created__lt=week_end
|
||||
).count(),
|
||||
# TODO: Populate when we have identity activity tracking
|
||||
"logins": 0,
|
||||
"registrations": Identity.objects.filter(
|
||||
local=True, created__gte=week_start, created__lt=week_end
|
||||
).count(),
|
||||
}
|
||||
)
|
||||
week_start -= datetime.timedelta(days=7)
|
||||
cache.set("instance_activity_stats", stats, timeout=300)
|
||||
return stats
|
||||
|
|
12
api/views/lists.py
Normal file
12
api/views/lists.py
Normal file
|
@ -0,0 +1,12 @@
|
|||
from django.http import HttpRequest
|
||||
from hatchway import api_view
|
||||
|
||||
from api import schemas
|
||||
from api.decorators import scope_required
|
||||
|
||||
|
||||
@scope_required("read:lists")
|
||||
@api_view.get
|
||||
def get_lists(request: HttpRequest) -> list[schemas.List]:
|
||||
# We don't implement this yet
|
||||
return []
|
|
@ -1,28 +1,22 @@
|
|||
from django.core.files import File
|
||||
from django.shortcuts import get_object_or_404
|
||||
from ninja import File, Schema
|
||||
from ninja.files import UploadedFile
|
||||
from hatchway import ApiError, QueryOrBody, api_view
|
||||
|
||||
from activities.models import PostAttachment, PostAttachmentStates
|
||||
from api import schemas
|
||||
from api.views.base import api_router
|
||||
from core.files import blurhash_image, resize_image
|
||||
|
||||
from ..decorators import identity_required
|
||||
from ..decorators import scope_required
|
||||
|
||||
|
||||
class UploadMediaSchema(Schema):
|
||||
description: str = ""
|
||||
focus: str = "0,0"
|
||||
|
||||
|
||||
@api_router.post("/v1/media", response=schemas.MediaAttachment)
|
||||
@api_router.post("/v2/media", response=schemas.MediaAttachment)
|
||||
@identity_required
|
||||
@scope_required("write:media")
|
||||
@api_view.post
|
||||
def upload_media(
|
||||
request,
|
||||
file: UploadedFile = File(...),
|
||||
details: UploadMediaSchema | None = None,
|
||||
):
|
||||
file: File,
|
||||
description: QueryOrBody[str] = "",
|
||||
focus: QueryOrBody[str] = "0,0",
|
||||
) -> schemas.MediaAttachment:
|
||||
main_file = resize_image(
|
||||
file,
|
||||
size=(2000, 2000),
|
||||
|
@ -38,8 +32,9 @@ def upload_media(
|
|||
mimetype="image/webp",
|
||||
width=main_file.image.width,
|
||||
height=main_file.image.height,
|
||||
name=details.description if details else None,
|
||||
name=description or None,
|
||||
state=PostAttachmentStates.fetched,
|
||||
author=request.identity,
|
||||
)
|
||||
attachment.file.save(
|
||||
main_file.name,
|
||||
|
@ -50,27 +45,38 @@ def upload_media(
|
|||
thumbnail_file,
|
||||
)
|
||||
attachment.save()
|
||||
return attachment.to_mastodon_json()
|
||||
return schemas.MediaAttachment.from_post_attachment(attachment)
|
||||
|
||||
|
||||
@api_router.get("/v1/media/{id}", response=schemas.MediaAttachment)
|
||||
@identity_required
|
||||
@scope_required("read:media")
|
||||
@api_view.get
|
||||
def get_media(
|
||||
request,
|
||||
id: str,
|
||||
):
|
||||
) -> schemas.MediaAttachment:
|
||||
attachment = get_object_or_404(PostAttachment, pk=id)
|
||||
return attachment.to_mastodon_json()
|
||||
if attachment.post:
|
||||
if attachment.post.author != request.identity:
|
||||
raise ApiError(401, "Not the author of this attachment")
|
||||
elif attachment.author and attachment.author != request.identity:
|
||||
raise ApiError(401, "Not the author of this attachment")
|
||||
return schemas.MediaAttachment.from_post_attachment(attachment)
|
||||
|
||||
|
||||
@api_router.put("/v1/media/{id}", response=schemas.MediaAttachment)
|
||||
@identity_required
|
||||
@scope_required("write:media")
|
||||
@api_view.put
|
||||
def update_media(
|
||||
request,
|
||||
id: str,
|
||||
details: UploadMediaSchema | None = None,
|
||||
):
|
||||
description: QueryOrBody[str] = "",
|
||||
focus: QueryOrBody[str] = "0,0",
|
||||
) -> schemas.MediaAttachment:
|
||||
attachment = get_object_or_404(PostAttachment, pk=id)
|
||||
attachment.name = details.description if details else None
|
||||
if attachment.post:
|
||||
if attachment.post.author != request.identity:
|
||||
raise ApiError(401, "Not the author of this attachment")
|
||||
elif attachment.author != request.identity:
|
||||
raise ApiError(401, "Not the author of this attachment")
|
||||
attachment.name = description or None
|
||||
attachment.save()
|
||||
return attachment.to_mastodon_json()
|
||||
return schemas.MediaAttachment.from_post_attachment(attachment)
|
||||
|
|
|
@ -1,56 +1,100 @@
|
|||
from django.http import HttpRequest, HttpResponse
|
||||
from django.http import HttpRequest
|
||||
from django.shortcuts import get_object_or_404
|
||||
from hatchway import ApiResponse, api_view
|
||||
|
||||
from activities.models import PostInteraction, TimelineEvent
|
||||
from activities.services import TimelineService
|
||||
from api import schemas
|
||||
from api.decorators import identity_required
|
||||
from api.pagination import MastodonPaginator
|
||||
from api.views.base import api_router
|
||||
from api.decorators import scope_required
|
||||
from api.pagination import MastodonPaginator, PaginatingApiResponse, PaginationResult
|
||||
|
||||
# Types/exclude_types use weird syntax so we have to handle them manually
|
||||
NOTIFICATION_TYPES = {
|
||||
"favourite": TimelineEvent.Types.liked,
|
||||
"reblog": TimelineEvent.Types.boosted,
|
||||
"mention": TimelineEvent.Types.mentioned,
|
||||
"follow": TimelineEvent.Types.followed,
|
||||
"admin.sign_up": TimelineEvent.Types.identity_created,
|
||||
}
|
||||
|
||||
|
||||
@api_router.get("/v1/notifications", response=list[schemas.Notification])
|
||||
@identity_required
|
||||
@scope_required("read:notifications")
|
||||
@api_view.get
|
||||
def notifications(
|
||||
request: HttpRequest,
|
||||
response: HttpResponse,
|
||||
max_id: str | None = None,
|
||||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 20,
|
||||
account_id: str | None = None,
|
||||
):
|
||||
# Types/exclude_types use weird syntax so we have to handle them manually
|
||||
base_types = {
|
||||
"favourite": TimelineEvent.Types.liked,
|
||||
"reblog": TimelineEvent.Types.boosted,
|
||||
"mention": TimelineEvent.Types.mentioned,
|
||||
"follow": TimelineEvent.Types.followed,
|
||||
}
|
||||
) -> ApiResponse[list[schemas.Notification]]:
|
||||
requested_types = set(request.GET.getlist("types[]"))
|
||||
excluded_types = set(request.GET.getlist("exclude_types[]"))
|
||||
if not requested_types:
|
||||
requested_types = set(base_types.keys())
|
||||
requested_types = set(NOTIFICATION_TYPES.keys())
|
||||
requested_types.difference_update(excluded_types)
|
||||
# Use that to pull relevant events
|
||||
queryset = TimelineService(request.identity).notifications(
|
||||
[base_types[r] for r in requested_types]
|
||||
[NOTIFICATION_TYPES[r] for r in requested_types if r in NOTIFICATION_TYPES]
|
||||
)
|
||||
paginator = MastodonPaginator(TimelineEvent)
|
||||
pager = paginator.paginate(
|
||||
paginator = MastodonPaginator()
|
||||
pager: PaginationResult[TimelineEvent] = paginator.paginate(
|
||||
queryset,
|
||||
min_id=min_id,
|
||||
max_id=max_id,
|
||||
since_id=since_id,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
if pager.results:
|
||||
response.headers["Link"] = pager.link_header(request, ["limit", "account_id"])
|
||||
|
||||
interactions = PostInteraction.get_event_interactions(
|
||||
pager.results, request.identity
|
||||
pager.results,
|
||||
request.identity,
|
||||
)
|
||||
return [
|
||||
event.to_mastodon_notification_json(interactions=interactions)
|
||||
for event in pager.results
|
||||
]
|
||||
return PaginatingApiResponse(
|
||||
[
|
||||
schemas.Notification.from_timeline_event(event, interactions=interactions)
|
||||
for event in pager.results
|
||||
],
|
||||
request=request,
|
||||
include_params=["limit", "account_id"],
|
||||
)
|
||||
|
||||
|
||||
@scope_required("read:notifications")
|
||||
@api_view.get
|
||||
def get_notification(
|
||||
request: HttpRequest,
|
||||
id: str,
|
||||
) -> schemas.Notification:
|
||||
notification = get_object_or_404(
|
||||
TimelineService(request.identity).notifications(
|
||||
list(NOTIFICATION_TYPES.values())
|
||||
),
|
||||
id=id,
|
||||
)
|
||||
return schemas.Notification.from_timeline_event(notification)
|
||||
|
||||
|
||||
@scope_required("write:notifications")
|
||||
@api_view.post
|
||||
def dismiss_notifications(request: HttpRequest) -> dict:
|
||||
TimelineService(request.identity).notifications(
|
||||
list(NOTIFICATION_TYPES.values())
|
||||
).update(dismissed=True)
|
||||
|
||||
return {}
|
||||
|
||||
|
||||
@scope_required("write:notifications")
|
||||
@api_view.post
|
||||
def dismiss_notification(request: HttpRequest, id: str) -> dict:
|
||||
notification = get_object_or_404(
|
||||
TimelineService(request.identity).notifications(
|
||||
list(NOTIFICATION_TYPES.values())
|
||||
),
|
||||
id=id,
|
||||
)
|
||||
|
||||
notification.dismissed = True
|
||||
notification.save()
|
||||
|
||||
return {}
|
||||
|
|
|
@ -1,31 +1,60 @@
|
|||
import base64
|
||||
import json
|
||||
import secrets
|
||||
import time
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
|
||||
from django.contrib.auth.mixins import LoginRequiredMixin
|
||||
from django.http import HttpResponseRedirect, JsonResponse
|
||||
from django.http import (
|
||||
HttpResponse,
|
||||
HttpResponseForbidden,
|
||||
HttpResponseRedirect,
|
||||
JsonResponse,
|
||||
)
|
||||
from django.shortcuts import render
|
||||
from django.utils import timezone
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
from django.views.generic import TemplateView, View
|
||||
from django.views.generic import View
|
||||
|
||||
from api.models import Application, Token
|
||||
from api.parser import FormOrJsonParser
|
||||
from api.models import Application, Authorization, Token
|
||||
|
||||
|
||||
class OauthRedirect(HttpResponseRedirect):
|
||||
def __init__(self, redirect_uri, key, value):
|
||||
def __init__(self, redirect_uri, **kwargs):
|
||||
url_parts = urlparse(redirect_uri)
|
||||
self.allowed_schemes = [url_parts.scheme]
|
||||
# Either add or join the query section
|
||||
url_parts = list(url_parts)
|
||||
if url_parts[4]:
|
||||
url_parts[4] = url_parts[4] + f"&{key}={value}"
|
||||
else:
|
||||
url_parts[4] = f"{key}={value}"
|
||||
|
||||
query_string = url_parts[4]
|
||||
|
||||
for key, value in kwargs.items():
|
||||
if value is None:
|
||||
continue
|
||||
if not query_string:
|
||||
query_string = f"{key}={value}"
|
||||
else:
|
||||
query_string += f"&{key}={value}"
|
||||
|
||||
url_parts[4] = query_string
|
||||
super().__init__(urlunparse(url_parts))
|
||||
|
||||
|
||||
class AuthorizationView(LoginRequiredMixin, TemplateView):
|
||||
def get_json_and_formdata(request):
|
||||
# Did they submit JSON?
|
||||
if request.content_type == "application/json" and request.body.strip():
|
||||
return json.loads(request.body)
|
||||
# Fall back to form data
|
||||
value = {}
|
||||
for key, item in request.POST.items():
|
||||
value[key] = item
|
||||
for key, item in request.GET.items():
|
||||
value[key] = item
|
||||
return value
|
||||
|
||||
|
||||
class AuthorizationView(LoginRequiredMixin, View):
|
||||
"""
|
||||
Asks the user to authorize access.
|
||||
|
||||
|
@ -33,52 +62,110 @@ class AuthorizationView(LoginRequiredMixin, TemplateView):
|
|||
POST manually.
|
||||
"""
|
||||
|
||||
template_name = "api/oauth_authorize.html"
|
||||
|
||||
def get_context_data(self):
|
||||
def get(self, request):
|
||||
redirect_uri = self.request.GET["redirect_uri"]
|
||||
scope = self.request.GET.get("scope", "read")
|
||||
try:
|
||||
application = Application.objects.get(
|
||||
client_id=self.request.GET["client_id"]
|
||||
state = self.request.GET.get("state")
|
||||
|
||||
response_type = self.request.GET.get("response_type")
|
||||
if response_type != "code":
|
||||
return render(
|
||||
request,
|
||||
"api/oauth_error.html",
|
||||
{"error": f"Invalid response type '{response_type}'"},
|
||||
status=400,
|
||||
)
|
||||
except (Application.DoesNotExist, KeyError):
|
||||
return OauthRedirect(redirect_uri, "error", "invalid_application")
|
||||
return {
|
||||
|
||||
application = Application.objects.filter(
|
||||
client_id=self.request.GET.get("client_id"),
|
||||
).first()
|
||||
|
||||
if application is None:
|
||||
return render(
|
||||
request,
|
||||
"api/oauth_error.html",
|
||||
{"error": "Invalid client_id"},
|
||||
status=400,
|
||||
)
|
||||
|
||||
if application.redirect_uris and redirect_uri not in application.redirect_uris:
|
||||
return render(
|
||||
request,
|
||||
"api/oauth_error.html",
|
||||
{"error": "Invalid application redirect URI"},
|
||||
status=401,
|
||||
)
|
||||
|
||||
context = {
|
||||
"application": application,
|
||||
"state": state,
|
||||
"redirect_uri": redirect_uri,
|
||||
"scope": scope,
|
||||
"identities": self.request.user.identities.all(),
|
||||
}
|
||||
return render(request, "api/oauth_authorize.html", context)
|
||||
|
||||
def post(self, request):
|
||||
post_data = FormOrJsonParser().parse_body(request)
|
||||
post_data = get_json_and_formdata(request)
|
||||
# Grab the application and other details again
|
||||
redirect_uri = post_data["redirect_uri"]
|
||||
scope = post_data["scope"]
|
||||
application = Application.objects.get(client_id=post_data["client_id"])
|
||||
# Get the identity
|
||||
identity = self.request.user.identities.get(pk=post_data["identity"])
|
||||
|
||||
extra_args = {}
|
||||
if post_data.get("state"):
|
||||
extra_args["state"] = post_data["state"]
|
||||
|
||||
# Make a token
|
||||
token = Token.objects.create(
|
||||
token = Authorization.objects.create(
|
||||
application=application,
|
||||
user=self.request.user,
|
||||
identity=identity,
|
||||
token=secrets.token_urlsafe(32),
|
||||
code=secrets.token_urlsafe(16),
|
||||
code=secrets.token_urlsafe(43),
|
||||
redirect_uri=redirect_uri,
|
||||
scopes=scope.split(),
|
||||
)
|
||||
# If it's an out of band request, show the code
|
||||
if redirect_uri == "urn:ietf:wg:oauth:2.0:oob":
|
||||
return render(request, "api/oauth_code.html", {"code": token.code})
|
||||
# Redirect with the token's code
|
||||
return OauthRedirect(redirect_uri, "code", token.code)
|
||||
return OauthRedirect(redirect_uri, code=token.code, **extra_args)
|
||||
|
||||
|
||||
def extract_client_info_from_basic_auth(request):
|
||||
if "authorization" in request.headers:
|
||||
auth = request.headers["authorization"].split()
|
||||
if len(auth) == 2:
|
||||
if auth[0].lower() == "basic":
|
||||
client_id, client_secret = (
|
||||
base64.b64decode(auth[1]).decode("utf8").split(":", 1)
|
||||
)
|
||||
|
||||
return client_id, client_secret
|
||||
return None, None
|
||||
|
||||
|
||||
@method_decorator(csrf_exempt, name="dispatch")
|
||||
class TokenView(View):
|
||||
def verify_code(
|
||||
self, authorization: Authorization, client_id, client_secret, redirect_uri
|
||||
):
|
||||
application = authorization.application
|
||||
return (
|
||||
application.client_id == client_id
|
||||
and application.client_secret == client_secret
|
||||
and authorization.redirect_uri == redirect_uri
|
||||
)
|
||||
|
||||
def post(self, request):
|
||||
post_data = FormOrJsonParser().parse_body(request)
|
||||
post_data = get_json_and_formdata(request)
|
||||
auth_client_id, auth_client_secret = extract_client_info_from_basic_auth(
|
||||
request
|
||||
)
|
||||
post_data.setdefault("client_id", auth_client_id)
|
||||
post_data.setdefault("client_secret", auth_client_secret)
|
||||
|
||||
grant_type = post_data.get("grant_type")
|
||||
if grant_type not in (
|
||||
|
@ -87,25 +174,60 @@ class TokenView(View):
|
|||
):
|
||||
return JsonResponse({"error": "invalid_grant_type"}, status=400)
|
||||
|
||||
try:
|
||||
application = Application.objects.get(client_id=post_data["client_id"])
|
||||
except (Application.DoesNotExist, KeyError):
|
||||
return JsonResponse({"error": "invalid_client_id"}, status=400)
|
||||
# TODO: Implement client credentials flow
|
||||
if grant_type == "client_credentials":
|
||||
return JsonResponse({"error": "invalid_grant_type"}, status=400)
|
||||
# We don't support individual client credential tokens, but instead
|
||||
# just have a fixed one (since anyone can register an app at any
|
||||
# time anyway)
|
||||
return JsonResponse(
|
||||
{
|
||||
"access_token": "__app__",
|
||||
"token_type": "Bearer",
|
||||
"scope": "read",
|
||||
"created_at": int(time.time()),
|
||||
}
|
||||
)
|
||||
elif grant_type == "authorization_code":
|
||||
code = post_data.get("code")
|
||||
if not code:
|
||||
return JsonResponse({"error": "invalid_code"}, status=400)
|
||||
# Retrieve the token by code
|
||||
# TODO: Check code expiry based on created date
|
||||
try:
|
||||
token = Token.objects.get(code=code, application=application)
|
||||
except Token.DoesNotExist:
|
||||
return JsonResponse({"error": "invalid_code"}, status=400)
|
||||
# Update the token to remove its code
|
||||
token.code = None
|
||||
return JsonResponse(
|
||||
{
|
||||
"error": "invalid_request",
|
||||
"error_description": "Required param : code",
|
||||
},
|
||||
status=400,
|
||||
)
|
||||
|
||||
authorization = Authorization.objects.get(code=code)
|
||||
if (
|
||||
not authorization
|
||||
or timezone.now() - authorization.created
|
||||
> timezone.timedelta(seconds=authorization.valid_for_seconds)
|
||||
):
|
||||
return JsonResponse({"error": "access_denied"}, status=401)
|
||||
|
||||
application = Application.objects.filter(
|
||||
client_id=post_data["client_id"],
|
||||
client_secret=post_data["client_secret"],
|
||||
).first()
|
||||
|
||||
code_verified = self.verify_code(
|
||||
authorization,
|
||||
client_id=post_data.get("client_id"),
|
||||
client_secret=post_data.get("client_secret"),
|
||||
redirect_uri=post_data.get("redirect_uri"),
|
||||
)
|
||||
|
||||
if not application or authorization.token or not code_verified:
|
||||
# this authorization code has already been used
|
||||
return JsonResponse({"error": "access_denied"}, status=401)
|
||||
|
||||
token = Token.objects.create(
|
||||
application=application,
|
||||
user=authorization.user,
|
||||
identity=authorization.identity,
|
||||
token=secrets.token_urlsafe(43),
|
||||
scopes=authorization.scopes,
|
||||
)
|
||||
token.save()
|
||||
# Return them the token
|
||||
return JsonResponse(
|
||||
|
@ -118,5 +240,26 @@ class TokenView(View):
|
|||
)
|
||||
|
||||
|
||||
@method_decorator(csrf_exempt, name="dispatch")
|
||||
class RevokeTokenView(View):
|
||||
pass
|
||||
def post(self, request):
|
||||
post_data = get_json_and_formdata(request)
|
||||
auth_client_id, auth_client_secret = extract_client_info_from_basic_auth(
|
||||
request
|
||||
)
|
||||
post_data.setdefault("client_id", auth_client_id)
|
||||
post_data.setdefault("client_secret", auth_client_secret)
|
||||
token_str = post_data["token"]
|
||||
|
||||
application = Application.objects.filter(
|
||||
client_id=post_data["client_id"],
|
||||
client_secret=post_data["client_secret"],
|
||||
).first()
|
||||
|
||||
token = Token.objects.filter(application=application, token=token_str).first()
|
||||
if token is None:
|
||||
return HttpResponseForbidden()
|
||||
|
||||
token.revoked = timezone.now()
|
||||
token.save()
|
||||
return HttpResponse("")
|
||||
|
|
26
api/views/polls.py
Normal file
26
api/views/polls.py
Normal file
|
@ -0,0 +1,26 @@
|
|||
from django.shortcuts import get_object_or_404
|
||||
from hatchway import Schema, api_view
|
||||
|
||||
from activities.models import Post, PostInteraction
|
||||
from api import schemas
|
||||
from api.decorators import scope_required
|
||||
|
||||
|
||||
class PostVoteSchema(Schema):
|
||||
choices: list[int]
|
||||
|
||||
|
||||
@scope_required("read:statuses")
|
||||
@api_view.get
|
||||
def get_poll(request, id: str) -> schemas.Poll:
|
||||
post = get_object_or_404(Post, pk=id, type=Post.Types.question)
|
||||
return schemas.Poll.from_post(post, identity=request.identity)
|
||||
|
||||
|
||||
@scope_required("write:statuses")
|
||||
@api_view.post
|
||||
def vote_poll(request, id: str, details: PostVoteSchema) -> schemas.Poll:
|
||||
post = get_object_or_404(Post, pk=id, type=Post.Types.question)
|
||||
PostInteraction.create_votes(post, request.identity, details.choices)
|
||||
post.refresh_from_db()
|
||||
return schemas.Poll.from_post(post, identity=request.identity)
|
13
api/views/preferences.py
Normal file
13
api/views/preferences.py
Normal file
|
@ -0,0 +1,13 @@
|
|||
from django.http import HttpRequest
|
||||
from hatchway import api_view
|
||||
|
||||
from api import schemas
|
||||
from api.decorators import scope_required
|
||||
|
||||
|
||||
@scope_required("read:accounts")
|
||||
@api_view.get
|
||||
def preferences(request: HttpRequest) -> dict:
|
||||
# Ideally this should just return Preferences; maybe hatchway needs a way to
|
||||
# indicate response models should be serialized by alias?
|
||||
return schemas.Preferences.from_identity(request.identity).dict(by_alias=True)
|
70
api/views/push.py
Normal file
70
api/views/push.py
Normal file
|
@ -0,0 +1,70 @@
|
|||
from django.conf import settings
|
||||
from django.http import Http404
|
||||
from hatchway import ApiError, QueryOrBody, api_view
|
||||
|
||||
from api import schemas
|
||||
from api.decorators import scope_required
|
||||
|
||||
|
||||
@scope_required("push")
|
||||
@api_view.post
|
||||
def create_subscription(
|
||||
request,
|
||||
subscription: QueryOrBody[schemas.PushSubscriptionCreation],
|
||||
data: QueryOrBody[schemas.PushData],
|
||||
) -> schemas.PushSubscription:
|
||||
# First, check the server is set up to do push notifications
|
||||
if not settings.SETUP.VAPID_PRIVATE_KEY:
|
||||
raise Http404("Push not available")
|
||||
# Then, register this with our token
|
||||
request.token.set_push_subscription(
|
||||
{
|
||||
"endpoint": subscription.endpoint,
|
||||
"keys": subscription.keys,
|
||||
"alerts": data.alerts,
|
||||
"policy": data.policy,
|
||||
}
|
||||
)
|
||||
# Then return the subscription
|
||||
return schemas.PushSubscription.from_token(request.token) # type:ignore
|
||||
|
||||
|
||||
@scope_required("push")
|
||||
@api_view.get
|
||||
def get_subscription(request) -> schemas.PushSubscription:
|
||||
# First, check the server is set up to do push notifications
|
||||
if not settings.SETUP.VAPID_PRIVATE_KEY:
|
||||
raise Http404("Push not available")
|
||||
# Get the subscription if it exists
|
||||
subscription = schemas.PushSubscription.from_token(request.token)
|
||||
if not subscription:
|
||||
raise ApiError(404, "Not Found")
|
||||
return subscription
|
||||
|
||||
|
||||
@scope_required("push")
|
||||
@api_view.put
|
||||
def update_subscription(
|
||||
request, data: QueryOrBody[schemas.PushData]
|
||||
) -> schemas.PushSubscription:
|
||||
# First, check the server is set up to do push notifications
|
||||
if not settings.SETUP.VAPID_PRIVATE_KEY:
|
||||
raise Http404("Push not available")
|
||||
# Get the subscription if it exists
|
||||
subscription = schemas.PushSubscription.from_token(request.token)
|
||||
if not subscription:
|
||||
raise ApiError(404, "Not Found")
|
||||
# Update the subscription
|
||||
subscription.alerts = data.alerts
|
||||
subscription.policy = data.policy
|
||||
request.token.set_push_subscription(subscription)
|
||||
# Then return the subscription
|
||||
return schemas.PushSubscription.from_token(request.token) # type:ignore
|
||||
|
||||
|
||||
@scope_required("push")
|
||||
@api_view.delete
|
||||
def delete_subscription(request) -> dict:
|
||||
# Unset the subscription
|
||||
request.token.push_subscription = None
|
||||
return {}
|
|
@ -1,20 +1,19 @@
|
|||
from typing import Literal
|
||||
|
||||
from ninja import Field
|
||||
from hatchway import Field, api_view
|
||||
|
||||
from activities.models import PostInteraction
|
||||
from activities.services.search import SearchService
|
||||
from api import schemas
|
||||
from api.decorators import identity_required
|
||||
from api.views.base import api_router
|
||||
from api.decorators import scope_required
|
||||
|
||||
|
||||
@api_router.get("/v2/search", response=schemas.Search)
|
||||
@identity_required
|
||||
@scope_required("read")
|
||||
@api_view.get
|
||||
def search(
|
||||
request,
|
||||
q: str,
|
||||
type: Literal["accounts", "hashtags", "statuses"] | None = None,
|
||||
type: Literal["accounts", "hashtags", "statuses", ""] | None = None,
|
||||
fetch_identities: bool = Field(False, alias="resolve"),
|
||||
following: bool = False,
|
||||
exclude_unreviewed: bool = False,
|
||||
|
@ -24,26 +23,35 @@ def search(
|
|||
min_id: str | None = None,
|
||||
limit: int = 20,
|
||||
offset: int = 0,
|
||||
):
|
||||
) -> schemas.Search:
|
||||
if limit > 40:
|
||||
limit = 40
|
||||
result: dict[str, list] = {"accounts": [], "statuses": [], "hashtags": []}
|
||||
# We don't support pagination for searches yet
|
||||
if max_id or since_id or min_id or offset:
|
||||
return result
|
||||
return schemas.Search(**result)
|
||||
# Run search
|
||||
searcher = SearchService(q, request.identity)
|
||||
search_result = searcher.search_all()
|
||||
if type == "":
|
||||
type = None
|
||||
if type is None or type == "accounts":
|
||||
result["accounts"] = [i.to_mastodon_json() for i in search_result["identities"]]
|
||||
result["accounts"] = [
|
||||
schemas.Account.from_identity(i, include_counts=False)
|
||||
for i in search_result["identities"]
|
||||
]
|
||||
if type is None or type == "hashtag":
|
||||
result["hashtag"] = [h.to_mastodon_json() for h in search_result["hashtags"]]
|
||||
result["hashtags"] = [
|
||||
schemas.Tag.from_hashtag(h) for h in search_result["hashtags"]
|
||||
]
|
||||
if type is None or type == "statuses":
|
||||
interactions = PostInteraction.get_post_interactions(
|
||||
search_result["posts"], request.identity
|
||||
)
|
||||
result["statuses"] = [
|
||||
p.to_mastodon_json(interactions=interactions)
|
||||
schemas.Status.from_post(
|
||||
p, interactions=interactions, identity=request.identity
|
||||
)
|
||||
for p in search_result["posts"]
|
||||
]
|
||||
return result
|
||||
return schemas.Search(**result)
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
from datetime import timedelta
|
||||
from typing import Literal
|
||||
|
||||
from django.forms import ValidationError
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.http import HttpRequest
|
||||
from django.shortcuts import get_object_or_404
|
||||
from ninja import Schema
|
||||
from django.utils import timezone
|
||||
from hatchway import ApiError, ApiResponse, Schema, api_view
|
||||
|
||||
from activities.models import (
|
||||
Post,
|
||||
|
@ -14,16 +15,31 @@ from activities.models import (
|
|||
)
|
||||
from activities.services import PostService
|
||||
from api import schemas
|
||||
from api.views.base import api_router
|
||||
from api.decorators import scope_required
|
||||
from api.pagination import MastodonPaginator, PaginatingApiResponse, PaginationResult
|
||||
from core.models import Config
|
||||
from users.models import Identity
|
||||
|
||||
from ..decorators import identity_required
|
||||
from ..pagination import MastodonPaginator
|
||||
|
||||
class PostPollSchema(Schema):
|
||||
options: list[str]
|
||||
expires_in: int
|
||||
multiple: bool = False
|
||||
hide_totals: bool = False
|
||||
|
||||
def dict(self):
|
||||
return {
|
||||
"type": "Question",
|
||||
"mode": "anyOf" if self.multiple else "oneOf",
|
||||
"options": [
|
||||
{"name": name, "type": "Note", "votes": 0} for name in self.options
|
||||
],
|
||||
"voter_count": 0,
|
||||
"end_time": timezone.now() + timedelta(seconds=self.expires_in),
|
||||
}
|
||||
|
||||
|
||||
class PostStatusSchema(Schema):
|
||||
status: str
|
||||
status: str | None
|
||||
in_reply_to_id: str | None = None
|
||||
sensitive: bool = False
|
||||
spoiler_text: str | None = None
|
||||
|
@ -31,16 +47,45 @@ class PostStatusSchema(Schema):
|
|||
language: str | None = None
|
||||
scheduled_at: str | None = None
|
||||
media_ids: list[str] = []
|
||||
poll: PostPollSchema | None = None
|
||||
|
||||
|
||||
@api_router.post("/v1/statuses", response=schemas.Status)
|
||||
@identity_required
|
||||
def post_status(request, details: PostStatusSchema):
|
||||
class MediaAttributesSchema(Schema):
|
||||
id: str
|
||||
description: str
|
||||
|
||||
|
||||
class EditStatusSchema(Schema):
|
||||
status: str
|
||||
sensitive: bool = False
|
||||
spoiler_text: str | None = None
|
||||
language: str | None = None
|
||||
media_ids: list[str] = []
|
||||
media_attributes: list[MediaAttributesSchema] = []
|
||||
|
||||
|
||||
def post_for_id(request: HttpRequest, id: str) -> Post:
|
||||
"""
|
||||
Common logic to get a Post object for an ID, taking visibility into
|
||||
account.
|
||||
"""
|
||||
if request.identity:
|
||||
queryset = Post.objects.not_hidden().visible_to(
|
||||
request.identity, include_replies=True
|
||||
)
|
||||
else:
|
||||
queryset = Post.objects.not_hidden().unlisted()
|
||||
return get_object_or_404(queryset, pk=id)
|
||||
|
||||
|
||||
@scope_required("write:statuses")
|
||||
@api_view.post
|
||||
def post_status(request, details: PostStatusSchema) -> schemas.Status:
|
||||
# Check text length
|
||||
if len(details.status) > Config.system.post_length:
|
||||
raise ValidationError("Status is too long")
|
||||
if len(details.status) == 0 and not details.media_ids:
|
||||
raise ValidationError("Status is empty")
|
||||
if details.status and len(details.status) > Config.system.post_length:
|
||||
raise ApiError(400, "Status is too long")
|
||||
if not details.status and not details.media_ids:
|
||||
raise ApiError(400, "Status is empty")
|
||||
# Grab attachments
|
||||
attachments = [get_object_or_404(PostAttachment, pk=id) for id in details.media_ids]
|
||||
# Create the Post
|
||||
|
@ -58,128 +103,264 @@ def post_status(request, details: PostStatusSchema):
|
|||
pass
|
||||
post = Post.create_local(
|
||||
author=request.identity,
|
||||
content=details.status,
|
||||
content=details.status or "",
|
||||
summary=details.spoiler_text,
|
||||
sensitive=details.sensitive,
|
||||
visibility=visibility_map[details.visibility],
|
||||
reply_to=reply_post,
|
||||
attachments=attachments,
|
||||
question=details.poll.dict() if details.poll else None,
|
||||
)
|
||||
# Add their own timeline event for immediate visibility
|
||||
TimelineEvent.add_post(request.identity, post)
|
||||
return post.to_mastodon_json()
|
||||
return schemas.Status.from_post(post, identity=request.identity)
|
||||
|
||||
|
||||
@api_router.get("/v1/statuses/{id}", response=schemas.Status)
|
||||
@identity_required
|
||||
def status(request, id: str):
|
||||
post = get_object_or_404(Post, pk=id)
|
||||
@scope_required("read:statuses")
|
||||
@api_view.get
|
||||
def status(request, id: str) -> schemas.Status:
|
||||
post = post_for_id(request, id)
|
||||
interactions = PostInteraction.get_post_interactions([post], request.identity)
|
||||
return post.to_mastodon_json(interactions=interactions)
|
||||
return schemas.Status.from_post(
|
||||
post, interactions=interactions, identity=request.identity
|
||||
)
|
||||
|
||||
|
||||
@api_router.delete("/v1/statuses/{id}", response=schemas.Status)
|
||||
@identity_required
|
||||
def delete_status(request, id: str):
|
||||
post = get_object_or_404(Post, pk=id)
|
||||
@scope_required("write:statuses")
|
||||
@api_view.put
|
||||
def edit_status(request, id: str, details: EditStatusSchema) -> schemas.Status:
|
||||
post = post_for_id(request, id)
|
||||
if post.author != request.identity:
|
||||
raise ApiError(401, "Not the author of this status")
|
||||
# Grab attachments
|
||||
attachments = [get_object_or_404(PostAttachment, pk=id) for id in details.media_ids]
|
||||
# Update all details, as the client must provide them all
|
||||
post.edit_local(
|
||||
content=details.status,
|
||||
summary=details.spoiler_text,
|
||||
sensitive=details.sensitive,
|
||||
attachments=attachments,
|
||||
attachment_attributes=details.media_attributes,
|
||||
)
|
||||
return schemas.Status.from_post(post)
|
||||
|
||||
|
||||
@scope_required("write:statuses")
|
||||
@api_view.delete
|
||||
def delete_status(request, id: str) -> schemas.Status:
|
||||
post = post_for_id(request, id)
|
||||
if post.author != request.identity:
|
||||
raise ApiError(401, "Not the author of this status")
|
||||
PostService(post).delete()
|
||||
return post.to_mastodon_json()
|
||||
return schemas.Status.from_post(post, identity=request.identity)
|
||||
|
||||
|
||||
@api_router.get("/v1/statuses/{id}/context", response=schemas.Context)
|
||||
@identity_required
|
||||
def status_context(request, id: str):
|
||||
post = get_object_or_404(Post, pk=id)
|
||||
@scope_required("read:statuses")
|
||||
@api_view.get
|
||||
def status_source(request, id: str) -> schemas.StatusSource:
|
||||
post = post_for_id(request, id)
|
||||
return schemas.StatusSource.from_post(post)
|
||||
|
||||
|
||||
@scope_required("read:statuses")
|
||||
@api_view.get
|
||||
def status_context(request, id: str) -> schemas.Context:
|
||||
post = post_for_id(request, id)
|
||||
service = PostService(post)
|
||||
ancestors, descendants = service.context(request.identity)
|
||||
interactions = PostInteraction.get_post_interactions(
|
||||
ancestors + descendants, request.identity
|
||||
)
|
||||
return {
|
||||
"ancestors": [
|
||||
p.to_mastodon_json(interactions=interactions) for p in reversed(ancestors)
|
||||
return schemas.Context(
|
||||
ancestors=[
|
||||
schemas.Status.from_post(
|
||||
p, interactions=interactions, identity=request.identity
|
||||
)
|
||||
for p in reversed(ancestors)
|
||||
],
|
||||
"descendants": [
|
||||
p.to_mastodon_json(interactions=interactions) for p in descendants
|
||||
descendants=[
|
||||
schemas.Status.from_post(
|
||||
p, interactions=interactions, identity=request.identity
|
||||
)
|
||||
for p in descendants
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@api_router.post("/v1/statuses/{id}/favourite", response=schemas.Status)
|
||||
@identity_required
|
||||
def favourite_status(request, id: str):
|
||||
post = get_object_or_404(Post, pk=id)
|
||||
@scope_required("write:favourites")
|
||||
@api_view.post
|
||||
def favourite_status(request, id: str) -> schemas.Status:
|
||||
post = post_for_id(request, id)
|
||||
service = PostService(post)
|
||||
service.like_as(request.identity)
|
||||
interactions = PostInteraction.get_post_interactions([post], request.identity)
|
||||
return post.to_mastodon_json(interactions=interactions)
|
||||
return schemas.Status.from_post(
|
||||
post, interactions=interactions, identity=request.identity
|
||||
)
|
||||
|
||||
|
||||
@api_router.post("/v1/statuses/{id}/unfavourite", response=schemas.Status)
|
||||
@identity_required
|
||||
def unfavourite_status(request, id: str):
|
||||
post = get_object_or_404(Post, pk=id)
|
||||
@scope_required("write:favourites")
|
||||
@api_view.post
|
||||
def unfavourite_status(request, id: str) -> schemas.Status:
|
||||
post = post_for_id(request, id)
|
||||
service = PostService(post)
|
||||
service.unlike_as(request.identity)
|
||||
interactions = PostInteraction.get_post_interactions([post], request.identity)
|
||||
return post.to_mastodon_json(interactions=interactions)
|
||||
return schemas.Status.from_post(
|
||||
post, interactions=interactions, identity=request.identity
|
||||
)
|
||||
|
||||
|
||||
@api_router.get("/v1/statuses/{id}/favourited_by", response=list[schemas.Account])
|
||||
@api_view.get
|
||||
def favourited_by(
|
||||
request: HttpRequest,
|
||||
response: HttpResponse,
|
||||
id: str,
|
||||
max_id: str | None = None,
|
||||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 20,
|
||||
):
|
||||
) -> ApiResponse[list[schemas.Account]]:
|
||||
"""
|
||||
View who favourited a given status.
|
||||
"""
|
||||
# This method should filter out private statuses, but we don't really have
|
||||
# a concept of "private status" yet.
|
||||
post = get_object_or_404(Post, pk=id)
|
||||
post = post_for_id(request, id)
|
||||
|
||||
paginator = MastodonPaginator(Identity, sort_attribute="published")
|
||||
pager = paginator.paginate(
|
||||
paginator = MastodonPaginator()
|
||||
pager: PaginationResult[PostInteraction] = paginator.paginate(
|
||||
post.interactions.filter(
|
||||
type=PostInteraction.Types.like,
|
||||
state__in=PostInteractionStates.group_active(),
|
||||
)
|
||||
.select_related("identity")
|
||||
.order_by("published"),
|
||||
).select_related("identity"),
|
||||
min_id=min_id,
|
||||
max_id=max_id,
|
||||
since_id=since_id,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
if pager.results:
|
||||
response.headers["Link"] = pager.link_header(
|
||||
request,
|
||||
["limit"],
|
||||
)
|
||||
|
||||
return [result.identity.to_mastodon_json() for result in pager.results]
|
||||
return PaginatingApiResponse(
|
||||
[
|
||||
schemas.Account.from_identity(
|
||||
interaction.identity,
|
||||
include_counts=False,
|
||||
)
|
||||
for interaction in pager.results
|
||||
],
|
||||
request=request,
|
||||
include_params=[
|
||||
"limit",
|
||||
"id",
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
@api_router.post("/v1/statuses/{id}/reblog", response=schemas.Status)
|
||||
@identity_required
|
||||
def reblog_status(request, id: str):
|
||||
post = get_object_or_404(Post, pk=id)
|
||||
@api_view.get
|
||||
def reblogged_by(
|
||||
request: HttpRequest,
|
||||
id: str,
|
||||
max_id: str | None = None,
|
||||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 20,
|
||||
) -> ApiResponse[list[schemas.Account]]:
|
||||
"""
|
||||
View who reblogged a given status.
|
||||
"""
|
||||
post = post_for_id(request, id)
|
||||
|
||||
paginator = MastodonPaginator()
|
||||
pager: PaginationResult[PostInteraction] = paginator.paginate(
|
||||
post.interactions.filter(
|
||||
type=PostInteraction.Types.boost,
|
||||
state__in=PostInteractionStates.group_active(),
|
||||
).select_related("identity"),
|
||||
min_id=min_id,
|
||||
max_id=max_id,
|
||||
since_id=since_id,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
return PaginatingApiResponse(
|
||||
[
|
||||
schemas.Account.from_identity(
|
||||
interaction.identity,
|
||||
include_counts=False,
|
||||
)
|
||||
for interaction in pager.results
|
||||
],
|
||||
request=request,
|
||||
include_params=[
|
||||
"limit",
|
||||
"id",
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
@scope_required("write:favourites")
|
||||
@api_view.post
|
||||
def reblog_status(request, id: str) -> schemas.Status:
|
||||
post = post_for_id(request, id)
|
||||
service = PostService(post)
|
||||
service.boost_as(request.identity)
|
||||
interactions = PostInteraction.get_post_interactions([post], request.identity)
|
||||
return post.to_mastodon_json(interactions=interactions)
|
||||
return schemas.Status.from_post(
|
||||
post, interactions=interactions, identity=request.identity
|
||||
)
|
||||
|
||||
|
||||
@api_router.post("/v1/statuses/{id}/unreblog", response=schemas.Status)
|
||||
@identity_required
|
||||
def unreblog_status(request, id: str):
|
||||
post = get_object_or_404(Post, pk=id)
|
||||
@scope_required("write:favourites")
|
||||
@api_view.post
|
||||
def unreblog_status(request, id: str) -> schemas.Status:
|
||||
post = post_for_id(request, id)
|
||||
service = PostService(post)
|
||||
service.unboost_as(request.identity)
|
||||
interactions = PostInteraction.get_post_interactions([post], request.identity)
|
||||
return post.to_mastodon_json(interactions=interactions)
|
||||
return schemas.Status.from_post(
|
||||
post, interactions=interactions, identity=request.identity
|
||||
)
|
||||
|
||||
|
||||
@scope_required("write:bookmarks")
|
||||
@api_view.post
|
||||
def bookmark_status(request, id: str) -> schemas.Status:
|
||||
post = post_for_id(request, id)
|
||||
request.identity.bookmarks.get_or_create(post=post)
|
||||
interactions = PostInteraction.get_post_interactions([post], request.identity)
|
||||
return schemas.Status.from_post(
|
||||
post, interactions=interactions, bookmarks={post.pk}, identity=request.identity
|
||||
)
|
||||
|
||||
|
||||
@scope_required("write:bookmarks")
|
||||
@api_view.post
|
||||
def unbookmark_status(request, id: str) -> schemas.Status:
|
||||
post = post_for_id(request, id)
|
||||
request.identity.bookmarks.filter(post=post).delete()
|
||||
interactions = PostInteraction.get_post_interactions([post], request.identity)
|
||||
return schemas.Status.from_post(
|
||||
post, interactions=interactions, identity=request.identity
|
||||
)
|
||||
|
||||
|
||||
@scope_required("write:accounts")
|
||||
@api_view.post
|
||||
def pin_status(request, id: str) -> schemas.Status:
|
||||
post = post_for_id(request, id)
|
||||
try:
|
||||
PostService(post).pin_as(request.identity)
|
||||
interactions = PostInteraction.get_post_interactions([post], request.identity)
|
||||
return schemas.Status.from_post(
|
||||
post, identity=request.identity, interactions=interactions
|
||||
)
|
||||
except ValueError as e:
|
||||
raise ApiError(422, str(e))
|
||||
|
||||
|
||||
@scope_required("write:accounts")
|
||||
@api_view.post
|
||||
def unpin_status(request, id: str) -> schemas.Status:
|
||||
post = post_for_id(request, id)
|
||||
PostService(post).unpin_as(request.identity)
|
||||
interactions = PostInteraction.get_post_interactions([post], request.identity)
|
||||
return schemas.Status.from_post(
|
||||
post, identity=request.identity, interactions=interactions
|
||||
)
|
||||
|
|
16
api/views/suggestions.py
Normal file
16
api/views/suggestions.py
Normal file
|
@ -0,0 +1,16 @@
|
|||
from django.http import HttpRequest
|
||||
from hatchway import api_view
|
||||
|
||||
from api import schemas
|
||||
from api.decorators import scope_required
|
||||
|
||||
|
||||
@scope_required("read")
|
||||
@api_view.get
|
||||
def suggested_users(
|
||||
request: HttpRequest,
|
||||
limit: int = 10,
|
||||
offset: int | None = None,
|
||||
) -> list[schemas.Account]:
|
||||
# We don't implement this yet
|
||||
return []
|
84
api/views/tags.py
Normal file
84
api/views/tags.py
Normal file
|
@ -0,0 +1,84 @@
|
|||
from django.http import HttpRequest
|
||||
from django.shortcuts import get_object_or_404
|
||||
from hatchway import api_view
|
||||
|
||||
from activities.models import Hashtag
|
||||
from api import schemas
|
||||
from api.decorators import scope_required
|
||||
from api.pagination import MastodonPaginator, PaginatingApiResponse, PaginationResult
|
||||
from users.models import HashtagFollow
|
||||
|
||||
|
||||
@api_view.get
|
||||
def hashtag(request: HttpRequest, hashtag: str) -> schemas.Tag:
|
||||
tag = get_object_or_404(
|
||||
Hashtag,
|
||||
pk=hashtag.lower(),
|
||||
)
|
||||
following = None
|
||||
if request.identity:
|
||||
following = tag.followers.filter(identity=request.identity).exists()
|
||||
|
||||
return schemas.Tag.from_hashtag(
|
||||
tag,
|
||||
following=following,
|
||||
)
|
||||
|
||||
|
||||
@scope_required("read:follows")
|
||||
@api_view.get
|
||||
def followed_tags(
|
||||
request: HttpRequest,
|
||||
max_id: str | None = None,
|
||||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 100,
|
||||
) -> list[schemas.Tag]:
|
||||
queryset = HashtagFollow.objects.by_identity(request.identity)
|
||||
paginator = MastodonPaginator()
|
||||
pager: PaginationResult[HashtagFollow] = paginator.paginate(
|
||||
queryset,
|
||||
min_id=min_id,
|
||||
max_id=max_id,
|
||||
since_id=since_id,
|
||||
limit=limit,
|
||||
)
|
||||
return PaginatingApiResponse(
|
||||
schemas.FollowedTag.map_from_follows(pager.results),
|
||||
request=request,
|
||||
include_params=["limit"],
|
||||
)
|
||||
|
||||
|
||||
@scope_required("write:follows")
|
||||
@api_view.post
|
||||
def follow(
|
||||
request: HttpRequest,
|
||||
id: str,
|
||||
) -> schemas.Tag:
|
||||
hashtag = get_object_or_404(
|
||||
Hashtag,
|
||||
pk=id.lower(),
|
||||
)
|
||||
request.identity.hashtag_follows.get_or_create(hashtag=hashtag)
|
||||
return schemas.Tag.from_hashtag(
|
||||
hashtag,
|
||||
following=True,
|
||||
)
|
||||
|
||||
|
||||
@scope_required("write:follows")
|
||||
@api_view.post
|
||||
def unfollow(
|
||||
request: HttpRequest,
|
||||
id: str,
|
||||
) -> schemas.Tag:
|
||||
hashtag = get_object_or_404(
|
||||
Hashtag,
|
||||
pk=id.lower(),
|
||||
)
|
||||
request.identity.hashtag_follows.filter(hashtag=hashtag).delete()
|
||||
return schemas.Tag.from_hashtag(
|
||||
hashtag,
|
||||
following=False,
|
||||
)
|
|
@ -1,50 +1,57 @@
|
|||
from django.http import HttpRequest, HttpResponse, JsonResponse
|
||||
from django.http import HttpRequest
|
||||
from hatchway import ApiError, ApiResponse, api_view
|
||||
|
||||
from activities.models import Post, PostInteraction
|
||||
from activities.models import Post, TimelineEvent
|
||||
from activities.services import TimelineService
|
||||
from api import schemas
|
||||
from api.decorators import identity_required
|
||||
from api.pagination import MastodonPaginator
|
||||
from api.views.base import api_router
|
||||
from api.decorators import scope_required
|
||||
from api.pagination import MastodonPaginator, PaginatingApiResponse, PaginationResult
|
||||
from core.models import Config
|
||||
|
||||
|
||||
@api_router.get("/v1/timelines/home", response=list[schemas.Status])
|
||||
@identity_required
|
||||
@scope_required("read:statuses")
|
||||
@api_view.get
|
||||
def home(
|
||||
request: HttpRequest,
|
||||
response: HttpResponse,
|
||||
max_id: str | None = None,
|
||||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 20,
|
||||
):
|
||||
paginator = MastodonPaginator(Post, sort_attribute="published")
|
||||
) -> ApiResponse[list[schemas.Status]]:
|
||||
# Grab a paginated result set of instances
|
||||
paginator = MastodonPaginator()
|
||||
queryset = TimelineService(request.identity).home()
|
||||
pager = paginator.paginate(
|
||||
queryset = queryset.select_related(
|
||||
"subject_post_interaction__post",
|
||||
"subject_post_interaction__post__author",
|
||||
"subject_post_interaction__post__author__domain",
|
||||
)
|
||||
queryset = queryset.prefetch_related(
|
||||
"subject_post__mentions__domain",
|
||||
"subject_post_interaction__post__attachments",
|
||||
"subject_post_interaction__post__mentions",
|
||||
"subject_post_interaction__post__emojis",
|
||||
"subject_post_interaction__post__mentions__domain",
|
||||
"subject_post_interaction__post__author__posts",
|
||||
)
|
||||
pager: PaginationResult[TimelineEvent] = paginator.paginate(
|
||||
queryset,
|
||||
min_id=min_id,
|
||||
max_id=max_id,
|
||||
since_id=since_id,
|
||||
limit=limit,
|
||||
home=True,
|
||||
)
|
||||
interactions = PostInteraction.get_event_interactions(
|
||||
pager.results, request.identity
|
||||
return PaginatingApiResponse(
|
||||
schemas.Status.map_from_timeline_event(pager.results, request.identity),
|
||||
request=request,
|
||||
include_params=["limit"],
|
||||
)
|
||||
|
||||
if pager.results:
|
||||
response.headers["Link"] = pager.link_header(request, ["limit"])
|
||||
|
||||
return [
|
||||
event.to_mastodon_status_json(interactions=interactions)
|
||||
for event in pager.results
|
||||
]
|
||||
|
||||
|
||||
@api_router.get("/v1/timelines/public", response=list[schemas.Status])
|
||||
@api_view.get
|
||||
def public(
|
||||
request: HttpRequest,
|
||||
response: HttpResponse,
|
||||
local: bool = False,
|
||||
remote: bool = False,
|
||||
only_media: bool = False,
|
||||
|
@ -52,9 +59,9 @@ def public(
|
|||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 20,
|
||||
):
|
||||
) -> ApiResponse[list[schemas.Status]]:
|
||||
if not request.identity and not Config.system.public_timeline:
|
||||
return JsonResponse({"error": "public timeline is disabled"}, status=422)
|
||||
raise ApiError(error="public timeline is disabled", status=422)
|
||||
|
||||
if local:
|
||||
queryset = TimelineService(request.identity).local()
|
||||
|
@ -64,32 +71,26 @@ def public(
|
|||
queryset = queryset.filter(local=False)
|
||||
if only_media:
|
||||
queryset = queryset.filter(attachments__id__isnull=True)
|
||||
paginator = MastodonPaginator(Post, sort_attribute="published")
|
||||
pager = paginator.paginate(
|
||||
# Grab a paginated result set of instances
|
||||
paginator = MastodonPaginator()
|
||||
pager: PaginationResult[Post] = paginator.paginate(
|
||||
queryset,
|
||||
min_id=min_id,
|
||||
max_id=max_id,
|
||||
since_id=since_id,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
if pager.results:
|
||||
response.headers["Link"] = pager.link_header(
|
||||
request,
|
||||
["limit", "local", "remote", "only_media"],
|
||||
)
|
||||
|
||||
interactions = PostInteraction.get_post_interactions(
|
||||
pager.results, request.identity
|
||||
return PaginatingApiResponse(
|
||||
schemas.Status.map_from_post(pager.results, request.identity),
|
||||
request=request,
|
||||
include_params=["limit", "local", "remote", "only_media"],
|
||||
)
|
||||
return [post.to_mastodon_json(interactions=interactions) for post in pager.results]
|
||||
|
||||
|
||||
@api_router.get("/v1/timelines/tag/{hashtag}", response=list[schemas.Status])
|
||||
@identity_required
|
||||
@scope_required("read:statuses")
|
||||
@api_view.get
|
||||
def hashtag(
|
||||
request: HttpRequest,
|
||||
response: HttpResponse,
|
||||
hashtag: str,
|
||||
local: bool = False,
|
||||
only_media: bool = False,
|
||||
|
@ -97,44 +98,64 @@ def hashtag(
|
|||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 20,
|
||||
):
|
||||
) -> ApiResponse[list[schemas.Status]]:
|
||||
if limit > 40:
|
||||
limit = 40
|
||||
queryset = TimelineService(request.identity).hashtag(hashtag)
|
||||
queryset = TimelineService(request.identity).hashtag(hashtag.lower())
|
||||
if local:
|
||||
queryset = queryset.filter(local=True)
|
||||
if only_media:
|
||||
queryset = queryset.filter(attachments__id__isnull=True)
|
||||
paginator = MastodonPaginator(Post, sort_attribute="published")
|
||||
pager = paginator.paginate(
|
||||
# Grab a paginated result set of instances
|
||||
paginator = MastodonPaginator()
|
||||
pager: PaginationResult[Post] = paginator.paginate(
|
||||
queryset,
|
||||
min_id=min_id,
|
||||
max_id=max_id,
|
||||
since_id=since_id,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
if pager.results:
|
||||
response.headers["Link"] = pager.link_header(
|
||||
request,
|
||||
["limit", "local", "remote", "only_media"],
|
||||
)
|
||||
|
||||
interactions = PostInteraction.get_post_interactions(
|
||||
pager.results, request.identity
|
||||
return PaginatingApiResponse(
|
||||
schemas.Status.map_from_post(pager.results, request.identity),
|
||||
request=request,
|
||||
include_params=["limit", "local", "remote", "only_media"],
|
||||
)
|
||||
return [post.to_mastodon_json(interactions=interactions) for post in pager.results]
|
||||
|
||||
|
||||
@api_router.get("/v1/conversations", response=list[schemas.Status])
|
||||
@identity_required
|
||||
@scope_required("read:conversations")
|
||||
@api_view.get
|
||||
def conversations(
|
||||
request: HttpRequest,
|
||||
response: HttpResponse,
|
||||
max_id: str | None = None,
|
||||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 20,
|
||||
):
|
||||
) -> list[schemas.Status]:
|
||||
# We don't implement this yet
|
||||
return []
|
||||
|
||||
|
||||
@scope_required("read:favourites")
|
||||
@api_view.get
|
||||
def favourites(
|
||||
request: HttpRequest,
|
||||
max_id: str | None = None,
|
||||
since_id: str | None = None,
|
||||
min_id: str | None = None,
|
||||
limit: int = 20,
|
||||
) -> ApiResponse[list[schemas.Status]]:
|
||||
queryset = TimelineService(request.identity).likes()
|
||||
|
||||
paginator = MastodonPaginator()
|
||||
pager: PaginationResult[Post] = paginator.paginate(
|
||||
queryset,
|
||||
min_id=min_id,
|
||||
max_id=max_id,
|
||||
since_id=since_id,
|
||||
limit=limit,
|
||||
)
|
||||
return PaginatingApiResponse(
|
||||
schemas.Status.map_from_post(pager.results, request.identity),
|
||||
request=request,
|
||||
include_params=["limit"],
|
||||
)
|
||||
|
|
38
api/views/trends.py
Normal file
38
api/views/trends.py
Normal file
|
@ -0,0 +1,38 @@
|
|||
from django.http import HttpRequest
|
||||
from hatchway import api_view
|
||||
|
||||
from api import schemas
|
||||
from api.decorators import scope_required
|
||||
|
||||
|
||||
@scope_required("read")
|
||||
@api_view.get
|
||||
def trends_tags(
|
||||
request: HttpRequest,
|
||||
limit: int = 10,
|
||||
offset: int | None = None,
|
||||
) -> list[schemas.Tag]:
|
||||
# We don't implement this yet
|
||||
return []
|
||||
|
||||
|
||||
@scope_required("read")
|
||||
@api_view.get
|
||||
def trends_statuses(
|
||||
request: HttpRequest,
|
||||
limit: int = 10,
|
||||
offset: int | None = None,
|
||||
) -> list[schemas.Status]:
|
||||
# We don't implement this yet
|
||||
return []
|
||||
|
||||
|
||||
@scope_required("read")
|
||||
@api_view.get
|
||||
def trends_links(
|
||||
request: HttpRequest,
|
||||
limit: int = 10,
|
||||
offset: int | None = None,
|
||||
) -> list:
|
||||
# We don't implement this yet
|
||||
return []
|
|
@ -1,12 +1,12 @@
|
|||
from django.conf import settings
|
||||
|
||||
from core.models import Config
|
||||
|
||||
|
||||
def config_context(request):
|
||||
return {
|
||||
"config": Config.system,
|
||||
"config_identity": (
|
||||
request.identity.config_identity if request.identity else None
|
||||
),
|
||||
"allow_migration": settings.SETUP.ALLOW_USER_MIGRATION,
|
||||
"top_section": request.path.strip("/").split("/")[0],
|
||||
"opengraph_defaults": {
|
||||
"og:site_name": Config.system.site_name,
|
||||
|
|
|
@ -20,16 +20,6 @@ def vary_by_ap_json(request, *args, **kwargs) -> str:
|
|||
return "not_ap"
|
||||
|
||||
|
||||
def vary_by_identity(request, *args, **kwargs) -> str:
|
||||
"""
|
||||
Return a cache usable string token that is different based upon the
|
||||
request.identity
|
||||
"""
|
||||
if request.identity:
|
||||
return f"ident{request.identity.pk}"
|
||||
return "identNone"
|
||||
|
||||
|
||||
def cache_page(
|
||||
timeout: int | str = "cache_timeout_page_default",
|
||||
*,
|
||||
|
|
|
@ -1,43 +1,16 @@
|
|||
import traceback
|
||||
|
||||
from asgiref.sync import sync_to_async
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
class ActivityPubError(BaseException):
|
||||
"""
|
||||
A problem with an ActivityPub message
|
||||
"""
|
||||
|
||||
|
||||
class ActivityPubFormatError(ActivityPubError):
|
||||
"""
|
||||
A problem with an ActivityPub message's format/keys
|
||||
"""
|
||||
|
||||
|
||||
class ActorMismatchError(ActivityPubError):
|
||||
"""
|
||||
The actor is not authorised to do the action we saw
|
||||
"""
|
||||
|
||||
|
||||
def capture_message(message: str):
|
||||
"""
|
||||
Sends the informational message to Sentry if it's configured
|
||||
"""
|
||||
if settings.SETUP.SENTRY_DSN and settings.SETUP.SENTRY_CAPTURE_MESSAGES:
|
||||
from sentry_sdk import capture_message
|
||||
|
||||
capture_message(message)
|
||||
elif settings.DEBUG:
|
||||
print(message)
|
||||
|
||||
|
||||
def capture_exception(exception: BaseException):
|
||||
"""
|
||||
Sends the exception to Sentry if it's configured
|
||||
"""
|
||||
if settings.SETUP.SENTRY_DSN:
|
||||
from sentry_sdk import capture_exception
|
||||
|
||||
capture_exception(exception)
|
||||
elif settings.DEBUG:
|
||||
traceback.print_exc()
|
||||
|
||||
|
||||
acapture_exception = sync_to_async(capture_exception, thread_sensitive=False)
|
||||
|
|
|
@ -24,6 +24,16 @@ def resize_image(
|
|||
to fit if needed)
|
||||
"""
|
||||
with Image.open(image) as img:
|
||||
try:
|
||||
# Take any orientation EXIF data, apply it, and strip the
|
||||
# orientation data from the new image.
|
||||
img = ImageOps.exif_transpose(img)
|
||||
except Exception: # noqa
|
||||
# exif_transpose can crash with different errors depending on
|
||||
# the EXIF keys. Just ignore them all, better to have a rotated
|
||||
# image than no image.
|
||||
pass
|
||||
|
||||
if cover:
|
||||
resized_image = ImageOps.fit(img, size, method=Image.Resampling.BILINEAR)
|
||||
else:
|
||||
|
@ -34,7 +44,7 @@ def resize_image(
|
|||
resized_image.save(new_image_bytes, format=img.format)
|
||||
file = ImageFile(new_image_bytes)
|
||||
else:
|
||||
resized_image.save(new_image_bytes, format="webp")
|
||||
resized_image.save(new_image_bytes, format="webp", save_all=True)
|
||||
file = ImageFile(new_image_bytes, name="image.webp")
|
||||
file.image = resized_image
|
||||
return file
|
||||
|
@ -47,7 +57,7 @@ def blurhash_image(file) -> str:
|
|||
return blurhash.encode(file, 4, 4)
|
||||
|
||||
|
||||
async def get_remote_file(
|
||||
def get_remote_file(
|
||||
url: str,
|
||||
*,
|
||||
timeout: float = settings.SETUP.REMOTE_TIMEOUT,
|
||||
|
@ -60,8 +70,10 @@ async def get_remote_file(
|
|||
"User-Agent": settings.TAKAHE_USER_AGENT,
|
||||
}
|
||||
|
||||
async with httpx.AsyncClient(headers=headers) as client:
|
||||
async with client.stream("GET", url, timeout=timeout) as stream:
|
||||
with httpx.Client(headers=headers) as client:
|
||||
with client.stream(
|
||||
"GET", url, timeout=timeout, follow_redirects=True
|
||||
) as stream:
|
||||
allow_download = max_size is None
|
||||
if max_size:
|
||||
try:
|
||||
|
@ -70,7 +82,7 @@ async def get_remote_file(
|
|||
except (KeyError, TypeError):
|
||||
pass
|
||||
if allow_download:
|
||||
file = ContentFile(await stream.aread(), name=url)
|
||||
file = ContentFile(stream.read(), name=url)
|
||||
return file, stream.headers.get(
|
||||
"content-type", "application/octet-stream"
|
||||
)
|
||||
|
|
531
core/html.py
531
core/html.py
|
@ -1,72 +1,324 @@
|
|||
import html
|
||||
import re
|
||||
from functools import partial
|
||||
from html.parser import HTMLParser
|
||||
|
||||
import bleach
|
||||
from bleach.linkifier import LinkifyFilter
|
||||
from django.utils.safestring import mark_safe
|
||||
|
||||
url_regex = re.compile(
|
||||
r"""\(* # Match any opening parentheses.
|
||||
\b(?<![@.])(?:https?://(?:(?:\w+:)?\w+@)?) # http://
|
||||
([\w-]+\.)+(?:[\w-]+)(?:\:[0-9]+)?(?!\.\w)\b # xx.yy.tld(:##)?
|
||||
(?:[/?][^\s\{{\}}\|\\\^\[\]`<>"]*)?
|
||||
|
||||
class FediverseHtmlParser(HTMLParser):
|
||||
"""
|
||||
A custom HTML parser that only allows a certain tag subset and behaviour:
|
||||
- br, p tags are passed through
|
||||
- a tags are passed through if they're not hashtags or mentions
|
||||
- Another set of tags are converted to p
|
||||
|
||||
It also linkifies URLs, mentions, hashtags, and imagifies emoji.
|
||||
"""
|
||||
|
||||
REWRITE_TO_P = [
|
||||
"p",
|
||||
"h1",
|
||||
"h2",
|
||||
"h3",
|
||||
"h4",
|
||||
"h5",
|
||||
"h6",
|
||||
"blockquote",
|
||||
"pre",
|
||||
"ul",
|
||||
"ol",
|
||||
]
|
||||
|
||||
REWRITE_TO_BR = [
|
||||
"br",
|
||||
"li",
|
||||
]
|
||||
|
||||
MENTION_REGEX = re.compile(
|
||||
r"(^|[^\w\d\-_/])@([\w\d\-_]+(?:@[\w\d\-_\.]+[\w\d\-_]+)?)"
|
||||
)
|
||||
|
||||
HASHTAG_REGEX = re.compile(r"\B#([\w()]+\b)(?!;)")
|
||||
|
||||
EMOJI_REGEX = re.compile(r"\B:([a-zA-Z0-9(_)-]+):\B")
|
||||
|
||||
URL_REGEX = re.compile(
|
||||
r"""(\(* # Match any opening parentheses.
|
||||
\b(?<![@.])(?:https?://(?:(?:\w+:)?\w+@)?) # http://
|
||||
(?:[\w-]+\.)+(?:[\w-]+)(?:\:[0-9]+)?(?!\.\w)\b # xx.yy.tld(:##)?
|
||||
(?:[/?][^\s\{{\}}\|\\\^\[\]`<>"]*)?)
|
||||
# /path/zz (excluding "unsafe" chars from RFC 1738,
|
||||
# except for # and ~, which happen in practice)
|
||||
""",
|
||||
re.IGNORECASE | re.VERBOSE | re.UNICODE,
|
||||
)
|
||||
|
||||
|
||||
def allow_a(tag: str, name: str, value: str):
|
||||
if name in ["href", "title", "class"]:
|
||||
return True
|
||||
elif name == "rel":
|
||||
# Only allow rel attributes with a small subset of values
|
||||
# (we're defending against, for example, rel=me)
|
||||
rel_values = value.split()
|
||||
if all(v in ["nofollow", "noopener", "noreferrer", "tag"] for v in rel_values):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def sanitize_html(post_html: str) -> str:
|
||||
"""
|
||||
Only allows a, br, p and span tags, and class attributes.
|
||||
"""
|
||||
cleaner = bleach.Cleaner(
|
||||
tags=["br", "p", "a"],
|
||||
attributes={ # type:ignore
|
||||
"a": allow_a,
|
||||
"p": ["class"],
|
||||
},
|
||||
filters=[partial(LinkifyFilter, url_re=url_regex)],
|
||||
strip=True,
|
||||
""",
|
||||
re.IGNORECASE | re.VERBOSE | re.UNICODE,
|
||||
)
|
||||
return mark_safe(cleaner.clean(post_html))
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
html: str,
|
||||
uri_domain: str | None = None,
|
||||
mentions: list | None = None,
|
||||
find_mentions: bool = False,
|
||||
find_hashtags: bool = False,
|
||||
find_emojis: bool = False,
|
||||
emoji_domain=None,
|
||||
):
|
||||
super().__init__()
|
||||
self.uri_domain = uri_domain
|
||||
self.emoji_domain = emoji_domain
|
||||
self.find_mentions = find_mentions
|
||||
self.find_hashtags = find_hashtags
|
||||
self.find_emojis = find_emojis
|
||||
self.calculate_mentions(mentions)
|
||||
self._data_buffer = ""
|
||||
self.html_output = ""
|
||||
self.text_output = ""
|
||||
self.emojis: set[str] = set()
|
||||
self.mentions: set[str] = set()
|
||||
self.hashtags: set[str] = set()
|
||||
self._pending_a: dict | None = None
|
||||
self._fresh_p = False
|
||||
self.feed(html.replace("\n", ""))
|
||||
self.flush_data()
|
||||
|
||||
def strip_html(post_html: str, *, linkify: bool = True) -> str:
|
||||
"""
|
||||
Strips all tags from the text, then linkifies it.
|
||||
"""
|
||||
cleaner = bleach.Cleaner(
|
||||
tags=[],
|
||||
strip=True,
|
||||
filters=[partial(LinkifyFilter, url_re=url_regex)] if linkify else [],
|
||||
)
|
||||
return mark_safe(cleaner.clean(post_html))
|
||||
def calculate_mentions(self, mentions: list | None):
|
||||
"""
|
||||
Prepares a set of content that we expect to see mentions look like
|
||||
(this imp)
|
||||
"""
|
||||
self.mention_matches: dict[str, str] = {}
|
||||
self.mention_aliases: dict[str, str] = {}
|
||||
for mention in mentions or []:
|
||||
if self.uri_domain:
|
||||
url = mention.absolute_profile_uri()
|
||||
elif not mention.local:
|
||||
url = mention.profile_uri
|
||||
else:
|
||||
url = str(mention.urls.view)
|
||||
if mention.username:
|
||||
username = mention.username.lower()
|
||||
domain = mention.domain_id.lower()
|
||||
self.mention_matches[f"{username}"] = url
|
||||
self.mention_matches[f"{username}@{domain}"] = url
|
||||
self.mention_matches[mention.absolute_profile_uri()] = url
|
||||
|
||||
def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None:
|
||||
if tag in self.REWRITE_TO_P:
|
||||
self.flush_data()
|
||||
self.html_output += "<p>"
|
||||
elif tag in self.REWRITE_TO_BR:
|
||||
self.flush_data()
|
||||
if not self._fresh_p:
|
||||
self.html_output += "<br>"
|
||||
self.text_output += "\n"
|
||||
elif tag == "a":
|
||||
self.flush_data()
|
||||
self._pending_a = {"attrs": dict(attrs), "content": ""}
|
||||
self._fresh_p = tag in self.REWRITE_TO_P
|
||||
|
||||
def html_to_plaintext(post_html: str) -> str:
|
||||
"""
|
||||
Tries to do the inverse of the linebreaks filter.
|
||||
"""
|
||||
# TODO: Handle HTML entities
|
||||
# Remove all newlines, then replace br with a newline and /p with two (one comes from bleach)
|
||||
post_html = post_html.replace("\n", "").replace("<br>", "\n").replace("</p>", "\n")
|
||||
# Remove all other HTML and return
|
||||
cleaner = bleach.Cleaner(tags=[], strip=True, filters=[])
|
||||
return cleaner.clean(post_html).strip()
|
||||
def handle_endtag(self, tag: str) -> None:
|
||||
self._fresh_p = False
|
||||
if tag in self.REWRITE_TO_P:
|
||||
self.flush_data()
|
||||
self.html_output += "</p>"
|
||||
self.text_output += "\n\n"
|
||||
elif tag == "a":
|
||||
if self._pending_a:
|
||||
href = self._pending_a["attrs"].get("href")
|
||||
content = self._pending_a["content"].strip()
|
||||
has_ellipsis = "ellipsis" in self._pending_a["attrs"].get("class", "")
|
||||
# Is it a mention?
|
||||
if content.lower().lstrip("@") in self.mention_matches:
|
||||
self.html_output += self.create_mention(content, href)
|
||||
self.text_output += content
|
||||
# Is it a hashtag?
|
||||
elif self.HASHTAG_REGEX.match(content):
|
||||
self.html_output += self.create_hashtag(content)
|
||||
self.text_output += content
|
||||
elif content:
|
||||
# Shorten the link if we need to
|
||||
self.html_output += self.create_link(
|
||||
href,
|
||||
content,
|
||||
has_ellipsis=has_ellipsis,
|
||||
)
|
||||
self.text_output += href
|
||||
self._pending_a = None
|
||||
|
||||
def handle_data(self, data: str) -> None:
|
||||
self._fresh_p = False
|
||||
if self._pending_a:
|
||||
self._pending_a["content"] += data
|
||||
else:
|
||||
self._data_buffer += data
|
||||
|
||||
def flush_data(self) -> None:
|
||||
"""
|
||||
We collect data segments until we encounter a tag we care about,
|
||||
so we can treat <span>#</span>hashtag as #hashtag
|
||||
"""
|
||||
self.text_output += self._data_buffer
|
||||
self.html_output += self.linkify(self._data_buffer)
|
||||
self._data_buffer = ""
|
||||
|
||||
def create_link(self, href, content, has_ellipsis=False):
|
||||
"""
|
||||
Generates a link, doing optional shortening.
|
||||
|
||||
All return values from this function should be HTML-safe.
|
||||
"""
|
||||
looks_like_link = bool(self.URL_REGEX.match(content))
|
||||
if looks_like_link:
|
||||
protocol, content = content.split("://", 1)
|
||||
else:
|
||||
protocol = ""
|
||||
if (looks_like_link and len(content) > 30) or has_ellipsis:
|
||||
return f'<a href="{html.escape(href)}" rel="nofollow" class="ellipsis" title="{html.escape(content)}"><span class="invisible">{html.escape(protocol)}://</span><span class="ellipsis">{html.escape(content[:30])}</span><span class="invisible">{html.escape(content[30:])}</span></a>'
|
||||
elif looks_like_link:
|
||||
return f'<a href="{html.escape(href)}" rel="nofollow"><span class="invisible">{html.escape(protocol)}://</span>{html.escape(content)}</a>'
|
||||
else:
|
||||
return f'<a href="{html.escape(href)}" rel="nofollow">{html.escape(content)}</a>'
|
||||
|
||||
def create_mention(self, handle, href: str | None = None) -> str:
|
||||
"""
|
||||
Generates a mention link. Handle should have a leading @.
|
||||
|
||||
All return values from this function should be HTML-safe
|
||||
"""
|
||||
handle = handle.lstrip("@")
|
||||
if "@" in handle:
|
||||
short_handle = handle.split("@", 1)[0]
|
||||
else:
|
||||
short_handle = handle
|
||||
handle_hash = handle.lower()
|
||||
short_hash = short_handle.lower()
|
||||
self.mentions.add(handle_hash)
|
||||
url = self.mention_matches.get(handle_hash)
|
||||
# If we have a captured link out, use that as the actual resolver
|
||||
if href and href in self.mention_matches:
|
||||
url = self.mention_matches[href]
|
||||
if url:
|
||||
if short_hash not in self.mention_aliases:
|
||||
self.mention_aliases[short_hash] = handle_hash
|
||||
elif self.mention_aliases.get(short_hash) != handle_hash:
|
||||
short_handle = handle
|
||||
return f'<span class="h-card"><a href="{html.escape(url)}" class="u-url mention" rel="nofollow noopener noreferrer" target="_blank">@<span>{html.escape(short_handle)}</span></a></span>'
|
||||
else:
|
||||
return "@" + html.escape(handle)
|
||||
|
||||
def create_hashtag(self, hashtag) -> str:
|
||||
"""
|
||||
Generates a hashtag link. Hashtag does not need to start with #
|
||||
|
||||
All return values from this function should be HTML-safe
|
||||
"""
|
||||
hashtag = hashtag.lstrip("#")
|
||||
self.hashtags.add(hashtag.lower())
|
||||
if self.uri_domain:
|
||||
return f'<a href="https://{self.uri_domain}/tags/{hashtag.lower()}/" class="mention hashtag" rel="tag">#{hashtag}</a>'
|
||||
else:
|
||||
return f'<a href="/tags/{hashtag.lower()}/" rel="tag">#{hashtag}</a>'
|
||||
|
||||
def create_emoji(self, shortcode) -> str:
|
||||
"""
|
||||
Generates an emoji <img> tag
|
||||
|
||||
All return values from this function should be HTML-safe
|
||||
"""
|
||||
from activities.models import Emoji
|
||||
|
||||
emoji = Emoji.get_by_domain(shortcode, self.emoji_domain)
|
||||
if emoji and emoji.is_usable:
|
||||
self.emojis.add(shortcode)
|
||||
return emoji.as_html()
|
||||
return f":{shortcode}:"
|
||||
|
||||
def linkify(self, data):
|
||||
"""
|
||||
Linkifies some content that is plaintext.
|
||||
|
||||
Handles URLs first, then mentions. Note that this takes great care to
|
||||
keep track of what is HTML and what needs to be escaped.
|
||||
"""
|
||||
# Split the string by the URL regex so we know what to escape and what
|
||||
# not to escape.
|
||||
bits = self.URL_REGEX.split(data)
|
||||
result = ""
|
||||
# Even indices are data we should pass though, odd indices are links
|
||||
for i, bit in enumerate(bits):
|
||||
# A link!
|
||||
if i % 2 == 1:
|
||||
result += self.create_link(bit, bit)
|
||||
# Not a link
|
||||
elif self.mention_matches or self.find_mentions:
|
||||
result += self.linkify_mentions(bit)
|
||||
elif self.find_hashtags:
|
||||
result += self.linkify_hashtags(bit)
|
||||
elif self.find_emojis:
|
||||
result += self.linkify_emoji(bit)
|
||||
else:
|
||||
result += html.escape(bit)
|
||||
return result
|
||||
|
||||
def linkify_mentions(self, data):
|
||||
"""
|
||||
Linkifies mentions
|
||||
"""
|
||||
bits = self.MENTION_REGEX.split(data)
|
||||
result = ""
|
||||
for i, bit in enumerate(bits):
|
||||
# Mention content
|
||||
if i % 3 == 2:
|
||||
result += self.create_mention(bit)
|
||||
# Not part of a mention (0) or mention preamble (1)
|
||||
elif self.find_hashtags:
|
||||
result += self.linkify_hashtags(bit)
|
||||
elif self.find_emojis:
|
||||
result += self.linkify_emoji(bit)
|
||||
else:
|
||||
result += html.escape(bit)
|
||||
return result
|
||||
|
||||
def linkify_hashtags(self, data):
|
||||
"""
|
||||
Linkifies hashtags
|
||||
"""
|
||||
bits = self.HASHTAG_REGEX.split(data)
|
||||
result = ""
|
||||
for i, bit in enumerate(bits):
|
||||
# Not part of a hashtag
|
||||
if i % 2 == 0:
|
||||
if self.find_emojis:
|
||||
result += self.linkify_emoji(bit)
|
||||
else:
|
||||
result += html.escape(bit)
|
||||
# Hashtag content
|
||||
else:
|
||||
result += self.create_hashtag(bit)
|
||||
return result
|
||||
|
||||
def linkify_emoji(self, data):
|
||||
"""
|
||||
Linkifies emoji
|
||||
"""
|
||||
bits = self.EMOJI_REGEX.split(data)
|
||||
result = ""
|
||||
for i, bit in enumerate(bits):
|
||||
# Not part of an emoji
|
||||
if i % 2 == 0:
|
||||
result += html.escape(bit)
|
||||
# Emoji content
|
||||
else:
|
||||
result += self.create_emoji(bit)
|
||||
return result
|
||||
|
||||
@property
|
||||
def html(self):
|
||||
return self.html_output.strip()
|
||||
|
||||
@property
|
||||
def plain_text(self):
|
||||
return self.text_output.strip()
|
||||
|
||||
|
||||
class ContentRenderer:
|
||||
|
@ -85,33 +337,30 @@ class ContentRenderer:
|
|||
"""
|
||||
if not html:
|
||||
return ""
|
||||
html = sanitize_html(html)
|
||||
html = self.linkify_mentions(html, post=post)
|
||||
html = self.linkify_hashtags(html, identity=post.author)
|
||||
if self.local:
|
||||
html = self.imageify_emojis(
|
||||
html,
|
||||
identity=post.author,
|
||||
emojis=post.emojis.all(),
|
||||
)
|
||||
html = self.remove_extra_newlines(html)
|
||||
return mark_safe(html)
|
||||
parser = FediverseHtmlParser(
|
||||
html,
|
||||
mentions=post.mentions.all(),
|
||||
uri_domain=(None if self.local else post.author.domain.uri_domain),
|
||||
find_hashtags=True,
|
||||
find_emojis=self.local,
|
||||
emoji_domain=post.author.domain,
|
||||
)
|
||||
return mark_safe(parser.html)
|
||||
|
||||
def render_identity_summary(self, html: str, identity, strip: bool = False) -> str:
|
||||
def render_identity_summary(self, html: str, identity) -> str:
|
||||
"""
|
||||
Given identity summary HTML, normalises it and renders it for presentation.
|
||||
"""
|
||||
if not html:
|
||||
return ""
|
||||
if strip:
|
||||
html = strip_html(html)
|
||||
else:
|
||||
html = sanitize_html(html)
|
||||
html = self.linkify_hashtags(html, identity=identity)
|
||||
if self.local:
|
||||
html = self.imageify_emojis(html, identity=identity)
|
||||
html = self.remove_extra_newlines(html)
|
||||
return mark_safe(html)
|
||||
parser = FediverseHtmlParser(
|
||||
html,
|
||||
uri_domain=(None if self.local else identity.domain.uri_domain),
|
||||
find_hashtags=True,
|
||||
find_emojis=self.local,
|
||||
emoji_domain=identity.domain,
|
||||
)
|
||||
return mark_safe(parser.html)
|
||||
|
||||
def render_identity_data(self, html: str, identity, strip: bool = False) -> str:
|
||||
"""
|
||||
|
@ -119,118 +368,14 @@ class ContentRenderer:
|
|||
"""
|
||||
if not html:
|
||||
return ""
|
||||
if strip:
|
||||
html = strip_html(html)
|
||||
else:
|
||||
html = sanitize_html(html)
|
||||
if self.local:
|
||||
html = self.imageify_emojis(html, identity=identity)
|
||||
html = self.remove_extra_newlines(html)
|
||||
return mark_safe(html)
|
||||
|
||||
def linkify_mentions(self, html: str, post) -> str:
|
||||
"""
|
||||
Links mentions _in the context of the post_ - as in, using the mentions
|
||||
property as the only source (as we might be doing this without other
|
||||
DB access allowed)
|
||||
"""
|
||||
from activities.models import Post
|
||||
|
||||
possible_matches = {}
|
||||
for mention in post.mentions.all():
|
||||
if self.local:
|
||||
url = str(mention.urls.view)
|
||||
else:
|
||||
url = mention.absolute_profile_uri()
|
||||
# Might not have fetched it (yet)
|
||||
if mention.username:
|
||||
username = mention.username.lower()
|
||||
possible_matches[username] = url
|
||||
possible_matches[f"{username}@{mention.domain_id}"] = url
|
||||
|
||||
collapse_name: dict[str, str] = {}
|
||||
|
||||
def replacer(match):
|
||||
precursor = match.group(1)
|
||||
handle = match.group(2)
|
||||
if "@" in handle:
|
||||
short_handle = handle.split("@", 1)[0]
|
||||
else:
|
||||
short_handle = handle
|
||||
handle_hash = handle.lower()
|
||||
short_hash = short_handle.lower()
|
||||
if handle_hash in possible_matches:
|
||||
if short_hash not in collapse_name:
|
||||
collapse_name[short_hash] = handle_hash
|
||||
elif collapse_name.get(short_hash) != handle_hash:
|
||||
short_handle = handle
|
||||
return f'{precursor}<a href="{possible_matches[handle_hash]}">@{short_handle}</a>'
|
||||
else:
|
||||
return match.group()
|
||||
|
||||
return Post.mention_regex.sub(replacer, html)
|
||||
|
||||
def linkify_hashtags(self, html, identity) -> str:
|
||||
from activities.models import Hashtag
|
||||
|
||||
def replacer(attrs, new=False):
|
||||
# See if the text in this link looks like a hashtag
|
||||
if not Hashtag.hashtag_regex.match(attrs.get("_text", "")):
|
||||
return attrs
|
||||
hashtag = attrs["_text"].strip().lstrip("#")
|
||||
attrs[None, "class"] = "hashtag"
|
||||
if (None, "rel") in attrs:
|
||||
del attrs[None, "rel"]
|
||||
if self.local:
|
||||
attrs[None, "href"] = f"/tags/{hashtag.lower()}/"
|
||||
else:
|
||||
attrs[
|
||||
None, "href"
|
||||
] = f"https://{identity.domain.uri_domain}/tags/{hashtag.lower()}/"
|
||||
return attrs
|
||||
|
||||
linker = bleach.linkifier.Linker(
|
||||
url_re=Hashtag.hashtag_regex, callbacks=[replacer]
|
||||
parser = FediverseHtmlParser(
|
||||
html,
|
||||
uri_domain=(None if self.local else identity.domain.uri_domain),
|
||||
find_hashtags=False,
|
||||
find_emojis=self.local,
|
||||
emoji_domain=identity.domain,
|
||||
)
|
||||
return linker.linkify(html)
|
||||
|
||||
def imageify_emojis(
|
||||
self, html: str, identity, include_local: bool = True, emojis=None
|
||||
):
|
||||
"""
|
||||
Find :emoji: in content and convert to <img>. If include_local is True,
|
||||
the local emoji will be used as a fallback for any shortcodes not defined
|
||||
by emojis.
|
||||
"""
|
||||
from activities.models import Emoji
|
||||
|
||||
# If precached emojis were passed, prep them
|
||||
cached_emojis = {}
|
||||
if emojis:
|
||||
for emoji in emojis:
|
||||
cached_emojis[emoji.shortcode] = emoji
|
||||
|
||||
def replacer(match):
|
||||
shortcode = match.group(1).lower()
|
||||
if shortcode in cached_emojis:
|
||||
return cached_emojis[shortcode].as_html()
|
||||
try:
|
||||
emoji = Emoji.get_by_domain(shortcode, identity.domain)
|
||||
if emoji.is_usable:
|
||||
return emoji.as_html()
|
||||
except Emoji.DoesNotExist:
|
||||
if include_local:
|
||||
try:
|
||||
return Emoji.get_by_domain(shortcode, identity.domain).as_html()
|
||||
except Emoji.DoesNotExist:
|
||||
pass
|
||||
return match.group()
|
||||
|
||||
return Emoji.emoji_regex.sub(replacer, html)
|
||||
|
||||
def remove_extra_newlines(self, html: str) -> str:
|
||||
"""
|
||||
Some clients are sensitive to extra newlines even though it's HTML
|
||||
"""
|
||||
# TODO: More intelligent way to strip these?
|
||||
return html.replace("\n", "")
|
||||
if strip:
|
||||
return mark_safe(parser.html)
|
||||
else:
|
||||
return mark_safe(parser.html)
|
||||
|
|
32
core/json.py
Normal file
32
core/json.py
Normal file
|
@ -0,0 +1,32 @@
|
|||
import json
|
||||
|
||||
from httpx import Response
|
||||
|
||||
JSON_CONTENT_TYPES = [
|
||||
"application/json",
|
||||
"application/ld+json",
|
||||
"application/activity+json",
|
||||
]
|
||||
|
||||
|
||||
def json_from_response(response: Response) -> dict | None:
|
||||
content_type, *parameters = (
|
||||
response.headers.get("Content-Type", "invalid").lower().split(";")
|
||||
)
|
||||
|
||||
if content_type not in JSON_CONTENT_TYPES:
|
||||
return None
|
||||
|
||||
charset = None
|
||||
|
||||
for parameter in parameters:
|
||||
key, value = parameter.split("=")
|
||||
if key.strip() == "charset":
|
||||
charset = value.strip()
|
||||
|
||||
if charset:
|
||||
return json.loads(response.content.decode(charset))
|
||||
else:
|
||||
# if no charset informed, default to
|
||||
# httpx json for encoding inference
|
||||
return response.json()
|
280
core/ld.py
280
core/ld.py
|
@ -1,12 +1,24 @@
|
|||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import urllib.parse as urllib_parse
|
||||
|
||||
from dateutil import parser
|
||||
from pyld import jsonld
|
||||
from pyld.jsonld import JsonLdError
|
||||
|
||||
from core.exceptions import ActivityPubFormatError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
schemas = {
|
||||
"unknown": {
|
||||
"contentType": "application/ld+json",
|
||||
"documentUrl": "unknown",
|
||||
"contextUrl": None,
|
||||
"document": {
|
||||
"@context": {},
|
||||
},
|
||||
},
|
||||
"www.w3.org/ns/activitystreams": {
|
||||
"contentType": "application/ld+json",
|
||||
"documentUrl": "http://www.w3.org/ns/activitystreams",
|
||||
|
@ -315,6 +327,187 @@ schemas = {
|
|||
}
|
||||
},
|
||||
},
|
||||
"www.w3.org/ns/did/v1": {
|
||||
"contentType": "application/ld+json",
|
||||
"documentUrl": "www.w3.org/ns/did/v1",
|
||||
"contextUrl": None,
|
||||
"document": {
|
||||
"@context": {
|
||||
"@protected": True,
|
||||
"id": "@id",
|
||||
"type": "@type",
|
||||
"alsoKnownAs": {
|
||||
"@id": "https://www.w3.org/ns/activitystreams#alsoKnownAs",
|
||||
"@type": "@id",
|
||||
},
|
||||
"assertionMethod": {
|
||||
"@id": "https://w3id.org/security#assertionMethod",
|
||||
"@type": "@id",
|
||||
"@container": "@set",
|
||||
},
|
||||
"authentication": {
|
||||
"@id": "https://w3id.org/security#authenticationMethod",
|
||||
"@type": "@id",
|
||||
"@container": "@set",
|
||||
},
|
||||
"capabilityDelegation": {
|
||||
"@id": "https://w3id.org/security#capabilityDelegationMethod",
|
||||
"@type": "@id",
|
||||
"@container": "@set",
|
||||
},
|
||||
"capabilityInvocation": {
|
||||
"@id": "https://w3id.org/security#capabilityInvocationMethod",
|
||||
"@type": "@id",
|
||||
"@container": "@set",
|
||||
},
|
||||
"controller": {
|
||||
"@id": "https://w3id.org/security#controller",
|
||||
"@type": "@id",
|
||||
},
|
||||
"keyAgreement": {
|
||||
"@id": "https://w3id.org/security#keyAgreementMethod",
|
||||
"@type": "@id",
|
||||
"@container": "@set",
|
||||
},
|
||||
"service": {
|
||||
"@id": "https://www.w3.org/ns/did#service",
|
||||
"@type": "@id",
|
||||
"@context": {
|
||||
"@protected": True,
|
||||
"id": "@id",
|
||||
"type": "@type",
|
||||
"serviceEndpoint": {
|
||||
"@id": "https://www.w3.org/ns/did#serviceEndpoint",
|
||||
"@type": "@id",
|
||||
},
|
||||
},
|
||||
},
|
||||
"verificationMethod": {
|
||||
"@id": "https://w3id.org/security#verificationMethod",
|
||||
"@type": "@id",
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
"w3id.org/security/data-integrity/v1": {
|
||||
"contentType": "application/ld+json",
|
||||
"documentUrl": "https://w3id.org/security/data-integrity/v1",
|
||||
"contextUrl": None,
|
||||
"document": {
|
||||
"@context": {
|
||||
"id": "@id",
|
||||
"type": "@type",
|
||||
"@protected": True,
|
||||
"proof": {
|
||||
"@id": "https://w3id.org/security#proof",
|
||||
"@type": "@id",
|
||||
"@container": "@graph",
|
||||
},
|
||||
"DataIntegrityProof": {
|
||||
"@id": "https://w3id.org/security#DataIntegrityProof",
|
||||
"@context": {
|
||||
"@protected": True,
|
||||
"id": "@id",
|
||||
"type": "@type",
|
||||
"challenge": "https://w3id.org/security#challenge",
|
||||
"created": {
|
||||
"@id": "http://purl.org/dc/terms/created",
|
||||
"@type": "http://www.w3.org/2001/XMLSchema#dateTime",
|
||||
},
|
||||
"domain": "https://w3id.org/security#domain",
|
||||
"expires": {
|
||||
"@id": "https://w3id.org/security#expiration",
|
||||
"@type": "http://www.w3.org/2001/XMLSchema#dateTime",
|
||||
},
|
||||
"nonce": "https://w3id.org/security#nonce",
|
||||
"proofPurpose": {
|
||||
"@id": "https://w3id.org/security#proofPurpose",
|
||||
"@type": "@vocab",
|
||||
"@context": {
|
||||
"@protected": True,
|
||||
"id": "@id",
|
||||
"type": "@type",
|
||||
"assertionMethod": {
|
||||
"@id": "https://w3id.org/security#assertionMethod",
|
||||
"@type": "@id",
|
||||
"@container": "@set",
|
||||
},
|
||||
"authentication": {
|
||||
"@id": "https://w3id.org/security#authenticationMethod",
|
||||
"@type": "@id",
|
||||
"@container": "@set",
|
||||
},
|
||||
"capabilityInvocation": {
|
||||
"@id": "https://w3id.org/security#capabilityInvocationMethod",
|
||||
"@type": "@id",
|
||||
"@container": "@set",
|
||||
},
|
||||
"capabilityDelegation": {
|
||||
"@id": "https://w3id.org/security#capabilityDelegationMethod",
|
||||
"@type": "@id",
|
||||
"@container": "@set",
|
||||
},
|
||||
"keyAgreement": {
|
||||
"@id": "https://w3id.org/security#keyAgreementMethod",
|
||||
"@type": "@id",
|
||||
"@container": "@set",
|
||||
},
|
||||
},
|
||||
},
|
||||
"cryptosuite": "https://w3id.org/security#cryptosuite",
|
||||
"proofValue": {
|
||||
"@id": "https://w3id.org/security#proofValue",
|
||||
"@type": "https://w3id.org/security#multibase",
|
||||
},
|
||||
"verificationMethod": {
|
||||
"@id": "https://w3id.org/security#verificationMethod",
|
||||
"@type": "@id",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
"w3id.org/security/multikey/v1": {
|
||||
"contentType": "application/ld+json",
|
||||
"documentUrl": "https://w3id.org/security/multikey/v1",
|
||||
"contextUrl": None,
|
||||
"document": {
|
||||
"@context": {
|
||||
"id": "@id",
|
||||
"type": "@type",
|
||||
"@protected": True,
|
||||
"Multikey": {
|
||||
"@id": "https://w3id.org/security#Multikey",
|
||||
"@context": {
|
||||
"@protected": True,
|
||||
"id": "@id",
|
||||
"type": "@type",
|
||||
"controller": {
|
||||
"@id": "https://w3id.org/security#controller",
|
||||
"@type": "@id",
|
||||
},
|
||||
"revoked": {
|
||||
"@id": "https://w3id.org/security#revoked",
|
||||
"@type": "http://www.w3.org/2001/XMLSchema#dateTime",
|
||||
},
|
||||
"expires": {
|
||||
"@id": "https://w3id.org/security#expiration",
|
||||
"@type": "http://www.w3.org/2001/XMLSchema#dateTime",
|
||||
},
|
||||
"publicKeyMultibase": {
|
||||
"@id": "https://w3id.org/security#publicKeyMultibase",
|
||||
"@type": "https://w3id.org/security#multibase",
|
||||
},
|
||||
"secretKeyMultibase": {
|
||||
"@id": "https://w3id.org/security#secretKeyMultibase",
|
||||
"@type": "https://w3id.org/security#multibase",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"*/schemas/litepub-0.1.jsonld": {
|
||||
"contentType": "application/ld+json",
|
||||
"documentUrl": "http://w3id.org/security/v1",
|
||||
|
@ -406,6 +599,28 @@ schemas = {
|
|||
}
|
||||
},
|
||||
},
|
||||
"schema.org": {
|
||||
"contentType": "application/ld+json",
|
||||
"documentUrl": "https://schema.org/docs/jsonldcontext.json",
|
||||
"contextUrl": None,
|
||||
"document": {
|
||||
"@context": {
|
||||
"schema": "http://schema.org/",
|
||||
"PropertyValue": {"@id": "schema:PropertyValue"},
|
||||
"value": {"@id": "schema:value"},
|
||||
},
|
||||
},
|
||||
},
|
||||
"purl.org/wytchspace/ns/ap/1.0": {
|
||||
"contentType": "application/ld+json",
|
||||
"documentUrl": "https://purl.org/wytchspace/ns/ap/1.0",
|
||||
"contextUrl": None,
|
||||
"document": {
|
||||
"@context": {
|
||||
"wytch": "https://ns.wytch.space/ap/1.0.jsonld",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.Z"
|
||||
|
@ -417,12 +632,8 @@ def builtin_document_loader(url: str, options={}):
|
|||
# Get URL without scheme
|
||||
pieces = urllib_parse.urlparse(url)
|
||||
if pieces.hostname is None:
|
||||
raise JsonLdError(
|
||||
f"No schema built-in for {url!r}",
|
||||
"jsonld.LoadDocumentError",
|
||||
code="loading document failed",
|
||||
cause="NoHostnameError",
|
||||
)
|
||||
logger.info(f"No host name for json-ld schema: {url!r}")
|
||||
return schemas["unknown"]
|
||||
key = pieces.hostname + pieces.path.rstrip("/")
|
||||
try:
|
||||
return schemas[key]
|
||||
|
@ -431,12 +642,9 @@ def builtin_document_loader(url: str, options={}):
|
|||
key = "*" + pieces.path.rstrip("/")
|
||||
return schemas[key]
|
||||
except KeyError:
|
||||
raise JsonLdError(
|
||||
f"No schema built-in for {key!r}",
|
||||
"jsonld.LoadDocumentError",
|
||||
code="loading document failed",
|
||||
cause="KeyError",
|
||||
)
|
||||
# return an empty context instead of throwing an error
|
||||
logger.info(f"Ignoring unknown json-ld schema: {url!r}")
|
||||
return schemas["unknown"]
|
||||
|
||||
|
||||
def canonicalise(json_data: dict, include_security: bool = False) -> dict:
|
||||
|
@ -451,24 +659,32 @@ def canonicalise(json_data: dict, include_security: bool = False) -> dict:
|
|||
"""
|
||||
if not isinstance(json_data, dict):
|
||||
raise ValueError("Pass decoded JSON data into LDDocument")
|
||||
context = [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
{
|
||||
"blurhash": "toot:blurhash",
|
||||
"Emoji": "toot:Emoji",
|
||||
"focalPoint": {"@container": "@list", "@id": "toot:focalPoint"},
|
||||
"Hashtag": "as:Hashtag",
|
||||
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
|
||||
"Public": "as:Public",
|
||||
"sensitive": "as:sensitive",
|
||||
"toot": "http://joinmastodon.org/ns#",
|
||||
"votersCount": "toot:votersCount",
|
||||
},
|
||||
]
|
||||
|
||||
context = json_data.get("@context", [])
|
||||
|
||||
if not isinstance(context, list):
|
||||
context = [context]
|
||||
|
||||
if not context:
|
||||
context.append("https://www.w3.org/ns/activitystreams")
|
||||
context.append(
|
||||
{
|
||||
"blurhash": "toot:blurhash",
|
||||
"Emoji": "toot:Emoji",
|
||||
"focalPoint": {"@container": "@list", "@id": "toot:focalPoint"},
|
||||
"Hashtag": "as:Hashtag",
|
||||
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
|
||||
"sensitive": "as:sensitive",
|
||||
"toot": "http://joinmastodon.org/ns#",
|
||||
"votersCount": "toot:votersCount",
|
||||
"featured": {"@id": "toot:featured", "@type": "@id"},
|
||||
}
|
||||
)
|
||||
|
||||
if include_security:
|
||||
context.append("https://w3id.org/security/v1")
|
||||
if "@context" not in json_data:
|
||||
json_data["@context"] = context
|
||||
|
||||
json_data["@context"] = context
|
||||
|
||||
return jsonld.compact(jsonld.expand(json_data), context)
|
||||
|
||||
|
@ -485,14 +701,14 @@ def get_list(container, key) -> list:
|
|||
return value
|
||||
|
||||
|
||||
def get_str_or_id(value: str | dict | None) -> str | None:
|
||||
def get_str_or_id(value: str | dict | None, key: str = "id") -> str | None:
|
||||
"""
|
||||
Given a value that could be a str or {"id": str}, return the str
|
||||
"""
|
||||
if isinstance(value, str):
|
||||
return value
|
||||
elif isinstance(value, dict):
|
||||
return value.get("id")
|
||||
return value.get(key)
|
||||
return None
|
||||
|
||||
|
||||
|
@ -534,7 +750,7 @@ def get_value_or_map(data, key, map_key):
|
|||
if "und" in map_key:
|
||||
return data[map_key]["und"]
|
||||
return list(data[map_key].values())[0]
|
||||
raise KeyError(f"Cannot find {key} or {map_key}")
|
||||
raise ActivityPubFormatError(f"Cannot find {key} or {map_key}")
|
||||
|
||||
|
||||
def media_type_from_filename(filename):
|
||||
|
|
|
@ -10,7 +10,6 @@ import core.uploads
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
|
|
35
core/migrations/0002_domain_config.py
Normal file
35
core/migrations/0002_domain_config.py
Normal file
|
@ -0,0 +1,35 @@
|
|||
# Generated by Django 4.2 on 2023-04-29 18:49
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("users", "0016_hashtagfollow"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
("core", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="config",
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="config",
|
||||
name="domain",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="configs",
|
||||
to="users.domain",
|
||||
),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="config",
|
||||
unique_together={("key", "user", "identity", "domain")},
|
||||
),
|
||||
]
|
|
@ -2,7 +2,6 @@ from functools import partial
|
|||
from typing import ClassVar
|
||||
|
||||
import pydantic
|
||||
from asgiref.sync import sync_to_async
|
||||
from django.core.files import File
|
||||
from django.db import models
|
||||
from django.utils.functional import lazy
|
||||
|
@ -43,6 +42,14 @@ class Config(models.Model):
|
|||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
domain = models.ForeignKey(
|
||||
"users.domain",
|
||||
blank=True,
|
||||
null=True,
|
||||
related_name="configs",
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
json = models.JSONField(blank=True, null=True)
|
||||
image = models.ImageField(
|
||||
blank=True,
|
||||
|
@ -52,7 +59,7 @@ class Config(models.Model):
|
|||
|
||||
class Meta:
|
||||
unique_together = [
|
||||
("key", "user", "identity"),
|
||||
("key", "user", "identity", "domain"),
|
||||
]
|
||||
|
||||
system: ClassVar["Config.ConfigOptions"] # type: ignore
|
||||
|
@ -86,17 +93,7 @@ class Config(models.Model):
|
|||
"""
|
||||
return cls.load_values(
|
||||
cls.SystemOptions,
|
||||
{"identity__isnull": True, "user__isnull": True},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
async def aload_system(cls):
|
||||
"""
|
||||
Async loads the system config options object
|
||||
"""
|
||||
return await sync_to_async(cls.load_values)(
|
||||
cls.SystemOptions,
|
||||
{"identity__isnull": True, "user__isnull": True},
|
||||
{"identity__isnull": True, "user__isnull": True, "domain__isnull": True},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
@ -106,17 +103,7 @@ class Config(models.Model):
|
|||
"""
|
||||
return cls.load_values(
|
||||
cls.UserOptions,
|
||||
{"identity__isnull": True, "user": user},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
async def aload_user(cls, user):
|
||||
"""
|
||||
Async loads the user config options object
|
||||
"""
|
||||
return await sync_to_async(cls.load_values)(
|
||||
cls.UserOptions,
|
||||
{"identity__isnull": True, "user": user},
|
||||
{"identity__isnull": True, "user": user, "domain__isnull": True},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
@ -126,17 +113,17 @@ class Config(models.Model):
|
|||
"""
|
||||
return cls.load_values(
|
||||
cls.IdentityOptions,
|
||||
{"identity": identity, "user__isnull": True},
|
||||
{"identity": identity, "user__isnull": True, "domain__isnull": True},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
async def aload_identity(cls, identity):
|
||||
def load_domain(cls, domain):
|
||||
"""
|
||||
Async loads an identity config options object
|
||||
Loads an domain config options object
|
||||
"""
|
||||
return await sync_to_async(cls.load_values)(
|
||||
cls.IdentityOptions,
|
||||
{"identity": identity, "user__isnull": True},
|
||||
return cls.load_values(
|
||||
cls.DomainOptions,
|
||||
{"domain": domain, "user__isnull": True, "identity__isnull": True},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
@ -170,7 +157,7 @@ class Config(models.Model):
|
|||
key,
|
||||
value,
|
||||
cls.SystemOptions,
|
||||
{"identity__isnull": True, "user__isnull": True},
|
||||
{"identity__isnull": True, "user__isnull": True, "domain__isnull": True},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
@ -179,7 +166,7 @@ class Config(models.Model):
|
|||
key,
|
||||
value,
|
||||
cls.UserOptions,
|
||||
{"identity__isnull": True, "user": user},
|
||||
{"identity__isnull": True, "user": user, "domain__isnull": True},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
@ -188,11 +175,19 @@ class Config(models.Model):
|
|||
key,
|
||||
value,
|
||||
cls.IdentityOptions,
|
||||
{"identity": identity, "user__isnull": True},
|
||||
{"identity": identity, "user__isnull": True, "domain__isnull": True},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def set_domain(cls, domain, key, value):
|
||||
cls.set_value(
|
||||
key,
|
||||
value,
|
||||
cls.DomainOptions,
|
||||
{"domain": domain, "user__isnull": True, "identity__isnull": True},
|
||||
)
|
||||
|
||||
class SystemOptions(pydantic.BaseModel):
|
||||
|
||||
version: str = __version__
|
||||
|
||||
system_actor_public_key: str = ""
|
||||
|
@ -210,17 +205,20 @@ class Config(models.Model):
|
|||
policy_terms: str = ""
|
||||
policy_privacy: str = ""
|
||||
policy_rules: str = ""
|
||||
policy_issues: str = ""
|
||||
|
||||
signup_allowed: bool = True
|
||||
signup_text: str = ""
|
||||
signup_max_users: int = 0
|
||||
signup_email_admins: bool = True
|
||||
content_warning_text: str = "Content Warning"
|
||||
|
||||
post_length: int = 500
|
||||
max_media_attachments: int = 4
|
||||
post_minimum_interval: int = 3 # seconds
|
||||
identity_min_length: int = 2
|
||||
identity_max_per_user: int = 5
|
||||
identity_max_age: int = 24 * 60 * 60
|
||||
inbox_message_purge_after: int = 24 * 60 * 60
|
||||
public_timeline: bool = True
|
||||
|
||||
hashtag_unreviewed_are_public: bool = True
|
||||
|
@ -235,19 +233,23 @@ class Config(models.Model):
|
|||
|
||||
restricted_usernames: str = "admin\nadmins\nadministrator\nadministrators\nsystem\nroot\nannounce\nannouncement\nannouncements"
|
||||
|
||||
class UserOptions(pydantic.BaseModel):
|
||||
custom_head: str | None
|
||||
|
||||
pass
|
||||
class UserOptions(pydantic.BaseModel):
|
||||
light_theme: bool = False
|
||||
|
||||
class IdentityOptions(pydantic.BaseModel):
|
||||
|
||||
toot_mode: bool = False
|
||||
default_post_visibility: int = 0 # Post.Visibilities.public
|
||||
visible_follows: bool = True
|
||||
|
||||
# wellness Options
|
||||
search_enabled: bool = True
|
||||
visible_reaction_counts: bool = True
|
||||
expand_linked_cws: bool = True
|
||||
infinite_scroll: bool = True
|
||||
expand_content_warnings: bool = False
|
||||
boosts_on_profile: bool = True
|
||||
|
||||
custom_css: str | None
|
||||
class DomainOptions(pydantic.BaseModel):
|
||||
site_name: str = ""
|
||||
site_icon: UploadedImage | None = None
|
||||
hide_login: bool = False
|
||||
custom_css: str = ""
|
||||
single_user: str = ""
|
||||
|
|
|
@ -27,12 +27,14 @@ if SENTRY_ENABLED:
|
|||
set_context = sentry_sdk.set_context
|
||||
set_tag = sentry_sdk.set_tag
|
||||
start_transaction = sentry_sdk.start_transaction
|
||||
start_span = sentry_sdk.start_span
|
||||
else:
|
||||
configure_scope = noop_context
|
||||
push_scope = noop_context
|
||||
set_context = noop
|
||||
set_tag = noop
|
||||
start_transaction = noop_context
|
||||
start_span = noop_context
|
||||
|
||||
|
||||
def set_takahe_app(name: str):
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
import base64
|
||||
import json
|
||||
from typing import Literal, TypedDict
|
||||
import logging
|
||||
from ssl import SSLCertVerificationError, SSLError
|
||||
from typing import Literal, TypedDict, cast
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import httpx
|
||||
|
@ -12,10 +14,13 @@ from django.http import HttpRequest
|
|||
from django.utils import timezone
|
||||
from django.utils.http import http_date, parse_http_date
|
||||
from httpx._types import TimeoutTypes
|
||||
from idna.core import InvalidCodepoint
|
||||
from pyld import jsonld
|
||||
|
||||
from core.ld import format_ld_date
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class VerificationError(BaseException):
|
||||
"""
|
||||
|
@ -86,9 +91,9 @@ class HttpSignature:
|
|||
if header_name == "(request-target)":
|
||||
value = f"{request.method.lower()} {request.path}"
|
||||
elif header_name == "content-type":
|
||||
value = request.META["CONTENT_TYPE"]
|
||||
value = request.headers["content-type"]
|
||||
elif header_name == "content-length":
|
||||
value = request.META["CONTENT_LENGTH"]
|
||||
value = request.headers["content-length"]
|
||||
else:
|
||||
value = request.META["HTTP_%s" % header_name.upper().replace("-", "_")]
|
||||
headers[header_name] = value
|
||||
|
@ -101,12 +106,18 @@ class HttpSignature:
|
|||
name, value = item.split("=", 1)
|
||||
value = value.strip('"')
|
||||
bits[name.lower()] = value
|
||||
signature_details: HttpSignatureDetails = {
|
||||
"headers": bits["headers"].split(),
|
||||
"signature": base64.b64decode(bits["signature"]),
|
||||
"algorithm": bits["algorithm"],
|
||||
"keyid": bits["keyid"],
|
||||
}
|
||||
try:
|
||||
signature_details: HttpSignatureDetails = {
|
||||
"headers": bits["headers"].split(),
|
||||
"signature": base64.b64decode(bits["signature"]),
|
||||
"algorithm": bits["algorithm"],
|
||||
"keyid": bits["keyid"],
|
||||
}
|
||||
except KeyError as e:
|
||||
key_names = " ".join(bits.keys())
|
||||
raise VerificationError(
|
||||
f"Missing item from details (have: {key_names}, error: {e})"
|
||||
)
|
||||
return signature_details
|
||||
|
||||
@classmethod
|
||||
|
@ -125,13 +136,14 @@ class HttpSignature:
|
|||
cleartext: str,
|
||||
public_key: str,
|
||||
):
|
||||
public_key_instance = serialization.load_pem_public_key(
|
||||
public_key.encode("ascii")
|
||||
public_key_instance: rsa.RSAPublicKey = cast(
|
||||
rsa.RSAPublicKey,
|
||||
serialization.load_pem_public_key(public_key.encode("ascii")),
|
||||
)
|
||||
try:
|
||||
public_key_instance.verify(
|
||||
signature,
|
||||
cleartext.encode("ascii"),
|
||||
cleartext.encode("utf8"),
|
||||
padding.PKCS1v15(),
|
||||
hashes.SHA256(),
|
||||
)
|
||||
|
@ -158,7 +170,12 @@ class HttpSignature:
|
|||
raise VerificationFormatError("No signature header present")
|
||||
signature_details = cls.parse_signature(request.headers["signature"])
|
||||
# Reject unknown algorithms
|
||||
if signature_details["algorithm"] != "rsa-sha256":
|
||||
# hs2019 is used by some libraries to obfuscate the real algorithm per the spec
|
||||
# https://datatracker.ietf.org/doc/html/draft-cavage-http-signatures-12
|
||||
if (
|
||||
signature_details["algorithm"] != "rsa-sha256"
|
||||
and signature_details["algorithm"] != "hs2019"
|
||||
):
|
||||
raise VerificationFormatError("Unknown signature algorithm")
|
||||
# Create the signature payload
|
||||
headers_string = cls.headers_from_request(request, signature_details["headers"])
|
||||
|
@ -169,13 +186,13 @@ class HttpSignature:
|
|||
)
|
||||
|
||||
@classmethod
|
||||
async def signed_request(
|
||||
def signed_request(
|
||||
cls,
|
||||
uri: str,
|
||||
body: dict | None,
|
||||
private_key: str,
|
||||
key_id: str,
|
||||
content_type: str = "application/json",
|
||||
content_type: str = "application/activity+json",
|
||||
method: Literal["get", "post"] = "post",
|
||||
timeout: TimeoutTypes = settings.SETUP.REMOTE_TIMEOUT,
|
||||
):
|
||||
|
@ -202,17 +219,20 @@ class HttpSignature:
|
|||
body_bytes = b""
|
||||
# GET requests get implicit accept headers added
|
||||
if method == "get":
|
||||
headers["Accept"] = "application/ld+json"
|
||||
headers["Accept"] = "application/activity+json,application/ld+json"
|
||||
# Sign the headers
|
||||
signed_string = "\n".join(
|
||||
f"{name.lower()}: {value}" for name, value in headers.items()
|
||||
)
|
||||
private_key_instance = serialization.load_pem_private_key(
|
||||
private_key.encode("ascii"),
|
||||
password=None,
|
||||
private_key_instance: rsa.RSAPrivateKey = cast(
|
||||
rsa.RSAPrivateKey,
|
||||
serialization.load_pem_private_key(
|
||||
private_key.encode("ascii"),
|
||||
password=None,
|
||||
),
|
||||
)
|
||||
signature = private_key_instance.sign(
|
||||
signed_string.encode("ascii"),
|
||||
signed_string.encode("utf8"),
|
||||
padding.PKCS1v15(),
|
||||
hashes.SHA256(),
|
||||
)
|
||||
|
@ -230,14 +250,23 @@ class HttpSignature:
|
|||
|
||||
# Send the request with all those headers except the pseudo one
|
||||
del headers["(request-target)"]
|
||||
async with httpx.AsyncClient(timeout=timeout) as client:
|
||||
response = await client.request(
|
||||
method,
|
||||
uri,
|
||||
headers=headers,
|
||||
content=body_bytes,
|
||||
follow_redirects=method == "get",
|
||||
)
|
||||
with httpx.Client(timeout=timeout) as client:
|
||||
try:
|
||||
response = client.request(
|
||||
method,
|
||||
uri,
|
||||
headers=headers,
|
||||
content=body_bytes,
|
||||
follow_redirects=method == "get",
|
||||
)
|
||||
except SSLError as invalid_cert:
|
||||
# Not our problem if the other end doesn't have proper SSL
|
||||
logger.info("Invalid cert on %s %s", uri, invalid_cert)
|
||||
raise SSLCertVerificationError(invalid_cert) from invalid_cert
|
||||
except InvalidCodepoint as ex:
|
||||
# Convert to a more generic error we handle
|
||||
raise httpx.HTTPError(f"InvalidCodepoint: {str(ex)}") from None
|
||||
|
||||
if (
|
||||
method == "post"
|
||||
and response.status_code >= 400
|
||||
|
@ -268,6 +297,8 @@ class LDSignature:
|
|||
Verifies a document
|
||||
"""
|
||||
try:
|
||||
# causing side effects to the original document is bad form
|
||||
document = document.copy()
|
||||
# Strip out the signature from the incoming document
|
||||
signature = document.pop("signature")
|
||||
# Create the options document
|
||||
|
@ -283,8 +314,9 @@ class LDSignature:
|
|||
# Get the normalised hash of each document
|
||||
final_hash = cls.normalized_hash(options) + cls.normalized_hash(document)
|
||||
# Verify the signature
|
||||
public_key_instance = serialization.load_pem_public_key(
|
||||
public_key.encode("ascii")
|
||||
public_key_instance: rsa.RSAPublicKey = cast(
|
||||
rsa.RSAPublicKey,
|
||||
serialization.load_pem_public_key(public_key.encode("ascii")),
|
||||
)
|
||||
try:
|
||||
public_key_instance.verify(
|
||||
|
@ -294,7 +326,7 @@ class LDSignature:
|
|||
hashes.SHA256(),
|
||||
)
|
||||
except InvalidSignature:
|
||||
raise VerificationError("Signature mismatch")
|
||||
raise VerificationError("LDSignature mismatch")
|
||||
|
||||
@classmethod
|
||||
def create_signature(
|
||||
|
@ -312,9 +344,12 @@ class LDSignature:
|
|||
# Get the normalised hash of each document
|
||||
final_hash = cls.normalized_hash(options) + cls.normalized_hash(document)
|
||||
# Create the signature
|
||||
private_key_instance = serialization.load_pem_private_key(
|
||||
private_key.encode("ascii"),
|
||||
password=None,
|
||||
private_key_instance: rsa.RSAPrivateKey = cast(
|
||||
rsa.RSAPrivateKey,
|
||||
serialization.load_pem_private_key(
|
||||
private_key.encode("ascii"),
|
||||
password=None,
|
||||
),
|
||||
)
|
||||
signature = base64.b64encode(
|
||||
private_key_instance.sign(
|
||||
|
|
81
core/snowflake.py
Normal file
81
core/snowflake.py
Normal file
|
@ -0,0 +1,81 @@
|
|||
import secrets
|
||||
import time
|
||||
|
||||
|
||||
class Snowflake:
    """
    Generates and decodes snowflake-style integer IDs.
    """

    # Epoch is 2022/1/1 at midnight, as these are used for _created_ times in our
    # own database, not original publish times (which would need an earlier one)
    EPOCH = 1641020400

    TYPE_POST = 0b000
    TYPE_POST_INTERACTION = 0b001
    TYPE_IDENTITY = 0b010
    TYPE_REPORT = 0b011
    TYPE_FOLLOW = 0b100

    @classmethod
    def generate(cls, type_id: int) -> int:
        """
        Creates a new snowflake ID tagged with the given type. The result
        fits inside 63 bits, i.e. a signed bigint column.

        Bit layout, high to low:
        * 41 bits: milliseconds since EPOCH (enough for ~69 years)
        * 19 bits: random entropy (roughly 1% clash chance at 10000 IDs/ms)
        * 3 bits: the type tag

        Random bits are used instead of a DB-backed sequence so the database
        stays out of ID generation; a rare collision simply fails the insert,
        and Stator will retry anything arriving remotely, leaving only our
        own posts/likes/etc. to handle that scenario themselves.
        """
        # Millisecond-precision offset from our custom epoch
        millis = int((time.time() - cls.EPOCH) * 1000)
        entropy = secrets.randbits(19)
        return (millis << 22) | (entropy << 3) | type_id

    @classmethod
    def _ensure_valid(cls, snowflake: int) -> None:
        # An ID with no timestamp bits set cannot be one of ours
        if snowflake < (1 << 22):
            raise ValueError("Not a valid Snowflake ID")

    @classmethod
    def get_type(cls, snowflake: int) -> int:
        """
        Extracts the type tag (lowest 3 bits) from a snowflake ID.
        """
        cls._ensure_valid(snowflake)
        return snowflake & 0b111

    @classmethod
    def get_time(cls, snowflake: int) -> float:
        """
        Extracts the generation time of the ID as a UNIX timestamp (seconds).
        """
        cls._ensure_valid(snowflake)
        return ((snowflake >> 22) / 1000) + cls.EPOCH

    # Convenience wrappers, intended as Django model field defaults

    @classmethod
    def generate_post(cls) -> int:
        return cls.generate(cls.TYPE_POST)

    @classmethod
    def generate_post_interaction(cls) -> int:
        return cls.generate(cls.TYPE_POST_INTERACTION)

    @classmethod
    def generate_identity(cls) -> int:
        return cls.generate(cls.TYPE_IDENTITY)

    @classmethod
    def generate_report(cls) -> int:
        return cls.generate(cls.TYPE_REPORT)

    @classmethod
    def generate_follow(cls) -> int:
        return cls.generate(cls.TYPE_FOLLOW)
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue