Merge branch 'main' into csv

commit e5b260e3ee
Author: Hugh Rundle
Date: 2024-06-29 16:03:15 +10:00
GPG key ID: A7E35779918253F9 (no known key found for this signature in database)
400 changed files with 39,801 additions and 9,079 deletions


@@ -16,6 +16,11 @@ DEFAULT_LANGUAGE="English"
 ## Leave unset to allow all hosts
 # ALLOWED_HOSTS="localhost,127.0.0.1,[::1]"
+# Specify when the site is served from a port that is not the default
+# for the protocol (80 for HTTP or 443 for HTTPS).
+# Probably only necessary in development.
+# PORT=1333
 MEDIA_ROOT=images/
 # Database configuration
@@ -71,14 +76,20 @@ ENABLE_THUMBNAIL_GENERATION=true
 USE_S3=false
 AWS_ACCESS_KEY_ID=
 AWS_SECRET_ACCESS_KEY=
+# seconds for signed S3 urls to expire
+# this is currently only used for user export files
+S3_SIGNED_URL_EXPIRY=900
 # Commented are example values if you use a non-AWS, S3-compatible service
 # AWS S3 should work with only AWS_STORAGE_BUCKET_NAME and AWS_S3_REGION_NAME
 # non-AWS S3-compatible services will need AWS_STORAGE_BUCKET_NAME,
-# along with both AWS_S3_CUSTOM_DOMAIN and AWS_S3_ENDPOINT_URL
+# along with both AWS_S3_CUSTOM_DOMAIN and AWS_S3_ENDPOINT_URL.
+# AWS_S3_URL_PROTOCOL must end in ":" and defaults to the same protocol as
+# the BookWyrm instance ("http:" or "https:", based on USE_SSL).
 # AWS_STORAGE_BUCKET_NAME= # "example-bucket-name"
 # AWS_S3_CUSTOM_DOMAIN=None # "example-bucket-name.s3.fr-par.scw.cloud"
+# AWS_S3_URL_PROTOCOL=None # "http:"
 # AWS_S3_REGION_NAME=None # "fr-par"
 # AWS_S3_ENDPOINT_URL=None # "https://s3.fr-par.scw.cloud"
@@ -133,7 +144,14 @@ HTTP_X_FORWARDED_PROTO=false
 TWO_FACTOR_LOGIN_VALIDITY_WINDOW=2
 TWO_FACTOR_LOGIN_MAX_SECONDS=60
-# Additional hosts to allow in the Content-Security-Policy, "self" (should be DOMAIN)
-# and AWS_S3_CUSTOM_DOMAIN (if used) are added by default.
-# Value should be a comma-separated list of host names.
+# Additional hosts to allow in the Content-Security-Policy, "self" (should be
+# DOMAIN with optionally ":" + PORT) and AWS_S3_CUSTOM_DOMAIN (if used) are
+# added by default. Value should be a comma-separated list of host names.
 CSP_ADDITIONAL_HOSTS=
+# Time before being logged out (in seconds)
+# SESSION_COOKIE_AGE=2592000 # current default: 30 days
+# Maximum allowed memory for file uploads (increase if users are having trouble
+# uploading BookWyrm export files).
+# DATA_UPLOAD_MAX_MEMORY_MiB=100
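The two most operationally interesting additions above are S3_SIGNED_URL_EXPIRY and DATA_UPLOAD_MAX_MEMORY_MiB. A minimal sketch of how values like these are typically read into a Django settings module (the conversion shown is an assumption for illustration, not code from this commit; DATA_UPLOAD_MAX_MEMORY_SIZE is the standard Django setting and is expressed in bytes):

    import os

    # lifetime of signed S3 URLs (seconds), per the new .env option above
    S3_SIGNED_URL_EXPIRY = int(os.environ.get("S3_SIGNED_URL_EXPIRY", 900))

    # Django's upload limit is in bytes, so a MiB-denominated .env value
    # would be converted before being assigned
    DATA_UPLOAD_MAX_MEMORY_SIZE = (
        int(os.environ.get("DATA_UPLOAD_MAX_MEMORY_MiB", 100)) * 1024 * 1024
    )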

.github/pull_request_template.md vendored Normal file

@@ -0,0 +1,78 @@
<!--
Thanks for contributing!
Please ensure the name of your PR is written in imperative present tense. For example:
- "fix color contrast on submit buttons"
- "add 'favourite food' value to Author model"
To check (tick) a list item, replace the space between square brackets with an x, like this:
- [x] I have checked the box
You can find more information and tips for BookWyrm contributors at https://docs.joinbookwyrm.com/contributing.html
-->
## Are you finished?
### Linters
<!--
Please run linters on your code before submitting your PR.
If you miss this step it is likely that the GitHub task runners will fail.
-->
- [ ] I have checked my code with `black`, `pylint`, and `mypy`, or `./bw-dev formatters`
### Tests
<!-- Check one -->
- [ ] My changes do not need new tests
- [ ] All tests I have added are passing
- [ ] I have written tests but need help to make them pass
- [ ] I have not written tests and need help to write them
## What type of Pull Request is this?
<!-- Check all that apply -->
- [ ] Bug Fix
- [ ] Enhancement
- [ ] Plumbing / Internals / Dependencies
- [ ] Refactor
## Does this PR change settings or dependencies, or break something?
<!-- Check all that apply -->
- [ ] This PR changes or adds default settings, configuration, or .env values
- [ ] This PR changes or adds dependencies
- [ ] This PR introduces other breaking changes
### Details of breaking or configuration changes (if any of above checked)
## Description
<!--
Describe what your pull request does here.
For pull requests that relate or close an issue, please include them
below. We like to follow [Github's guidance on linking issues to pull requests](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).
For example having the text: "closes #1234" would connect the current pull
request to issue 1234. And when we merge the pull request, Github will
automatically close the issue.
-->
- Related Issue #
- Closes #
## Documentation
<!--
Documentation for users, admins, and developers is an important way to keep the BookWyrm community welcoming and make Bookwyrm easy to use.
Our documentation is maintained in a separate repository at https://github.com/bookwyrm-social/documentation
-->
<!-- Check all that apply -->
- [ ] New or amended documentation will be required if this PR is merged
- [ ] I have created a matching pull request in the Documentation repository
- [ ] I intend to create a matching pull request in the Documentation repository after this PR is merged

.github/release.yml vendored Normal file

@@ -0,0 +1,26 @@
changelog:
exclude:
labels:
- ignore-for-release
categories:
- title: ‼️ Breaking Changes & New Settings ⚙️
labels:
- breaking-change
- config-change
- title: Updated Dependencies 🧸
labels:
- dependencies
- title: New Features 🎉
labels:
- enhancement
- title: Bug Fixes 🐛
labels:
- fix
- bug
- title: Internals/Plumbing 👩‍🔧
labels:
- plumbing
- tests
- deployment
- title: Other Changes
labels:
- "*"


@@ -1,17 +0,0 @@
name: Python Formatting (run ./bw-dev black to fix)
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
- uses: psf/black@22.12.0
with:
version: 22.12.0


@@ -36,11 +36,11 @@ jobs:
 steps:
 - name: Checkout repository
-uses: actions/checkout@v3
+uses: actions/checkout@v4
 # Initializes the CodeQL tools for scanning.
 - name: Initialize CodeQL
-uses: github/codeql-action/init@v2
+uses: github/codeql-action/init@v3
 with:
 languages: ${{ matrix.language }}
 # If you wish to specify custom queries, you can do so here or in a config file.
@@ -51,7 +51,7 @@ jobs:
 # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
 # If this step fails, then you should remove it and run the build manually (see below)
 - name: Autobuild
-uses: github/codeql-action/autobuild@v2
+uses: github/codeql-action/autobuild@v3
 # Command-line programs to run using the OS shell.
 # 📚 https://git.io/JvXDl
@@ -65,4 +65,4 @@ jobs:
 # make release
 - name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@v2
+uses: github/codeql-action/analyze@v3


@@ -10,7 +10,7 @@ jobs:
 lint:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4
 - name: Install curlylint
 run: pip install curlylint


@@ -1,70 +0,0 @@
name: Run Python Tests
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
jobs:
build:
runs-on: ubuntu-20.04
services:
postgres:
image: postgres:13
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: hunter2
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.9
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Check migrations up-to-date
run: |
python ./manage.py makemigrations --check
env:
SECRET_KEY: beepbeep
DOMAIN: your.domain.here
EMAIL_HOST: ""
EMAIL_HOST_USER: ""
EMAIL_HOST_PASSWORD: ""
- name: Run Tests
env:
SECRET_KEY: beepbeep
DEBUG: false
USE_HTTPS: true
DOMAIN: your.domain.here
BOOKWYRM_DATABASE_BACKEND: postgres
MEDIA_ROOT: images/
POSTGRES_PASSWORD: hunter2
POSTGRES_USER: postgres
POSTGRES_DB: github_actions
POSTGRES_HOST: 127.0.0.1
CELERY_BROKER: ""
REDIS_BROKER_PORT: 6379
REDIS_BROKER_PASSWORD: beep
USE_DUMMY_CACHE: true
FLOWER_PORT: 8888
EMAIL_HOST: "smtp.mailgun.org"
EMAIL_PORT: 587
EMAIL_HOST_USER: ""
EMAIL_HOST_PASSWORD: ""
EMAIL_USE_TLS: true
ENABLE_PREVIEW_IMAGES: false
ENABLE_THUMBNAIL_GENERATION: true
HTTP_X_FORWARDED_PROTO: false
run: |
pytest -n 3


@@ -19,10 +19,11 @@ jobs:
 steps:
 # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4
 - name: Install modules
-run: npm install stylelint stylelint-config-recommended stylelint-config-standard stylelint-order eslint
+# run: npm install stylelint stylelint-config-recommended stylelint-config-standard stylelint-order eslint
+run: npm install eslint@^8.9.0
 # See .stylelintignore for files that are not linted.
 # - name: Run stylelint


@@ -1,50 +0,0 @@
name: Mypy
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.9
uses: actions/setup-python@v4
with:
python-version: 3.9
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Analysing the code with mypy
env:
SECRET_KEY: beepbeep
DEBUG: false
USE_HTTPS: true
DOMAIN: your.domain.here
BOOKWYRM_DATABASE_BACKEND: postgres
MEDIA_ROOT: images/
POSTGRES_PASSWORD: hunter2
POSTGRES_USER: postgres
POSTGRES_DB: github_actions
POSTGRES_HOST: 127.0.0.1
CELERY_BROKER: ""
REDIS_BROKER_PORT: 6379
REDIS_BROKER_PASSWORD: beep
USE_DUMMY_CACHE: true
FLOWER_PORT: 8888
EMAIL_HOST: "smtp.mailgun.org"
EMAIL_PORT: 587
EMAIL_HOST_USER: ""
EMAIL_HOST_PASSWORD: ""
EMAIL_USE_TLS: true
ENABLE_PREVIEW_IMAGES: false
ENABLE_THUMBNAIL_GENERATION: true
HTTP_X_FORWARDED_PROTO: false
run: |
mypy bookwyrm celerywyrm


@@ -14,7 +14,7 @@ jobs:
 steps:
 # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4
 - name: Install modules
 run: npm install prettier@2.5.1


@@ -1,27 +0,0 @@
name: Pylint
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.9
uses: actions/setup-python@v4
with:
python-version: 3.9
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Analysing the code with pylint
run: |
pylint bookwyrm/

.github/workflows/python.yml vendored Normal file

@@ -0,0 +1,99 @@
name: Python
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
# overrides for .env.example
env:
POSTGRES_HOST: 127.0.0.1
PGPORT: 5432
POSTGRES_USER: postgres
POSTGRES_PASSWORD: hunter2
POSTGRES_DB: github_actions
SECRET_KEY: beepbeep
EMAIL_HOST_USER: ""
EMAIL_HOST_PASSWORD: ""
jobs:
pytest:
name: Tests (pytest)
runs-on: ubuntu-latest
services:
postgres:
image: postgres:13
env: # does not inherit from jobs.build.env
POSTGRES_USER: postgres
POSTGRES_PASSWORD: hunter2
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v5
with:
python-version: 3.11
cache: pip
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
pip install pytest-github-actions-annotate-failures
- name: Set up .env
run: cp .env.example .env
- name: Check migrations up-to-date
run: python ./manage.py makemigrations --check
- name: Run Tests
run: pytest -n 3
pylint:
name: Linting (pylint)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v5
with:
python-version: 3.11
cache: pip
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Analyse code with pylint
run: pylint bookwyrm/
mypy:
name: Typing (mypy)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v5
with:
python-version: 3.11
cache: pip
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Set up .env
run: cp .env.example .env
- name: Analyse code with mypy
run: mypy bookwyrm celerywyrm
black:
name: Formatting (black; run ./bw-dev black to fix)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: psf/black@stable
with:
version: "22.*"

.gitignore vendored

@@ -16,6 +16,8 @@
 # BookWyrm
 .env
 /images/
+/exports/
+/static/
 bookwyrm/static/css/bookwyrm.css
 bookwyrm/static/css/themes/
 !bookwyrm/static/css/themes/bookwyrm-*.scss
@@ -36,3 +38,6 @@ nginx/default.conf
 #macOS
 **/.DS_Store
+# Docker
+docker-compose.override.yml


@@ -1,4 +1,4 @@
-FROM python:3.9
+FROM python:3.11
 ENV PYTHONUNBUFFERED 1


@@ -10,7 +10,6 @@ BookWyrm is a social network for tracking your reading, talking about books, wri
 ## Links
 [![Mastodon Follow](https://img.shields.io/mastodon/follow/000146121?domain=https%3A%2F%2Ftech.lgbt&style=social)](https://tech.lgbt/@bookwyrm)
-[![Twitter Follow](https://img.shields.io/twitter/follow/BookWyrmSocial?style=social)](https://twitter.com/BookWyrmSocial)
 - [Project homepage](https://joinbookwyrm.com/)
 - [Support](https://patreon.com/bookwyrm)


@@ -1 +1 @@
-0.7.0
+0.7.3


@@ -20,6 +20,7 @@ from bookwyrm.tasks import app, MISC
 logger = logging.getLogger(__name__)
+# pylint: disable=invalid-name
 TBookWyrmModel = TypeVar("TBookWyrmModel", bound=base_model.BookWyrmModel)
@@ -236,7 +237,7 @@ class ActivityObject:
 omit = kwargs.get("omit", ())
 data = self.__dict__.copy()
 # recursively serialize
-for (k, v) in data.items():
+for k, v in data.items():
 try:
 if issubclass(type(v), ActivityObject):
 data[k] = v.serialize()
@@ -249,7 +250,10 @@ class ActivityObject:
 pass
 data = {k: v for (k, v) in data.items() if v is not None and k not in omit}
 if "@context" not in omit:
-data["@context"] = "https://www.w3.org/ns/activitystreams"
+data["@context"] = [
+"https://www.w3.org/ns/activitystreams",
+{"Hashtag": "as:Hashtag"},
+]
 return data
@@ -397,18 +401,14 @@ def resolve_remote_id(
 def get_representative():
 """Get or create an actor representing the instance
 to sign outgoing HTTP GET requests"""
-username = f"{INSTANCE_ACTOR_USERNAME}@{DOMAIN}"
-email = "bookwyrm@localhost"
-try:
-user = models.User.objects.get(username=username)
-except models.User.DoesNotExist:
-user = models.User.objects.create_user(
-username=username,
-email=email,
-local=True,
-localname=INSTANCE_ACTOR_USERNAME,
-)
-return user
+return models.User.objects.get_or_create(
+username=f"{INSTANCE_ACTOR_USERNAME}@{DOMAIN}",
+defaults={
+"email": "bookwyrm@localhost",
+"local": True,
+"localname": INSTANCE_ACTOR_USERNAME,
+},
+)[0]
 def get_activitypub_data(url):
@@ -427,6 +427,7 @@ def get_activitypub_data(url):
 "Date": now,
 "Signature": make_signature("get", sender, url, now),
 },
+timeout=15,
 )
 except requests.RequestException:
 raise ConnectorException()


@@ -1,5 +1,5 @@
 """ actor serializer """
-from dataclasses import dataclass, field
+from dataclasses import dataclass
 from typing import Dict
 from .base_activity import ActivityObject
@@ -35,7 +35,7 @@ class Person(ActivityObject):
 endpoints: Dict = None
 name: str = None
 summary: str = None
-icon: Image = field(default_factory=lambda: {})
+icon: Image = None
 bookwyrmUser: bool = False
 manuallyApprovesFollowers: str = False
 discoverable: str = False


@@ -171,9 +171,19 @@ class Reject(Verb):
 type: str = "Reject"
 def action(self, allow_external_connections=True):
-"""reject a follow request"""
-obj = self.object.to_model(save=False, allow_create=False)
-obj.reject()
+"""reject a follow or follow request"""
+for model_name in ["UserFollowRequest", "UserFollows", None]:
+model = apps.get_model(f"bookwyrm.{model_name}") if model_name else None
+if obj := self.object.to_model(
+model=model,
+save=False,
+allow_create=False,
+allow_external_connections=allow_external_connections,
+):
+# Reject the first model that can be built.
+obj.reject()
+break
 @dataclass(init=False)


@@ -139,14 +139,14 @@ class ActivityStream(RedisStore):
 | (
 Q(following=status.user) & Q(following=status.reply_parent.user)
 ) # if the user is following both authors
-).distinct()
+)
 # only visible to the poster's followers and tagged users
 elif status.privacy == "followers":
 audience = audience.filter(
 Q(following=status.user) # if the user is following the author
 )
-return audience.distinct()
+return audience.distinct("id")
 @tracer.start_as_current_span("ActivityStream.get_audience")
 def get_audience(self, status):
@@ -156,7 +156,7 @@
 status_author = models.User.objects.filter(
 is_active=True, local=True, id=status.user.id
 ).values_list("id", flat=True)
-return list(set(list(audience) + list(status_author)))
+return list(set(audience) | set(status_author))
 def get_stores_for_users(self, user_ids):
 """convert a list of user ids into redis store ids"""
@@ -183,15 +183,13 @@ class HomeStream(ActivityStream):
 def get_audience(self, status):
 trace.get_current_span().set_attribute("stream_id", self.key)
 audience = super()._get_audience(status)
-if not audience:
-return []
 # if the user is following the author
 audience = audience.filter(following=status.user).values_list("id", flat=True)
 # if the user is the post's author
 status_author = models.User.objects.filter(
 is_active=True, local=True, id=status.user.id
 ).values_list("id", flat=True)
-return list(set(list(audience) + list(status_author)))
+return list(set(audience) | set(status_author))
 def get_statuses_for_user(self, user):
 return models.Status.privacy_filter(
@@ -239,9 +237,7 @@ class BooksStream(ActivityStream):
 )
 audience = super()._get_audience(status)
-if not audience:
-return models.User.objects.none()
-return audience.filter(shelfbook__book__parent_work=work).distinct()
+return audience.filter(shelfbook__book__parent_work=work)
 def get_audience(self, status):
 # only show public statuses on the books feed,


@@ -1,4 +1,5 @@
 """Do further startup configuration and initialization"""
 import os
 import urllib
 import logging
@@ -14,16 +15,16 @@ def download_file(url, destination):
 """Downloads a file to the given path"""
 try:
 # Ensure our destination directory exists
-os.makedirs(os.path.dirname(destination))
+os.makedirs(os.path.dirname(destination), exist_ok=True)
 with urllib.request.urlopen(url) as stream:
 with open(destination, "b+w") as outfile:
 outfile.write(stream.read())
-except (urllib.error.HTTPError, urllib.error.URLError):
-logger.info("Failed to download file %s", url)
-except OSError:
-logger.info("Couldn't open font file %s for writing", destination)
-except: # pylint: disable=bare-except
-logger.info("Unknown error in file download")
+except (urllib.error.HTTPError, urllib.error.URLError) as err:
+logger.error("Failed to download file %s: %s", url, err)
+except OSError as err:
+logger.error("Couldn't open font file %s for writing: %s", destination, err)
+except Exception as err: # pylint:disable=broad-except
+logger.error("Unknown error in file download: %s", err)
 class BookwyrmConfig(AppConfig):


@@ -43,6 +43,7 @@ def search(
 min_confidence: float = 0,
 filters: Optional[list[Any]] = None,
 return_first: bool = False,
+books: Optional[QuerySet[models.Edition]] = None,
 ) -> Union[Optional[models.Edition], QuerySet[models.Edition]]:
 """search your local database"""
 filters = filters or []
@@ -54,13 +55,15 @@
 # first, try searching unique identifiers
 # unique identifiers never have spaces, title/author usually do
 if not " " in query:
-results = search_identifiers(query, *filters, return_first=return_first)
+results = search_identifiers(
+query, *filters, return_first=return_first, books=books
+)
 # if there were no identifier results...
 if not results:
 # then try searching title/author
 results = search_title_author(
-query, min_confidence, *filters, return_first=return_first
+query, min_confidence, *filters, return_first=return_first, books=books
 )
 return results
@@ -98,9 +101,17 @@ def format_search_result(search_result):
 def search_identifiers(
-query, *filters, return_first=False
+query,
+*filters,
+return_first=False,
+books=None,
 ) -> Union[Optional[models.Edition], QuerySet[models.Edition]]:
-"""tries remote_id, isbn; defined as dedupe fields on the model"""
+"""search Editions by deduplication fields
+Best for cases when we can assume someone is searching for an exact match on
+commonly unique data identifiers like isbn or specific library ids.
+"""
+books = books or models.Edition.objects
 if connectors.maybe_isbn(query):
 # Oh did you think the 'S' in ISBN stood for 'standard'?
 normalized_isbn = query.strip().upper().rjust(10, "0")
@@ -111,7 +122,7 @@ def search_identifiers(
 for f in models.Edition._meta.get_fields()
 if hasattr(f, "deduplication_field") and f.deduplication_field
 ]
-results = models.Edition.objects.filter(
+results = books.filter(
 *filters, reduce(operator.or_, (Q(**f) for f in or_filters))
 ).distinct()
@@ -121,12 +132,17 @@
 def search_title_author(
-query, min_confidence, *filters, return_first=False
+query,
+min_confidence,
+*filters,
+return_first=False,
+books=None,
 ) -> QuerySet[models.Edition]:
 """searches for title and author"""
+books = books or models.Edition.objects
 query = SearchQuery(query, config="simple") | SearchQuery(query, config="english")
 results = (
-models.Edition.objects.filter(*filters, search_vector=query)
+books.filter(*filters, search_vector=query)
 .annotate(rank=SearchRank(F("search_vector"), query))
 .filter(rank__gt=min_confidence)
 .order_by("-rank")
@@ -137,7 +153,7 @@ def search_title_author(
 # filter out multiple editions of the same work
 list_results = []
-for work_id in set(editions_of_work[:30]):
+for work_id in editions_of_work[:30]:
 result = (
 results.filter(parent_work=work_id)
 .order_by("-rank", "-edition_rank")


@@ -3,7 +3,9 @@ from __future__ import annotations
 from abc import ABC, abstractmethod
 from typing import Optional, TypedDict, Any, Callable, Union, Iterator
 from urllib.parse import quote_plus
-import imghdr
+# pylint: disable-next=deprecated-module
+import imghdr # Deprecated in 3.11 for removal in 3.13; no good alternative yet
 import logging
 import re
 import asyncio


@@ -118,9 +118,11 @@ def get_connectors() -> Iterator[abstract_connector.AbstractConnector]:
 def get_or_create_connector(remote_id: str) -> abstract_connector.AbstractConnector:
 """get the connector related to the object's server"""
 url = urlparse(remote_id)
-identifier = url.netloc
+identifier = url.hostname
 if not identifier:
-raise ValueError("Invalid remote id")
+raise ValueError(f"Invalid remote id: {remote_id}")
+base_url = f"{url.scheme}://{url.netloc}"
 try:
 connector_info = models.Connector.objects.get(identifier=identifier)
@@ -128,10 +130,10 @@ def get_or_create_connector(remote_id: str) -> abstract_connector.AbstractConnec
 connector_info = models.Connector.objects.create(
 identifier=identifier,
 connector_file="bookwyrm_connector",
-base_url=f"https://{identifier}",
-books_url=f"https://{identifier}/book",
-covers_url=f"https://{identifier}/images/covers",
-search_url=f"https://{identifier}/search?q=",
+base_url=base_url,
+books_url=f"{base_url}/book",
+covers_url=f"{base_url}/images/covers",
+search_url=f"{base_url}/search?q=",
 priority=2,
 )
@@ -143,7 +145,9 @@ def load_more_data(connector_id: str, book_id: str) -> None:
 """background the work of getting all 10,000 editions of LoTR"""
 connector_info = models.Connector.objects.get(id=connector_id)
 connector = load_connector(connector_info)
-book = models.Book.objects.select_subclasses().get(id=book_id)
+book = models.Book.objects.select_subclasses().get( # type: ignore[no-untyped-call]
+id=book_id
+)
 connector.expand_book_data(book)
@@ -154,7 +158,9 @@ def create_edition_task(
 """separate task for each of the 10,000 editions of LoTR"""
 connector_info = models.Connector.objects.get(id=connector_id)
 connector = load_connector(connector_info)
-work = models.Work.objects.select_subclasses().get(id=work_id)
+work = models.Work.objects.select_subclasses().get( # type: ignore[no-untyped-call]
+id=work_id
+)
 connector.create_edition_from_data(work, data)
@@ -188,8 +194,11 @@ def raise_not_valid_url(url: str) -> None:
 if not parsed.scheme in ["http", "https"]:
 raise ConnectorException("Invalid scheme: ", url)
+if not parsed.hostname:
+raise ConnectorException("Hostname missing: ", url)
 try:
-ipaddress.ip_address(parsed.netloc)
+ipaddress.ip_address(parsed.hostname)
 raise ConnectorException("Provided url is an IP address: ", url)
 except ValueError:
 # it's not an IP address, which is good


@@ -229,7 +229,7 @@ class Connector(AbstractConnector):
 data = get_data(url)
 except ConnectorException:
 return ""
-return data.get("extract", "")
+return str(data.get("extract", ""))
 def get_remote_id_from_model(self, obj: models.BookDataModel) -> str:
 """use get_remote_id to figure out the link from a model obj"""


@@ -4,7 +4,7 @@ from django.template.loader import get_template
 from bookwyrm import models, settings
 from bookwyrm.tasks import app, EMAIL
-from bookwyrm.settings import DOMAIN
+from bookwyrm.settings import DOMAIN, BASE_URL
 def email_data():
@@ -14,6 +14,7 @@ def email_data():
 "site_name": site.name,
 "logo": site.logo_small_url,
 "domain": DOMAIN,
+"base_url": BASE_URL,
 "user": None,
 }


@@ -15,6 +15,7 @@ class AuthorForm(CustomForm):
 "aliases",
 "bio",
 "wikipedia_link",
+"wikidata",
 "website",
 "born",
 "died",
@@ -32,6 +33,7 @@
 "wikipedia_link": forms.TextInput(
 attrs={"aria-describedby": "desc_wikipedia_link"}
 ),
+"wikidata": forms.TextInput(attrs={"aria-describedby": "desc_wikidata"}),
 "website": forms.TextInput(attrs={"aria-describedby": "desc_website"}),
 "born": forms.SelectDateWidget(attrs={"aria-describedby": "desc_born"}),
 "died": forms.SelectDateWidget(attrs={"aria-describedby": "desc_died"}),


@@ -1,8 +1,9 @@
 """ using django model forms """
 from django import forms
+from file_resubmit.widgets import ResubmitImageWidget
 from bookwyrm import models
-from bookwyrm.models.fields import ClearableFileInputWithWarning
 from .custom_form import CustomForm
 from .widgets import ArrayWidget, SelectDateWidget, Select
@@ -70,9 +71,7 @@ class EditionForm(CustomForm):
 "published_date": SelectDateWidget(
 attrs={"aria-describedby": "desc_published_date"}
 ),
-"cover": ClearableFileInputWithWarning(
-attrs={"aria-describedby": "desc_cover"}
-),
+"cover": ResubmitImageWidget(attrs={"aria-describedby": "desc_cover"}),
 "physical_format": Select(
 attrs={"aria-describedby": "desc_physical_format"}
 ),


@@ -25,6 +25,10 @@ class ImportForm(forms.Form):
 csv_file = forms.FileField()
+class ImportUserForm(forms.Form):
+archive_file = forms.FileField()
 class ShelfForm(CustomForm):
 class Meta:
 model = models.Shelf


@@ -1,4 +1,5 @@
 """ using django model forms """
 from urllib.parse import urlparse
 from django.utils.translation import gettext_lazy as _
@@ -25,7 +26,7 @@ class FileLinkForm(CustomForm):
 url = cleaned_data.get("url")
 filetype = cleaned_data.get("filetype")
 book = cleaned_data.get("book")
-domain = urlparse(url).netloc
+domain = urlparse(url).hostname
 if models.LinkDomain.objects.filter(domain=domain).exists():
 status = models.LinkDomain.objects.get(domain=domain).status
 if status == "blocked":
@@ -37,10 +38,9 @@
 ),
 )
 if (
-not self.instance
-and models.FileLink.objects.filter(
-url=url, book=book, filetype=filetype
-).exists()
+models.FileLink.objects.filter(url=url, book=book, filetype=filetype)
+.exclude(pk=self.instance)
+.exists()
 ):
 # pylint: disable=line-too-long
 self.add_error(


@@ -1,7 +1,7 @@
 """ import classes """
 from .importer import Importer
-from .bookwyrm_import import BookwyrmBooksImporter
+from .bookwyrm_import import BookwyrmImporter
 from .calibre_import import CalibreImporter
 from .goodreads_import import GoodreadsImporter
 from .librarything_import import LibrarythingImporter


@@ -1,14 +1,24 @@
-""" handle reading a csv from BookWyrm """
-from typing import Any
-from . import Importer
-class BookwyrmBooksImporter(Importer):
-"""Goodreads is the default importer, we basically just use the same structure"""
-service = "BookWyrm"
-def __init__(self, *args: Any, **kwargs: Any):
-self.row_mappings_guesses.append(("shelf_name", ["shelf_name"]))
-super().__init__(*args, **kwargs)
+"""Import data from Bookwyrm export files"""
+from django.http import QueryDict
+from bookwyrm.models import User
+from bookwyrm.models.bookwyrm_import_job import BookwyrmImportJob
+class BookwyrmImporter:
+"""Import a Bookwyrm User export file.
+This is kind of a combination of an importer and a connector.
+"""
+# pylint: disable=no-self-use
+def process_import(
+self, user: User, archive_file: bytes, settings: QueryDict
+) -> BookwyrmImportJob:
+"""import user data from a Bookwyrm export file"""
+required = [k for k in settings if settings.get(k) == "on"]
+job = BookwyrmImportJob.objects.create(
+user=user, archive_file=archive_file, required=required
+)
+return job
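A hedged sketch of how the new BookwyrmImporter and the ImportUserForm added earlier in this commit might be wired together in a view; the view name and redirect target are assumptions, not part of this diff.

    from django.shortcuts import redirect
    from bookwyrm.forms import ImportUserForm
    from bookwyrm.importers import BookwyrmImporter

    def start_user_import(request):
        """kick off a user-export import job (illustrative view)"""
        form = ImportUserForm(request.POST, request.FILES)
        if not form.is_valid():
            return redirect("/")
        # request.POST is a QueryDict; boxes ticked in the form arrive as "on",
        # which process_import turns into the job's "required" list
        BookwyrmImporter().process_import(
            request.user, request.FILES["archive_file"], request.POST
        )
        return redirect("/")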


@@ -14,15 +14,10 @@ class CalibreImporter(Importer):
 def __init__(self, *args: Any, **kwargs: Any):
 # Add timestamp to row_mappings_guesses for date_added to avoid
 # integrity error
-row_mappings_guesses = []
-for field, mapping in self.row_mappings_guesses:
-if field in ("date_added",):
-row_mappings_guesses.append((field, mapping + ["timestamp"]))
-else:
-row_mappings_guesses.append((field, mapping))
-self.row_mappings_guesses = row_mappings_guesses
+self.row_mappings_guesses = [
+(field, mapping + (["timestamp"] if field == "date_added" else []))
+for field, mapping in self.row_mappings_guesses
+]
 super().__init__(*args, **kwargs)
 def get_shelf(self, normalized_row: dict[str, Optional[str]]) -> Optional[str]:


@@ -26,7 +26,7 @@ class IsbnHyphenator:
 def update_range_message(self) -> None:
 """Download the range message xml file and save it locally"""
-response = requests.get(self.__range_message_url)
+response = requests.get(self.__range_message_url, timeout=15)
 with open(self.__range_file_path, "w", encoding="utf-8") as file:
 file.write(response.text)
 self.__element_tree = None


@@ -1,13 +1,14 @@
 """ PROCEED WITH CAUTION: uses deduplication fields to permanently
 merge book data objects """
 from django.core.management.base import BaseCommand
 from django.db.models import Count
 from bookwyrm import models
-from bookwyrm.management.merge import merge_objects
-def dedupe_model(model):
+def dedupe_model(model, dry_run=False):
 """combine duplicate editions and update related models"""
+print(f"deduplicating {model.__name__}:")
 fields = model._meta.get_fields()
 dedupe_fields = [
 f for f in fields if hasattr(f, "deduplication_field") and f.deduplication_field
@@ -16,30 +17,42 @@ def dedupe_model(model):
 dupes = (
 model.objects.values(field.name)
 .annotate(Count(field.name))
-.filter(**{"%s__count__gt" % field.name: 1})
+.filter(**{f"{field.name}__count__gt": 1})
+.exclude(**{field.name: ""})
+.exclude(**{f"{field.name}__isnull": True})
 )
 for dupe in dupes:
 value = dupe[field.name]
-if not value or value == "":
-continue
 print("----------")
-print(dupe)
 objs = model.objects.filter(**{field.name: value}).order_by("id")
 canonical = objs.first()
-print("keeping", canonical.remote_id)
+action = "would merge" if dry_run else "merging"
+print(
+f"{action} into {model.__name__} {canonical.remote_id} based on {field.name} {value}:"
+)
 for obj in objs[1:]:
-print(obj.remote_id)
-merge_objects(canonical, obj)
+print(f"- {obj.remote_id}")
+absorbed_fields = obj.merge_into(canonical, dry_run=dry_run)
+print(f" absorbed fields: {absorbed_fields}")
 class Command(BaseCommand):
 """deduplicate allllll the book data models"""
 help = "merges duplicate book data"
+def add_arguments(self, parser):
+"""add the arguments for this command"""
+parser.add_argument(
+"--dry_run",
+action="store_true",
+help="don't actually merge, only print what would happen",
+)
 # pylint: disable=no-self-use,unused-argument
 def handle(self, *args, **options):
 """run deduplications"""
-dedupe_model(models.Edition)
-dedupe_model(models.Work)
-dedupe_model(models.Author)
+dedupe_model(models.Edition, dry_run=options["dry_run"])
+dedupe_model(models.Work, dry_run=options["dry_run"])
+dedupe_model(models.Author, dry_run=options["dry_run"])
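With the new --dry_run flag the deduplication pass can be previewed before any rows are merged. A usage sketch, assuming the management command file keeps the name deduplicate_book_data (the file name is not shown in this hunk):

    # report what would be merged without changing anything
    python manage.py deduplicate_book_data --dry_run

    # actually merge duplicate editions, works and authors
    python manage.py deduplicate_book_data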


@@ -0,0 +1,43 @@
""" Erase any data stored about deleted users """
import sys
from django.core.management.base import BaseCommand, CommandError
from bookwyrm import models
from bookwyrm.models.user import erase_user_data
# pylint: disable=missing-function-docstring
class Command(BaseCommand):
"""command-line options"""
help = "Remove Two Factor Authorisation from user"
def add_arguments(self, parser): # pylint: disable=no-self-use
parser.add_argument(
"--dryrun",
action="store_true",
help="Preview users to be cleared without altering the database",
)
def handle(self, *args, **options): # pylint: disable=unused-argument
# Check for anything fishy
bad_state = models.User.objects.filter(is_deleted=True, is_active=True)
if bad_state.exists():
raise CommandError(
f"{bad_state.count()} user(s) marked as both active and deleted"
)
deleted_users = models.User.objects.filter(is_deleted=True)
self.stdout.write(f"Found {deleted_users.count()} deleted users")
if options["dryrun"]:
self.stdout.write("\n".join(u.username for u in deleted_users[:5]))
if deleted_users.count() > 5:
self.stdout.write("... and more")
sys.exit()
self.stdout.write("Erasing user data:")
for user_id in deleted_users.values_list("id", flat=True):
erase_user_data.delay(user_id)
self.stdout.write(".", ending="")
self.stdout.write("")
self.stdout.write("Tasks created successfully")


@@ -1,54 +0,0 @@
""" Get your admin code to allow install """
from django.core.management.base import BaseCommand
from bookwyrm import models
from bookwyrm.settings import VERSION
# pylint: disable=no-self-use
class Command(BaseCommand):
"""command-line options"""
help = "What version is this?"
def add_arguments(self, parser):
"""specify which function to run"""
parser.add_argument(
"--current",
action="store_true",
help="Version stored in database",
)
parser.add_argument(
"--target",
action="store_true",
help="Version stored in settings",
)
parser.add_argument(
"--update",
action="store_true",
help="Update database version",
)
# pylint: disable=unused-argument
def handle(self, *args, **options):
"""execute init"""
site = models.SiteSettings.objects.get()
current = site.version or "0.0.1"
target = VERSION
if options.get("current"):
print(current)
return
if options.get("target"):
print(target)
return
if options.get("update"):
site.version = target
site.save()
return
if current != target:
print(f"{current}/{target}")
else:
print(current)


@@ -1,50 +0,0 @@
from django.db.models import ManyToManyField
def update_related(canonical, obj):
"""update all the models with fk to the object being removed"""
# move related models to canonical
related_models = [
(r.remote_field.name, r.related_model) for r in canonical._meta.related_objects
]
for (related_field, related_model) in related_models:
# Skip the ManyToMany fields that arent auto-created. These
# should have a corresponding OneToMany field in the model for
# the linking table anyway. If we update it through that model
# instead then we wont lose the extra fields in the linking
# table.
related_field_obj = related_model._meta.get_field(related_field)
if isinstance(related_field_obj, ManyToManyField):
through = related_field_obj.remote_field.through
if not through._meta.auto_created:
continue
related_objs = related_model.objects.filter(**{related_field: obj})
for related_obj in related_objs:
print("replacing in", related_model.__name__, related_field, related_obj.id)
try:
setattr(related_obj, related_field, canonical)
related_obj.save()
except TypeError:
getattr(related_obj, related_field).add(canonical)
getattr(related_obj, related_field).remove(obj)
def copy_data(canonical, obj):
"""try to get the most data possible"""
for data_field in obj._meta.get_fields():
if not hasattr(data_field, "activitypub_field"):
continue
data_value = getattr(obj, data_field.name)
if not data_value:
continue
if not getattr(canonical, data_field.name):
print("setting data field", data_field.name, data_value)
setattr(canonical, data_field.name, data_value)
canonical.save()
def merge_objects(canonical, obj):
copy_data(canonical, obj)
update_related(canonical, obj)
# remove the outdated entry
obj.delete()


@@ -1,4 +1,3 @@
-from bookwyrm.management.merge import merge_objects
 from django.core.management.base import BaseCommand
@@ -9,6 +8,11 @@ class MergeCommand(BaseCommand):
 """add the arguments for this command"""
 parser.add_argument("--canonical", type=int, required=True)
 parser.add_argument("--other", type=int, required=True)
+parser.add_argument(
+"--dry_run",
+action="store_true",
+help="don't actually merge, only print what would happen",
+)
 # pylint: disable=no-self-use,unused-argument
 def handle(self, *args, **options):
@@ -26,4 +30,8 @@ class MergeCommand(BaseCommand):
 print("other book doesnt exist!")
 return
-merge_objects(canonical, other)
+absorbed_fields = other.merge_into(canonical, dry_run=options["dry_run"])
+action = "would be" if options["dry_run"] else "has been"
+print(f"{other.remote_id} {action} merged into {canonical.remote_id}")
+print(f"absorbed fields: {absorbed_fields}")


@@ -1,3 +1,4 @@
 """ look at all this nice middleware! """
 from .timezone_middleware import TimezoneMiddleware
 from .ip_middleware import IPBlocklistMiddleware
+from .file_too_big import FileTooBig


@@ -0,0 +1,30 @@
"""Middleware to display a custom 413 error page"""
from django.http import HttpResponse
from django.shortcuts import render
from django.core.exceptions import RequestDataTooBig
class FileTooBig:
"""Middleware to display a custom page when a
RequestDataTooBig exception is thrown"""
def __init__(self, get_response):
"""boilerplate __init__ from Django docs"""
self.get_response = get_response
def __call__(self, request):
"""If RequestDataTooBig is thrown, render the 413 error page"""
try:
body = request.body # pylint: disable=unused-variable
except RequestDataTooBig:
rendered = render(request, "413.html")
response = HttpResponse(rendered)
return response
response = self.get_response(request)
return response
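A middleware class only takes effect once it is listed in Django's MIDDLEWARE setting. A hedged sketch of that registration; the ordering and the other entries shown are illustrative, not taken from this commit.

    # settings.py (illustrative)
    MIDDLEWARE = [
        "django.middleware.security.SecurityMiddleware",
        # ... the rest of the stack ...
        "bookwyrm.middleware.TimezoneMiddleware",
        "bookwyrm.middleware.IPBlocklistMiddleware",
        "bookwyrm.middleware.FileTooBig",
    ]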


@@ -1,5 +1,5 @@
 """ Makes the app aware of the users timezone """
-import pytz
+import zoneinfo
 from django.utils import timezone
@@ -12,9 +12,7 @@ class TimezoneMiddleware:
 def __call__(self, request):
 if request.user.is_authenticated:
-timezone.activate(pytz.timezone(request.user.preferred_timezone))
+timezone.activate(zoneinfo.ZoneInfo(request.user.preferred_timezone))
 else:
-timezone.activate(pytz.utc)
-response = self.get_response(request)
 timezone.deactivate()
-return response
+return self.get_response(request)
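The swap above replaces the third-party pytz dependency with the standard library's zoneinfo (Python 3.9+). A minimal illustrative equivalence, not taken from this commit:

    import zoneinfo
    from datetime import datetime

    tz = zoneinfo.ZoneInfo("Australia/Sydney")
    # prints 10:00:00 — the same offset pytz.timezone() would have produced
    print(datetime(2024, 6, 29, 16, 3, tzinfo=tz).utcoffset())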


@@ -10,6 +10,7 @@ class Migration(migrations.Migration):
 ]
 operations = [
+# The new timezones are "Factory" and "localtime"
 migrations.AlterField(
 model_name="user",
 name="preferred_timezone",


@@ -22,17 +22,6 @@ def update_deleted_users(apps, schema_editor):
 ).update(is_deleted=True)
-def erase_deleted_user_data(apps, schema_editor):
-"""Retroactively clear user data"""
-for user in User.objects.filter(is_deleted=True):
-user.erase_user_data()
-user.save(
-broadcast=False,
-update_fields=["email", "avatar", "preview_image", "summary", "name"],
-)
-user.erase_user_statuses(broadcast=False)
 class Migration(migrations.Migration):
 dependencies = [
@@ -43,7 +32,4 @@ class Migration(migrations.Migration):
 migrations.RunPython(
 update_deleted_users, reverse_code=migrations.RunPython.noop
 ),
-migrations.RunPython(
-erase_deleted_user_data, reverse_code=migrations.RunPython.noop
-),
 ]


@@ -0,0 +1,212 @@
# Generated by Django 3.2.20 on 2023-11-16 00:48
from django.conf import settings
import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0185_alter_notification_notification_type"),
]
operations = [
migrations.CreateModel(
name="ParentJob",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("task_id", models.UUIDField(blank=True, null=True, unique=True)),
(
"created_date",
models.DateTimeField(default=django.utils.timezone.now),
),
(
"updated_date",
models.DateTimeField(default=django.utils.timezone.now),
),
("complete", models.BooleanField(default=False)),
(
"status",
models.CharField(
choices=[
("pending", "Pending"),
("active", "Active"),
("complete", "Complete"),
("stopped", "Stopped"),
("failed", "Failed"),
],
default="pending",
max_length=50,
null=True,
),
),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"abstract": False,
},
),
migrations.AddField(
model_name="sitesettings",
name="user_import_time_limit",
field=models.IntegerField(default=48),
),
migrations.AlterField(
model_name="notification",
name="notification_type",
field=models.CharField(
choices=[
("FAVORITE", "Favorite"),
("BOOST", "Boost"),
("REPLY", "Reply"),
("MENTION", "Mention"),
("TAG", "Tag"),
("FOLLOW", "Follow"),
("FOLLOW_REQUEST", "Follow Request"),
("IMPORT", "Import"),
("USER_IMPORT", "User Import"),
("USER_EXPORT", "User Export"),
("ADD", "Add"),
("REPORT", "Report"),
("LINK_DOMAIN", "Link Domain"),
("INVITE", "Invite"),
("ACCEPT", "Accept"),
("JOIN", "Join"),
("LEAVE", "Leave"),
("REMOVE", "Remove"),
("GROUP_PRIVACY", "Group Privacy"),
("GROUP_NAME", "Group Name"),
("GROUP_DESCRIPTION", "Group Description"),
("MOVE", "Move"),
],
max_length=255,
),
),
migrations.CreateModel(
name="BookwyrmExportJob",
fields=[
(
"parentjob_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="bookwyrm.parentjob",
),
),
("export_data", models.FileField(null=True, upload_to="")),
],
options={
"abstract": False,
},
bases=("bookwyrm.parentjob",),
),
migrations.CreateModel(
name="BookwyrmImportJob",
fields=[
(
"parentjob_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="bookwyrm.parentjob",
),
),
("archive_file", models.FileField(blank=True, null=True, upload_to="")),
("import_data", models.JSONField(null=True)),
(
"required",
django.contrib.postgres.fields.ArrayField(
base_field=models.CharField(blank=True, max_length=50),
blank=True,
size=None,
),
),
],
options={
"abstract": False,
},
bases=("bookwyrm.parentjob",),
),
migrations.CreateModel(
name="ChildJob",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("task_id", models.UUIDField(blank=True, null=True, unique=True)),
(
"created_date",
models.DateTimeField(default=django.utils.timezone.now),
),
(
"updated_date",
models.DateTimeField(default=django.utils.timezone.now),
),
("complete", models.BooleanField(default=False)),
(
"status",
models.CharField(
choices=[
("pending", "Pending"),
("active", "Active"),
("complete", "Complete"),
("stopped", "Stopped"),
("failed", "Failed"),
],
default="pending",
max_length=50,
null=True,
),
),
(
"parent_job",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="child_jobs",
to="bookwyrm.parentjob",
),
),
],
options={
"abstract": False,
},
),
migrations.AddField(
model_name="notification",
name="related_user_export",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="bookwyrm.bookwyrmexportjob",
),
),
]


@@ -0,0 +1,45 @@
# Generated by Django 3.2.23 on 2023-12-12 23:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0188_theme_loads"),
]
operations = [
migrations.AlterField(
model_name="user",
name="preferred_language",
field=models.CharField(
blank=True,
choices=[
("en-us", "English"),
("ca-es", "Català (Catalan)"),
("de-de", "Deutsch (German)"),
("eo-uy", "Esperanto (Esperanto)"),
("es-es", "Español (Spanish)"),
("eu-es", "Euskara (Basque)"),
("gl-es", "Galego (Galician)"),
("it-it", "Italiano (Italian)"),
("fi-fi", "Suomi (Finnish)"),
("fr-fr", "Français (French)"),
("lt-lt", "Lietuvių (Lithuanian)"),
("nl-nl", "Nederlands (Dutch)"),
("no-no", "Norsk (Norwegian)"),
("pl-pl", "Polski (Polish)"),
("pt-br", "Português do Brasil (Brazilian Portuguese)"),
("pt-pt", "Português Europeu (European Portuguese)"),
("ro-ro", "Română (Romanian)"),
("sv-se", "Svenska (Swedish)"),
("uk-ua", "Українська (Ukrainian)"),
("zh-hans", "简体中文 (Simplified Chinese)"),
("zh-hant", "繁體中文 (Traditional Chinese)"),
],
max_length=255,
null=True,
),
),
]


@@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2023-11-22 10:16
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0186_auto_20231116_0048"),
("bookwyrm", "0188_theme_loads"),
]
operations = []


@@ -0,0 +1,45 @@
# Generated by Django 3.2.23 on 2023-11-23 19:49
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0189_merge_0186_auto_20231116_0048_0188_theme_loads"),
]
operations = [
migrations.AlterField(
model_name="notification",
name="notification_type",
field=models.CharField(
choices=[
("FAVORITE", "Favorite"),
("BOOST", "Boost"),
("REPLY", "Reply"),
("MENTION", "Mention"),
("TAG", "Tag"),
("FOLLOW", "Follow"),
("FOLLOW_REQUEST", "Follow Request"),
("IMPORT", "Import"),
("USER_IMPORT", "User Import"),
("USER_EXPORT", "User Export"),
("ADD", "Add"),
("REPORT", "Report"),
("LINK_DOMAIN", "Link Domain"),
("INVITE_REQUEST", "Invite Request"),
("INVITE", "Invite"),
("ACCEPT", "Accept"),
("JOIN", "Join"),
("LEAVE", "Leave"),
("REMOVE", "Remove"),
("GROUP_PRIVACY", "Group Privacy"),
("GROUP_NAME", "Group Name"),
("GROUP_DESCRIPTION", "Group Description"),
("MOVE", "Move"),
],
max_length=255,
),
),
]

View file

@ -0,0 +1,16 @@
# Generated by Django 3.2.20 on 2023-11-24 17:11
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0188_theme_loads"),
]
operations = [
migrations.RemoveIndex(
model_name="author",
name="bookwyrm_au_search__b050a8_gin",
),
]

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-01-02 03:26
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0189_alter_user_preferred_language"),
("bookwyrm", "0190_alter_notification_notification_type"),
]
operations = []

View file

@ -0,0 +1,76 @@
# Generated by Django 3.2.20 on 2023-11-25 00:47
from importlib import import_module
import re
from django.db import migrations
import pgtrigger.compiler
import pgtrigger.migrations
trigger_migration = import_module("bookwyrm.migrations.0077_auto_20210623_2155")
# it's _very_ convenient for development that this migration be reversible
search_vector_trigger = trigger_migration.Migration.operations[4]
author_search_vector_trigger = trigger_migration.Migration.operations[5]
assert re.search(r"\bCREATE TRIGGER search_vector_trigger\b", search_vector_trigger.sql)
assert re.search(
r"\bCREATE TRIGGER author_search_vector_trigger\b",
author_search_vector_trigger.sql,
)
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0190_book_search_updates"),
]
operations = [
pgtrigger.migrations.AddTrigger(
model_name="book",
trigger=pgtrigger.compiler.Trigger(
name="update_search_vector_on_book_edit",
sql=pgtrigger.compiler.UpsertTriggerSql(
func="new.search_vector := setweight(coalesce(nullif(to_tsvector('english', new.title), ''), to_tsvector('simple', new.title)), 'A') || setweight(to_tsvector('english', coalesce(new.subtitle, '')), 'B') || (SELECT setweight(to_tsvector('simple', coalesce(array_to_string(array_agg(bookwyrm_author.name), ' '), '')), 'C') FROM bookwyrm_author LEFT JOIN bookwyrm_book_authors ON bookwyrm_author.id = bookwyrm_book_authors.author_id WHERE bookwyrm_book_authors.book_id = new.id ) || setweight(to_tsvector('english', coalesce(new.series, '')), 'D');RETURN NEW;",
hash="77d6399497c0a89b0bf09d296e33c396da63705c",
operation='INSERT OR UPDATE OF "title", "subtitle", "series", "search_vector"',
pgid="pgtrigger_update_search_vector_on_book_edit_bec58",
table="bookwyrm_book",
when="BEFORE",
),
),
),
pgtrigger.migrations.AddTrigger(
model_name="author",
trigger=pgtrigger.compiler.Trigger(
name="reset_search_vector_on_author_edit",
sql=pgtrigger.compiler.UpsertTriggerSql(
func="WITH updated_books AS (SELECT book_id FROM bookwyrm_book_authors WHERE author_id = new.id ) UPDATE bookwyrm_book SET search_vector = '' FROM updated_books WHERE id = updated_books.book_id;RETURN NEW;",
hash="e7bbf08711ff3724c58f4d92fb7a082ffb3d7826",
operation='UPDATE OF "name"',
pgid="pgtrigger_reset_search_vector_on_author_edit_a447c",
table="bookwyrm_author",
when="AFTER",
),
),
),
migrations.RunSQL(
sql="""DROP TRIGGER IF EXISTS search_vector_trigger ON bookwyrm_book;
DROP FUNCTION IF EXISTS book_trigger;
""",
reverse_sql=search_vector_trigger.sql,
),
migrations.RunSQL(
sql="""DROP TRIGGER IF EXISTS author_search_vector_trigger ON bookwyrm_author;
DROP FUNCTION IF EXISTS author_trigger;
""",
reverse_sql=author_search_vector_trigger.sql,
),
migrations.RunSQL(
# Recalculate book search vector for any missed author name changes
# due to bug in JOIN in the old trigger.
sql="UPDATE bookwyrm_book SET search_vector = NULL;",
reverse_sql=migrations.RunSQL.noop,
),
]
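
The migration above goes out of its way to stay reversible: it re-imports the trigger operations from 0077_auto_20210623_2155, asserts they are the expected CREATE TRIGGER statements, and reuses their SQL as reverse_sql for the DROP statements. A minimal, hypothetical sketch of the underlying pattern (index and table names here are illustrative, not the real triggers):

from django.db import migrations

reversible = migrations.RunSQL(
    sql="DROP INDEX IF EXISTS example_idx;",
    # rolling back re-creates what the forward SQL removed
    reverse_sql="CREATE INDEX example_idx ON bookwyrm_book (remote_id);",
)
irreversible = migrations.RunSQL(
    sql="DROP INDEX IF EXISTS example_idx;",
    # noop reverse: migrating backwards past this point will not restore the index
    reverse_sql=migrations.RunSQL.noop,
)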

View file

@ -0,0 +1,23 @@
# Generated by Django 3.2.23 on 2024-01-04 23:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0191_merge_20240102_0326"),
]
operations = [
migrations.AlterField(
model_name="quotation",
name="endposition",
field=models.TextField(blank=True, null=True),
),
migrations.AlterField(
model_name="quotation",
name="position",
field=models.TextField(blank=True, null=True),
),
]

View file

@ -0,0 +1,18 @@
# Generated by Django 3.2.23 on 2024-01-02 19:36
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0191_merge_20240102_0326"),
]
operations = [
migrations.RenameField(
model_name="sitesettings",
old_name="version",
new_name="available_version",
),
]

View file

@ -0,0 +1,18 @@
# Generated by Django 3.2.23 on 2024-01-16 10:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0191_merge_20240102_0326"),
]
operations = [
migrations.AddField(
model_name="sitesettings",
name="user_exports_enabled",
field=models.BooleanField(default=False),
),
]

View file

@ -0,0 +1,92 @@
# Generated by Django 3.2.23 on 2024-01-28 02:49
import django.core.serializers.json
from django.db import migrations, models
import django.db.models.deletion
from django.core.files.storage import storages
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0192_sitesettings_user_exports_enabled"),
]
operations = [
migrations.AddField(
model_name="bookwyrmexportjob",
name="export_json",
field=models.JSONField(
encoder=django.core.serializers.json.DjangoJSONEncoder, null=True
),
),
migrations.AddField(
model_name="bookwyrmexportjob",
name="json_completed",
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name="bookwyrmexportjob",
name="export_data",
field=models.FileField(
null=True,
storage=storages["exports"],
upload_to="",
),
),
migrations.CreateModel(
name="AddFileToTar",
fields=[
(
"childjob_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="bookwyrm.childjob",
),
),
(
"parent_export_job",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="child_edition_export_jobs",
to="bookwyrm.bookwyrmexportjob",
),
),
],
options={
"abstract": False,
},
bases=("bookwyrm.childjob",),
),
migrations.CreateModel(
name="AddBookToUserExportJob",
fields=[
(
"childjob_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="bookwyrm.childjob",
),
),
(
"edition",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="bookwyrm.edition",
),
),
],
options={
"abstract": False,
},
bases=("bookwyrm.childjob",),
),
]

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-02-03 15:39
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0192_make_page_positions_text"),
("bookwyrm", "0192_sitesettings_user_exports_enabled"),
]
operations = []

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-02-03 16:19
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0192_rename_version_sitesettings_available_version"),
("bookwyrm", "0193_merge_20240203_1539"),
]
operations = []

View file

@ -0,0 +1,46 @@
# Generated by Django 3.2.23 on 2024-02-21 00:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0194_merge_20240203_1619"),
]
operations = [
migrations.AlterField(
model_name="user",
name="preferred_language",
field=models.CharField(
blank=True,
choices=[
("en-us", "English"),
("ca-es", "Català (Catalan)"),
("de-de", "Deutsch (German)"),
("eo-uy", "Esperanto (Esperanto)"),
("es-es", "Español (Spanish)"),
("eu-es", "Euskara (Basque)"),
("gl-es", "Galego (Galician)"),
("it-it", "Italiano (Italian)"),
("ko-kr", "한국어 (Korean)"),
("fi-fi", "Suomi (Finnish)"),
("fr-fr", "Français (French)"),
("lt-lt", "Lietuvių (Lithuanian)"),
("nl-nl", "Nederlands (Dutch)"),
("no-no", "Norsk (Norwegian)"),
("pl-pl", "Polski (Polish)"),
("pt-br", "Português do Brasil (Brazilian Portuguese)"),
("pt-pt", "Português Europeu (European Portuguese)"),
("ro-ro", "Română (Romanian)"),
("sv-se", "Svenska (Swedish)"),
("uk-ua", "Українська (Ukrainian)"),
("zh-hans", "简体中文 (Simplified Chinese)"),
("zh-hant", "繁體中文 (Traditional Chinese)"),
],
max_length=255,
null=True,
),
),
]

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-03-18 17:37
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0193_auto_20240128_0249"),
("bookwyrm", "0195_alter_user_preferred_language"),
]
operations = []

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-03-18 00:48
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0191_migrate_search_vec_triggers_to_pgtriggers"),
("bookwyrm", "0195_alter_user_preferred_language"),
]
operations = []

View file

@ -0,0 +1,41 @@
# Generated by Django 3.2.25 on 2024-03-20 15:15
import django.contrib.postgres.indexes
from django.db import migrations
import pgtrigger.compiler
import pgtrigger.migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0196_merge_pr3134_into_main"),
]
operations = [
migrations.AddIndex(
model_name="author",
index=django.contrib.postgres.indexes.GinIndex(
fields=["search_vector"], name="bookwyrm_au_search__b050a8_gin"
),
),
pgtrigger.migrations.AddTrigger(
model_name="author",
trigger=pgtrigger.compiler.Trigger(
name="update_search_vector_on_author_edit",
sql=pgtrigger.compiler.UpsertTriggerSql(
func="new.search_vector := setweight(to_tsvector('simple', new.name), 'A') || setweight(to_tsvector('simple', coalesce(array_to_string(new.aliases, ' '), '')), 'B');RETURN NEW;",
hash="b97919016236d74d0ade51a0769a173ea269da64",
operation='INSERT OR UPDATE OF "name", "aliases", "search_vector"',
pgid="pgtrigger_update_search_vector_on_author_edit_c61cb",
table="bookwyrm_author",
when="BEFORE",
),
),
),
migrations.RunSQL(
# Calculate search vector for all Authors.
sql="UPDATE bookwyrm_author SET search_vector = NULL;",
reverse_sql="UPDATE bookwyrm_author SET search_vector = NULL;",
),
]

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-03-24 02:35
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0196_merge_20240318_1737"),
("bookwyrm", "0196_merge_pr3134_into_main"),
]
operations = []

View file

@ -0,0 +1,48 @@
# Generated by Django 3.2.24 on 2024-02-28 21:30
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0196_merge_pr3134_into_main"),
]
operations = [
migrations.CreateModel(
name="MergedBook",
fields=[
("deleted_id", models.IntegerField(primary_key=True, serialize=False)),
(
"merged_into",
models.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="absorbed",
to="bookwyrm.book",
),
),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="MergedAuthor",
fields=[
("deleted_id", models.IntegerField(primary_key=True, serialize=False)),
(
"merged_into",
models.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="absorbed",
to="bookwyrm.author",
),
),
],
options={
"abstract": False,
},
),
]

View file

@ -0,0 +1,23 @@
# Generated by Django 3.2.25 on 2024-03-26 11:37
import bookwyrm.models.bookwyrm_export_job
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0197_merge_20240324_0235"),
]
operations = [
migrations.AlterField(
model_name="bookwyrmexportjob",
name="export_data",
field=models.FileField(
null=True,
storage=bookwyrm.models.bookwyrm_export_job.select_exports_storage,
upload_to="",
),
),
]
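
This change replaces the storage instance recorded in 0193 with a callable, so the exports backend is looked up when a file is saved rather than being frozen into migration state. A sketch of what such a callable might look like, assuming it simply defers to the "exports" entry in the storages registry (the real select_exports_storage lives in bookwyrm/models/bookwyrm_export_job.py and may differ):

from django.core.files.storage import storages

def select_exports_storage():
    """resolve the exports storage backend at runtime"""
    return storages["exports"]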

View file

@ -0,0 +1,57 @@
# Generated by Django 3.2.25 on 2024-03-20 15:52
from django.db import migrations
import pgtrigger.compiler
import pgtrigger.migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0197_author_search_vector"),
]
operations = [
pgtrigger.migrations.RemoveTrigger(
model_name="author",
name="reset_search_vector_on_author_edit",
),
pgtrigger.migrations.RemoveTrigger(
model_name="book",
name="update_search_vector_on_book_edit",
),
pgtrigger.migrations.AddTrigger(
model_name="author",
trigger=pgtrigger.compiler.Trigger(
name="reset_book_search_vector_on_author_edit",
sql=pgtrigger.compiler.UpsertTriggerSql(
func="WITH updated_books AS (SELECT book_id FROM bookwyrm_book_authors WHERE author_id = new.id ) UPDATE bookwyrm_book SET search_vector = '' FROM updated_books WHERE id = updated_books.book_id;RETURN NEW;",
hash="68422c0f29879c5802b82159dde45297eff53e73",
operation='UPDATE OF "name", "aliases"',
pgid="pgtrigger_reset_book_search_vector_on_author_edit_a50c7",
table="bookwyrm_author",
when="AFTER",
),
),
),
pgtrigger.migrations.AddTrigger(
model_name="book",
trigger=pgtrigger.compiler.Trigger(
name="update_search_vector_on_book_edit",
sql=pgtrigger.compiler.UpsertTriggerSql(
func="WITH author_names AS (SELECT array_to_string(bookwyrm_author.name || bookwyrm_author.aliases, ' ') AS name_and_aliases FROM bookwyrm_author LEFT JOIN bookwyrm_book_authors ON bookwyrm_author.id = bookwyrm_book_authors.author_id WHERE bookwyrm_book_authors.book_id = new.id ) SELECT setweight(coalesce(nullif(to_tsvector('english', new.title), ''), to_tsvector('simple', new.title)), 'A') || setweight(to_tsvector('english', coalesce(new.subtitle, '')), 'B') || (SELECT setweight(to_tsvector('simple', coalesce(array_to_string(array_agg(name_and_aliases), ' '), '')), 'C') FROM author_names) || setweight(to_tsvector('english', coalesce(new.series, '')), 'D') INTO new.search_vector;RETURN NEW;",
hash="9324f5ca76a6f5e63931881d62d11da11f595b2c",
operation='INSERT OR UPDATE OF "title", "subtitle", "series", "search_vector"',
pgid="pgtrigger_update_search_vector_on_book_edit_bec58",
table="bookwyrm_book",
when="BEFORE",
),
),
),
migrations.RunSQL(
# Recalculate search vector for all Books because it now includes
# Author aliases.
sql="UPDATE bookwyrm_book SET search_vector = NULL;",
reverse_sql="UPDATE bookwyrm_book SET search_vector = NULL;",
),
]

View file

@ -0,0 +1,70 @@
# Generated by Django 4.2.11 on 2024-03-29 19:25
import bookwyrm.models.fields
from django.conf import settings
from django.db import migrations
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0198_book_search_vector_author_aliases"),
]
operations = [
migrations.AlterField(
model_name="userblocks",
name="user_object",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="%(class)s_user_object",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userblocks",
name="user_subject",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="%(class)s_user_subject",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userfollowrequest",
name="user_object",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="%(class)s_user_object",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userfollowrequest",
name="user_subject",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="%(class)s_user_subject",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userfollows",
name="user_object",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="%(class)s_user_object",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userfollows",
name="user_subject",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="%(class)s_user_subject",
to=settings.AUTH_USER_MODEL,
),
),
]

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-03-26 12:17
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0198_alter_bookwyrmexportjob_export_data"),
("bookwyrm", "0198_book_search_vector_author_aliases"),
]
operations = []

View file

@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-02 19:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0198_book_search_vector_author_aliases"),
]
operations = [
migrations.AddIndex(
model_name="status",
index=models.Index(
fields=["remote_id"], name="bookwyrm_st_remote__06aeba_idx"
),
),
]

View file

@ -0,0 +1,633 @@
# Generated by Django 4.2.11 on 2024-04-01 20:19
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0199_alter_userblocks_user_object_and_more"),
]
operations = [
migrations.AlterField(
model_name="user",
name="preferred_timezone",
field=models.CharField(
choices=[
("Africa/Abidjan", "Africa/Abidjan"),
("Africa/Accra", "Africa/Accra"),
("Africa/Addis_Ababa", "Africa/Addis_Ababa"),
("Africa/Algiers", "Africa/Algiers"),
("Africa/Asmara", "Africa/Asmara"),
("Africa/Asmera", "Africa/Asmera"),
("Africa/Bamako", "Africa/Bamako"),
("Africa/Bangui", "Africa/Bangui"),
("Africa/Banjul", "Africa/Banjul"),
("Africa/Bissau", "Africa/Bissau"),
("Africa/Blantyre", "Africa/Blantyre"),
("Africa/Brazzaville", "Africa/Brazzaville"),
("Africa/Bujumbura", "Africa/Bujumbura"),
("Africa/Cairo", "Africa/Cairo"),
("Africa/Casablanca", "Africa/Casablanca"),
("Africa/Ceuta", "Africa/Ceuta"),
("Africa/Conakry", "Africa/Conakry"),
("Africa/Dakar", "Africa/Dakar"),
("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"),
("Africa/Djibouti", "Africa/Djibouti"),
("Africa/Douala", "Africa/Douala"),
("Africa/El_Aaiun", "Africa/El_Aaiun"),
("Africa/Freetown", "Africa/Freetown"),
("Africa/Gaborone", "Africa/Gaborone"),
("Africa/Harare", "Africa/Harare"),
("Africa/Johannesburg", "Africa/Johannesburg"),
("Africa/Juba", "Africa/Juba"),
("Africa/Kampala", "Africa/Kampala"),
("Africa/Khartoum", "Africa/Khartoum"),
("Africa/Kigali", "Africa/Kigali"),
("Africa/Kinshasa", "Africa/Kinshasa"),
("Africa/Lagos", "Africa/Lagos"),
("Africa/Libreville", "Africa/Libreville"),
("Africa/Lome", "Africa/Lome"),
("Africa/Luanda", "Africa/Luanda"),
("Africa/Lubumbashi", "Africa/Lubumbashi"),
("Africa/Lusaka", "Africa/Lusaka"),
("Africa/Malabo", "Africa/Malabo"),
("Africa/Maputo", "Africa/Maputo"),
("Africa/Maseru", "Africa/Maseru"),
("Africa/Mbabane", "Africa/Mbabane"),
("Africa/Mogadishu", "Africa/Mogadishu"),
("Africa/Monrovia", "Africa/Monrovia"),
("Africa/Nairobi", "Africa/Nairobi"),
("Africa/Ndjamena", "Africa/Ndjamena"),
("Africa/Niamey", "Africa/Niamey"),
("Africa/Nouakchott", "Africa/Nouakchott"),
("Africa/Ouagadougou", "Africa/Ouagadougou"),
("Africa/Porto-Novo", "Africa/Porto-Novo"),
("Africa/Sao_Tome", "Africa/Sao_Tome"),
("Africa/Timbuktu", "Africa/Timbuktu"),
("Africa/Tripoli", "Africa/Tripoli"),
("Africa/Tunis", "Africa/Tunis"),
("Africa/Windhoek", "Africa/Windhoek"),
("America/Adak", "America/Adak"),
("America/Anchorage", "America/Anchorage"),
("America/Anguilla", "America/Anguilla"),
("America/Antigua", "America/Antigua"),
("America/Araguaina", "America/Araguaina"),
(
"America/Argentina/Buenos_Aires",
"America/Argentina/Buenos_Aires",
),
("America/Argentina/Catamarca", "America/Argentina/Catamarca"),
(
"America/Argentina/ComodRivadavia",
"America/Argentina/ComodRivadavia",
),
("America/Argentina/Cordoba", "America/Argentina/Cordoba"),
("America/Argentina/Jujuy", "America/Argentina/Jujuy"),
("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"),
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
(
"America/Argentina/Rio_Gallegos",
"America/Argentina/Rio_Gallegos",
),
("America/Argentina/Salta", "America/Argentina/Salta"),
("America/Argentina/San_Juan", "America/Argentina/San_Juan"),
("America/Argentina/San_Luis", "America/Argentina/San_Luis"),
("America/Argentina/Tucuman", "America/Argentina/Tucuman"),
("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"),
("America/Aruba", "America/Aruba"),
("America/Asuncion", "America/Asuncion"),
("America/Atikokan", "America/Atikokan"),
("America/Atka", "America/Atka"),
("America/Bahia", "America/Bahia"),
("America/Bahia_Banderas", "America/Bahia_Banderas"),
("America/Barbados", "America/Barbados"),
("America/Belem", "America/Belem"),
("America/Belize", "America/Belize"),
("America/Blanc-Sablon", "America/Blanc-Sablon"),
("America/Boa_Vista", "America/Boa_Vista"),
("America/Bogota", "America/Bogota"),
("America/Boise", "America/Boise"),
("America/Buenos_Aires", "America/Buenos_Aires"),
("America/Cambridge_Bay", "America/Cambridge_Bay"),
("America/Campo_Grande", "America/Campo_Grande"),
("America/Cancun", "America/Cancun"),
("America/Caracas", "America/Caracas"),
("America/Catamarca", "America/Catamarca"),
("America/Cayenne", "America/Cayenne"),
("America/Cayman", "America/Cayman"),
("America/Chicago", "America/Chicago"),
("America/Chihuahua", "America/Chihuahua"),
("America/Ciudad_Juarez", "America/Ciudad_Juarez"),
("America/Coral_Harbour", "America/Coral_Harbour"),
("America/Cordoba", "America/Cordoba"),
("America/Costa_Rica", "America/Costa_Rica"),
("America/Creston", "America/Creston"),
("America/Cuiaba", "America/Cuiaba"),
("America/Curacao", "America/Curacao"),
("America/Danmarkshavn", "America/Danmarkshavn"),
("America/Dawson", "America/Dawson"),
("America/Dawson_Creek", "America/Dawson_Creek"),
("America/Denver", "America/Denver"),
("America/Detroit", "America/Detroit"),
("America/Dominica", "America/Dominica"),
("America/Edmonton", "America/Edmonton"),
("America/Eirunepe", "America/Eirunepe"),
("America/El_Salvador", "America/El_Salvador"),
("America/Ensenada", "America/Ensenada"),
("America/Fort_Nelson", "America/Fort_Nelson"),
("America/Fort_Wayne", "America/Fort_Wayne"),
("America/Fortaleza", "America/Fortaleza"),
("America/Glace_Bay", "America/Glace_Bay"),
("America/Godthab", "America/Godthab"),
("America/Goose_Bay", "America/Goose_Bay"),
("America/Grand_Turk", "America/Grand_Turk"),
("America/Grenada", "America/Grenada"),
("America/Guadeloupe", "America/Guadeloupe"),
("America/Guatemala", "America/Guatemala"),
("America/Guayaquil", "America/Guayaquil"),
("America/Guyana", "America/Guyana"),
("America/Halifax", "America/Halifax"),
("America/Havana", "America/Havana"),
("America/Hermosillo", "America/Hermosillo"),
("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"),
("America/Indiana/Knox", "America/Indiana/Knox"),
("America/Indiana/Marengo", "America/Indiana/Marengo"),
("America/Indiana/Petersburg", "America/Indiana/Petersburg"),
("America/Indiana/Tell_City", "America/Indiana/Tell_City"),
("America/Indiana/Vevay", "America/Indiana/Vevay"),
("America/Indiana/Vincennes", "America/Indiana/Vincennes"),
("America/Indiana/Winamac", "America/Indiana/Winamac"),
("America/Indianapolis", "America/Indianapolis"),
("America/Inuvik", "America/Inuvik"),
("America/Iqaluit", "America/Iqaluit"),
("America/Jamaica", "America/Jamaica"),
("America/Jujuy", "America/Jujuy"),
("America/Juneau", "America/Juneau"),
("America/Kentucky/Louisville", "America/Kentucky/Louisville"),
("America/Kentucky/Monticello", "America/Kentucky/Monticello"),
("America/Knox_IN", "America/Knox_IN"),
("America/Kralendijk", "America/Kralendijk"),
("America/La_Paz", "America/La_Paz"),
("America/Lima", "America/Lima"),
("America/Los_Angeles", "America/Los_Angeles"),
("America/Louisville", "America/Louisville"),
("America/Lower_Princes", "America/Lower_Princes"),
("America/Maceio", "America/Maceio"),
("America/Managua", "America/Managua"),
("America/Manaus", "America/Manaus"),
("America/Marigot", "America/Marigot"),
("America/Martinique", "America/Martinique"),
("America/Matamoros", "America/Matamoros"),
("America/Mazatlan", "America/Mazatlan"),
("America/Mendoza", "America/Mendoza"),
("America/Menominee", "America/Menominee"),
("America/Merida", "America/Merida"),
("America/Metlakatla", "America/Metlakatla"),
("America/Mexico_City", "America/Mexico_City"),
("America/Miquelon", "America/Miquelon"),
("America/Moncton", "America/Moncton"),
("America/Monterrey", "America/Monterrey"),
("America/Montevideo", "America/Montevideo"),
("America/Montreal", "America/Montreal"),
("America/Montserrat", "America/Montserrat"),
("America/Nassau", "America/Nassau"),
("America/New_York", "America/New_York"),
("America/Nipigon", "America/Nipigon"),
("America/Nome", "America/Nome"),
("America/Noronha", "America/Noronha"),
("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"),
("America/North_Dakota/Center", "America/North_Dakota/Center"),
(
"America/North_Dakota/New_Salem",
"America/North_Dakota/New_Salem",
),
("America/Nuuk", "America/Nuuk"),
("America/Ojinaga", "America/Ojinaga"),
("America/Panama", "America/Panama"),
("America/Pangnirtung", "America/Pangnirtung"),
("America/Paramaribo", "America/Paramaribo"),
("America/Phoenix", "America/Phoenix"),
("America/Port-au-Prince", "America/Port-au-Prince"),
("America/Port_of_Spain", "America/Port_of_Spain"),
("America/Porto_Acre", "America/Porto_Acre"),
("America/Porto_Velho", "America/Porto_Velho"),
("America/Puerto_Rico", "America/Puerto_Rico"),
("America/Punta_Arenas", "America/Punta_Arenas"),
("America/Rainy_River", "America/Rainy_River"),
("America/Rankin_Inlet", "America/Rankin_Inlet"),
("America/Recife", "America/Recife"),
("America/Regina", "America/Regina"),
("America/Resolute", "America/Resolute"),
("America/Rio_Branco", "America/Rio_Branco"),
("America/Rosario", "America/Rosario"),
("America/Santa_Isabel", "America/Santa_Isabel"),
("America/Santarem", "America/Santarem"),
("America/Santiago", "America/Santiago"),
("America/Santo_Domingo", "America/Santo_Domingo"),
("America/Sao_Paulo", "America/Sao_Paulo"),
("America/Scoresbysund", "America/Scoresbysund"),
("America/Shiprock", "America/Shiprock"),
("America/Sitka", "America/Sitka"),
("America/St_Barthelemy", "America/St_Barthelemy"),
("America/St_Johns", "America/St_Johns"),
("America/St_Kitts", "America/St_Kitts"),
("America/St_Lucia", "America/St_Lucia"),
("America/St_Thomas", "America/St_Thomas"),
("America/St_Vincent", "America/St_Vincent"),
("America/Swift_Current", "America/Swift_Current"),
("America/Tegucigalpa", "America/Tegucigalpa"),
("America/Thule", "America/Thule"),
("America/Thunder_Bay", "America/Thunder_Bay"),
("America/Tijuana", "America/Tijuana"),
("America/Toronto", "America/Toronto"),
("America/Tortola", "America/Tortola"),
("America/Vancouver", "America/Vancouver"),
("America/Virgin", "America/Virgin"),
("America/Whitehorse", "America/Whitehorse"),
("America/Winnipeg", "America/Winnipeg"),
("America/Yakutat", "America/Yakutat"),
("America/Yellowknife", "America/Yellowknife"),
("Antarctica/Casey", "Antarctica/Casey"),
("Antarctica/Davis", "Antarctica/Davis"),
("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"),
("Antarctica/Macquarie", "Antarctica/Macquarie"),
("Antarctica/Mawson", "Antarctica/Mawson"),
("Antarctica/McMurdo", "Antarctica/McMurdo"),
("Antarctica/Palmer", "Antarctica/Palmer"),
("Antarctica/Rothera", "Antarctica/Rothera"),
("Antarctica/South_Pole", "Antarctica/South_Pole"),
("Antarctica/Syowa", "Antarctica/Syowa"),
("Antarctica/Troll", "Antarctica/Troll"),
("Antarctica/Vostok", "Antarctica/Vostok"),
("Arctic/Longyearbyen", "Arctic/Longyearbyen"),
("Asia/Aden", "Asia/Aden"),
("Asia/Almaty", "Asia/Almaty"),
("Asia/Amman", "Asia/Amman"),
("Asia/Anadyr", "Asia/Anadyr"),
("Asia/Aqtau", "Asia/Aqtau"),
("Asia/Aqtobe", "Asia/Aqtobe"),
("Asia/Ashgabat", "Asia/Ashgabat"),
("Asia/Ashkhabad", "Asia/Ashkhabad"),
("Asia/Atyrau", "Asia/Atyrau"),
("Asia/Baghdad", "Asia/Baghdad"),
("Asia/Bahrain", "Asia/Bahrain"),
("Asia/Baku", "Asia/Baku"),
("Asia/Bangkok", "Asia/Bangkok"),
("Asia/Barnaul", "Asia/Barnaul"),
("Asia/Beirut", "Asia/Beirut"),
("Asia/Bishkek", "Asia/Bishkek"),
("Asia/Brunei", "Asia/Brunei"),
("Asia/Calcutta", "Asia/Calcutta"),
("Asia/Chita", "Asia/Chita"),
("Asia/Choibalsan", "Asia/Choibalsan"),
("Asia/Chongqing", "Asia/Chongqing"),
("Asia/Chungking", "Asia/Chungking"),
("Asia/Colombo", "Asia/Colombo"),
("Asia/Dacca", "Asia/Dacca"),
("Asia/Damascus", "Asia/Damascus"),
("Asia/Dhaka", "Asia/Dhaka"),
("Asia/Dili", "Asia/Dili"),
("Asia/Dubai", "Asia/Dubai"),
("Asia/Dushanbe", "Asia/Dushanbe"),
("Asia/Famagusta", "Asia/Famagusta"),
("Asia/Gaza", "Asia/Gaza"),
("Asia/Harbin", "Asia/Harbin"),
("Asia/Hebron", "Asia/Hebron"),
("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"),
("Asia/Hong_Kong", "Asia/Hong_Kong"),
("Asia/Hovd", "Asia/Hovd"),
("Asia/Irkutsk", "Asia/Irkutsk"),
("Asia/Istanbul", "Asia/Istanbul"),
("Asia/Jakarta", "Asia/Jakarta"),
("Asia/Jayapura", "Asia/Jayapura"),
("Asia/Jerusalem", "Asia/Jerusalem"),
("Asia/Kabul", "Asia/Kabul"),
("Asia/Kamchatka", "Asia/Kamchatka"),
("Asia/Karachi", "Asia/Karachi"),
("Asia/Kashgar", "Asia/Kashgar"),
("Asia/Kathmandu", "Asia/Kathmandu"),
("Asia/Katmandu", "Asia/Katmandu"),
("Asia/Khandyga", "Asia/Khandyga"),
("Asia/Kolkata", "Asia/Kolkata"),
("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"),
("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"),
("Asia/Kuching", "Asia/Kuching"),
("Asia/Kuwait", "Asia/Kuwait"),
("Asia/Macao", "Asia/Macao"),
("Asia/Macau", "Asia/Macau"),
("Asia/Magadan", "Asia/Magadan"),
("Asia/Makassar", "Asia/Makassar"),
("Asia/Manila", "Asia/Manila"),
("Asia/Muscat", "Asia/Muscat"),
("Asia/Nicosia", "Asia/Nicosia"),
("Asia/Novokuznetsk", "Asia/Novokuznetsk"),
("Asia/Novosibirsk", "Asia/Novosibirsk"),
("Asia/Omsk", "Asia/Omsk"),
("Asia/Oral", "Asia/Oral"),
("Asia/Phnom_Penh", "Asia/Phnom_Penh"),
("Asia/Pontianak", "Asia/Pontianak"),
("Asia/Pyongyang", "Asia/Pyongyang"),
("Asia/Qatar", "Asia/Qatar"),
("Asia/Qostanay", "Asia/Qostanay"),
("Asia/Qyzylorda", "Asia/Qyzylorda"),
("Asia/Rangoon", "Asia/Rangoon"),
("Asia/Riyadh", "Asia/Riyadh"),
("Asia/Saigon", "Asia/Saigon"),
("Asia/Sakhalin", "Asia/Sakhalin"),
("Asia/Samarkand", "Asia/Samarkand"),
("Asia/Seoul", "Asia/Seoul"),
("Asia/Shanghai", "Asia/Shanghai"),
("Asia/Singapore", "Asia/Singapore"),
("Asia/Srednekolymsk", "Asia/Srednekolymsk"),
("Asia/Taipei", "Asia/Taipei"),
("Asia/Tashkent", "Asia/Tashkent"),
("Asia/Tbilisi", "Asia/Tbilisi"),
("Asia/Tehran", "Asia/Tehran"),
("Asia/Tel_Aviv", "Asia/Tel_Aviv"),
("Asia/Thimbu", "Asia/Thimbu"),
("Asia/Thimphu", "Asia/Thimphu"),
("Asia/Tokyo", "Asia/Tokyo"),
("Asia/Tomsk", "Asia/Tomsk"),
("Asia/Ujung_Pandang", "Asia/Ujung_Pandang"),
("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"),
("Asia/Ulan_Bator", "Asia/Ulan_Bator"),
("Asia/Urumqi", "Asia/Urumqi"),
("Asia/Ust-Nera", "Asia/Ust-Nera"),
("Asia/Vientiane", "Asia/Vientiane"),
("Asia/Vladivostok", "Asia/Vladivostok"),
("Asia/Yakutsk", "Asia/Yakutsk"),
("Asia/Yangon", "Asia/Yangon"),
("Asia/Yekaterinburg", "Asia/Yekaterinburg"),
("Asia/Yerevan", "Asia/Yerevan"),
("Atlantic/Azores", "Atlantic/Azores"),
("Atlantic/Bermuda", "Atlantic/Bermuda"),
("Atlantic/Canary", "Atlantic/Canary"),
("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"),
("Atlantic/Faeroe", "Atlantic/Faeroe"),
("Atlantic/Faroe", "Atlantic/Faroe"),
("Atlantic/Jan_Mayen", "Atlantic/Jan_Mayen"),
("Atlantic/Madeira", "Atlantic/Madeira"),
("Atlantic/Reykjavik", "Atlantic/Reykjavik"),
("Atlantic/South_Georgia", "Atlantic/South_Georgia"),
("Atlantic/St_Helena", "Atlantic/St_Helena"),
("Atlantic/Stanley", "Atlantic/Stanley"),
("Australia/ACT", "Australia/ACT"),
("Australia/Adelaide", "Australia/Adelaide"),
("Australia/Brisbane", "Australia/Brisbane"),
("Australia/Broken_Hill", "Australia/Broken_Hill"),
("Australia/Canberra", "Australia/Canberra"),
("Australia/Currie", "Australia/Currie"),
("Australia/Darwin", "Australia/Darwin"),
("Australia/Eucla", "Australia/Eucla"),
("Australia/Hobart", "Australia/Hobart"),
("Australia/LHI", "Australia/LHI"),
("Australia/Lindeman", "Australia/Lindeman"),
("Australia/Lord_Howe", "Australia/Lord_Howe"),
("Australia/Melbourne", "Australia/Melbourne"),
("Australia/NSW", "Australia/NSW"),
("Australia/North", "Australia/North"),
("Australia/Perth", "Australia/Perth"),
("Australia/Queensland", "Australia/Queensland"),
("Australia/South", "Australia/South"),
("Australia/Sydney", "Australia/Sydney"),
("Australia/Tasmania", "Australia/Tasmania"),
("Australia/Victoria", "Australia/Victoria"),
("Australia/West", "Australia/West"),
("Australia/Yancowinna", "Australia/Yancowinna"),
("Brazil/Acre", "Brazil/Acre"),
("Brazil/DeNoronha", "Brazil/DeNoronha"),
("Brazil/East", "Brazil/East"),
("Brazil/West", "Brazil/West"),
("CET", "CET"),
("CST6CDT", "CST6CDT"),
("Canada/Atlantic", "Canada/Atlantic"),
("Canada/Central", "Canada/Central"),
("Canada/Eastern", "Canada/Eastern"),
("Canada/Mountain", "Canada/Mountain"),
("Canada/Newfoundland", "Canada/Newfoundland"),
("Canada/Pacific", "Canada/Pacific"),
("Canada/Saskatchewan", "Canada/Saskatchewan"),
("Canada/Yukon", "Canada/Yukon"),
("Chile/Continental", "Chile/Continental"),
("Chile/EasterIsland", "Chile/EasterIsland"),
("Cuba", "Cuba"),
("EET", "EET"),
("EST", "EST"),
("EST5EDT", "EST5EDT"),
("Egypt", "Egypt"),
("Eire", "Eire"),
("Etc/GMT", "Etc/GMT"),
("Etc/GMT+0", "Etc/GMT+0"),
("Etc/GMT+1", "Etc/GMT+1"),
("Etc/GMT+10", "Etc/GMT+10"),
("Etc/GMT+11", "Etc/GMT+11"),
("Etc/GMT+12", "Etc/GMT+12"),
("Etc/GMT+2", "Etc/GMT+2"),
("Etc/GMT+3", "Etc/GMT+3"),
("Etc/GMT+4", "Etc/GMT+4"),
("Etc/GMT+5", "Etc/GMT+5"),
("Etc/GMT+6", "Etc/GMT+6"),
("Etc/GMT+7", "Etc/GMT+7"),
("Etc/GMT+8", "Etc/GMT+8"),
("Etc/GMT+9", "Etc/GMT+9"),
("Etc/GMT-0", "Etc/GMT-0"),
("Etc/GMT-1", "Etc/GMT-1"),
("Etc/GMT-10", "Etc/GMT-10"),
("Etc/GMT-11", "Etc/GMT-11"),
("Etc/GMT-12", "Etc/GMT-12"),
("Etc/GMT-13", "Etc/GMT-13"),
("Etc/GMT-14", "Etc/GMT-14"),
("Etc/GMT-2", "Etc/GMT-2"),
("Etc/GMT-3", "Etc/GMT-3"),
("Etc/GMT-4", "Etc/GMT-4"),
("Etc/GMT-5", "Etc/GMT-5"),
("Etc/GMT-6", "Etc/GMT-6"),
("Etc/GMT-7", "Etc/GMT-7"),
("Etc/GMT-8", "Etc/GMT-8"),
("Etc/GMT-9", "Etc/GMT-9"),
("Etc/GMT0", "Etc/GMT0"),
("Etc/Greenwich", "Etc/Greenwich"),
("Etc/UCT", "Etc/UCT"),
("Etc/UTC", "Etc/UTC"),
("Etc/Universal", "Etc/Universal"),
("Etc/Zulu", "Etc/Zulu"),
("Europe/Amsterdam", "Europe/Amsterdam"),
("Europe/Andorra", "Europe/Andorra"),
("Europe/Astrakhan", "Europe/Astrakhan"),
("Europe/Athens", "Europe/Athens"),
("Europe/Belfast", "Europe/Belfast"),
("Europe/Belgrade", "Europe/Belgrade"),
("Europe/Berlin", "Europe/Berlin"),
("Europe/Bratislava", "Europe/Bratislava"),
("Europe/Brussels", "Europe/Brussels"),
("Europe/Bucharest", "Europe/Bucharest"),
("Europe/Budapest", "Europe/Budapest"),
("Europe/Busingen", "Europe/Busingen"),
("Europe/Chisinau", "Europe/Chisinau"),
("Europe/Copenhagen", "Europe/Copenhagen"),
("Europe/Dublin", "Europe/Dublin"),
("Europe/Gibraltar", "Europe/Gibraltar"),
("Europe/Guernsey", "Europe/Guernsey"),
("Europe/Helsinki", "Europe/Helsinki"),
("Europe/Isle_of_Man", "Europe/Isle_of_Man"),
("Europe/Istanbul", "Europe/Istanbul"),
("Europe/Jersey", "Europe/Jersey"),
("Europe/Kaliningrad", "Europe/Kaliningrad"),
("Europe/Kiev", "Europe/Kiev"),
("Europe/Kirov", "Europe/Kirov"),
("Europe/Kyiv", "Europe/Kyiv"),
("Europe/Lisbon", "Europe/Lisbon"),
("Europe/Ljubljana", "Europe/Ljubljana"),
("Europe/London", "Europe/London"),
("Europe/Luxembourg", "Europe/Luxembourg"),
("Europe/Madrid", "Europe/Madrid"),
("Europe/Malta", "Europe/Malta"),
("Europe/Mariehamn", "Europe/Mariehamn"),
("Europe/Minsk", "Europe/Minsk"),
("Europe/Monaco", "Europe/Monaco"),
("Europe/Moscow", "Europe/Moscow"),
("Europe/Nicosia", "Europe/Nicosia"),
("Europe/Oslo", "Europe/Oslo"),
("Europe/Paris", "Europe/Paris"),
("Europe/Podgorica", "Europe/Podgorica"),
("Europe/Prague", "Europe/Prague"),
("Europe/Riga", "Europe/Riga"),
("Europe/Rome", "Europe/Rome"),
("Europe/Samara", "Europe/Samara"),
("Europe/San_Marino", "Europe/San_Marino"),
("Europe/Sarajevo", "Europe/Sarajevo"),
("Europe/Saratov", "Europe/Saratov"),
("Europe/Simferopol", "Europe/Simferopol"),
("Europe/Skopje", "Europe/Skopje"),
("Europe/Sofia", "Europe/Sofia"),
("Europe/Stockholm", "Europe/Stockholm"),
("Europe/Tallinn", "Europe/Tallinn"),
("Europe/Tirane", "Europe/Tirane"),
("Europe/Tiraspol", "Europe/Tiraspol"),
("Europe/Ulyanovsk", "Europe/Ulyanovsk"),
("Europe/Uzhgorod", "Europe/Uzhgorod"),
("Europe/Vaduz", "Europe/Vaduz"),
("Europe/Vatican", "Europe/Vatican"),
("Europe/Vienna", "Europe/Vienna"),
("Europe/Vilnius", "Europe/Vilnius"),
("Europe/Volgograd", "Europe/Volgograd"),
("Europe/Warsaw", "Europe/Warsaw"),
("Europe/Zagreb", "Europe/Zagreb"),
("Europe/Zaporozhye", "Europe/Zaporozhye"),
("Europe/Zurich", "Europe/Zurich"),
("Factory", "Factory"),
("GB", "GB"),
("GB-Eire", "GB-Eire"),
("GMT", "GMT"),
("GMT+0", "GMT+0"),
("GMT-0", "GMT-0"),
("GMT0", "GMT0"),
("Greenwich", "Greenwich"),
("HST", "HST"),
("Hongkong", "Hongkong"),
("Iceland", "Iceland"),
("Indian/Antananarivo", "Indian/Antananarivo"),
("Indian/Chagos", "Indian/Chagos"),
("Indian/Christmas", "Indian/Christmas"),
("Indian/Cocos", "Indian/Cocos"),
("Indian/Comoro", "Indian/Comoro"),
("Indian/Kerguelen", "Indian/Kerguelen"),
("Indian/Mahe", "Indian/Mahe"),
("Indian/Maldives", "Indian/Maldives"),
("Indian/Mauritius", "Indian/Mauritius"),
("Indian/Mayotte", "Indian/Mayotte"),
("Indian/Reunion", "Indian/Reunion"),
("Iran", "Iran"),
("Israel", "Israel"),
("Jamaica", "Jamaica"),
("Japan", "Japan"),
("Kwajalein", "Kwajalein"),
("Libya", "Libya"),
("MET", "MET"),
("MST", "MST"),
("MST7MDT", "MST7MDT"),
("Mexico/BajaNorte", "Mexico/BajaNorte"),
("Mexico/BajaSur", "Mexico/BajaSur"),
("Mexico/General", "Mexico/General"),
("NZ", "NZ"),
("NZ-CHAT", "NZ-CHAT"),
("Navajo", "Navajo"),
("PRC", "PRC"),
("PST8PDT", "PST8PDT"),
("Pacific/Apia", "Pacific/Apia"),
("Pacific/Auckland", "Pacific/Auckland"),
("Pacific/Bougainville", "Pacific/Bougainville"),
("Pacific/Chatham", "Pacific/Chatham"),
("Pacific/Chuuk", "Pacific/Chuuk"),
("Pacific/Easter", "Pacific/Easter"),
("Pacific/Efate", "Pacific/Efate"),
("Pacific/Enderbury", "Pacific/Enderbury"),
("Pacific/Fakaofo", "Pacific/Fakaofo"),
("Pacific/Fiji", "Pacific/Fiji"),
("Pacific/Funafuti", "Pacific/Funafuti"),
("Pacific/Galapagos", "Pacific/Galapagos"),
("Pacific/Gambier", "Pacific/Gambier"),
("Pacific/Guadalcanal", "Pacific/Guadalcanal"),
("Pacific/Guam", "Pacific/Guam"),
("Pacific/Honolulu", "Pacific/Honolulu"),
("Pacific/Johnston", "Pacific/Johnston"),
("Pacific/Kanton", "Pacific/Kanton"),
("Pacific/Kiritimati", "Pacific/Kiritimati"),
("Pacific/Kosrae", "Pacific/Kosrae"),
("Pacific/Kwajalein", "Pacific/Kwajalein"),
("Pacific/Majuro", "Pacific/Majuro"),
("Pacific/Marquesas", "Pacific/Marquesas"),
("Pacific/Midway", "Pacific/Midway"),
("Pacific/Nauru", "Pacific/Nauru"),
("Pacific/Niue", "Pacific/Niue"),
("Pacific/Norfolk", "Pacific/Norfolk"),
("Pacific/Noumea", "Pacific/Noumea"),
("Pacific/Pago_Pago", "Pacific/Pago_Pago"),
("Pacific/Palau", "Pacific/Palau"),
("Pacific/Pitcairn", "Pacific/Pitcairn"),
("Pacific/Pohnpei", "Pacific/Pohnpei"),
("Pacific/Ponape", "Pacific/Ponape"),
("Pacific/Port_Moresby", "Pacific/Port_Moresby"),
("Pacific/Rarotonga", "Pacific/Rarotonga"),
("Pacific/Saipan", "Pacific/Saipan"),
("Pacific/Samoa", "Pacific/Samoa"),
("Pacific/Tahiti", "Pacific/Tahiti"),
("Pacific/Tarawa", "Pacific/Tarawa"),
("Pacific/Tongatapu", "Pacific/Tongatapu"),
("Pacific/Truk", "Pacific/Truk"),
("Pacific/Wake", "Pacific/Wake"),
("Pacific/Wallis", "Pacific/Wallis"),
("Pacific/Yap", "Pacific/Yap"),
("Poland", "Poland"),
("Portugal", "Portugal"),
("ROC", "ROC"),
("ROK", "ROK"),
("Singapore", "Singapore"),
("Turkey", "Turkey"),
("UCT", "UCT"),
("US/Alaska", "US/Alaska"),
("US/Aleutian", "US/Aleutian"),
("US/Arizona", "US/Arizona"),
("US/Central", "US/Central"),
("US/East-Indiana", "US/East-Indiana"),
("US/Eastern", "US/Eastern"),
("US/Hawaii", "US/Hawaii"),
("US/Indiana-Starke", "US/Indiana-Starke"),
("US/Michigan", "US/Michigan"),
("US/Mountain", "US/Mountain"),
("US/Pacific", "US/Pacific"),
("US/Samoa", "US/Samoa"),
("UTC", "UTC"),
("Universal", "Universal"),
("W-SU", "W-SU"),
("WET", "WET"),
("Zulu", "Zulu"),
("localtime", "localtime"),
],
default="UTC",
max_length=255,
),
),
]

View file

@ -0,0 +1,27 @@
# Generated by Django 3.2.25 on 2024-03-27 19:14
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0199_merge_20240326_1217"),
]
operations = [
migrations.RemoveField(
model_name="addfiletotar",
name="childjob_ptr",
),
migrations.RemoveField(
model_name="addfiletotar",
name="parent_export_job",
),
migrations.DeleteModel(
name="AddBookToUserExportJob",
),
migrations.DeleteModel(
name="AddFileToTar",
),
]

View file

@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-03 19:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0199_status_bookwyrm_st_remote__06aeba_idx"),
]
operations = [
migrations.AddIndex(
model_name="status",
index=models.Index(
fields=["thread_id"], name="bookwyrm_st_thread__cf064f_idx"
),
),
]

View file

@ -0,0 +1,39 @@
# Generated by Django 4.2.11 on 2024-04-01 21:09
import bookwyrm.models.fields
from django.db import migrations, models
from django.contrib.postgres.operations import CreateCollation
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0200_alter_user_preferred_timezone"),
]
operations = [
CreateCollation(
"case_insensitive",
provider="icu",
locale="und-u-ks-level2",
deterministic=False,
),
migrations.AlterField(
model_name="hashtag",
name="name",
field=bookwyrm.models.fields.CharField(
db_collation="case_insensitive", max_length=256
),
),
migrations.AlterField(
model_name="user",
name="localname",
field=models.CharField(
db_collation="case_insensitive",
max_length=255,
null=True,
unique=True,
validators=[bookwyrm.models.fields.validate_localname],
),
),
]
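
The CreateCollation operation above defines a non-deterministic ICU collation ("und-u-ks-level2" compares at strength level 2, i.e. ignoring case), and the two AlterField operations apply it to Hashtag.name and User.localname. A hedged sketch of the practical effect, using the field from this migration:

from bookwyrm.models import User

# with db_collation="case_insensitive", an exact lookup matches any casing,
# so "BookLover", "booklover" and "BOOKLOVER" resolve to the same row
User.objects.filter(localname="BookLover").exists()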

View file

@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-03 19:10
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0200_status_bookwyrm_st_thread__cf064f_idx"),
]
operations = [
migrations.AddIndex(
model_name="keypair",
index=models.Index(
fields=["remote_id"], name="bookwyrm_ke_remote__472927_idx"
),
),
]

View file

@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-03 19:14
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0201_keypair_bookwyrm_ke_remote__472927_idx"),
]
operations = [
migrations.AddIndex(
model_name="user",
index=models.Index(
fields=["username"], name="bookwyrm_us_usernam_b2546d_idx"
),
),
]

View file

@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-03 19:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0202_user_bookwyrm_us_usernam_b2546d_idx"),
]
operations = [
migrations.AddIndex(
model_name="user",
index=models.Index(
fields=["is_active", "local"], name="bookwyrm_us_is_acti_972dc4_idx"
),
),
]

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-04-09 10:42
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0197_mergedauthor_mergedbook"),
("bookwyrm", "0203_user_bookwyrm_us_is_acti_972dc4_idx"),
]
operations = []

View file

@ -0,0 +1,13 @@
# Generated by Django 4.2.11 on 2024-04-10 20:22
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0201_alter_hashtag_name_alter_user_localname"),
("bookwyrm", "0204_merge_20240409_1042"),
]
operations = []

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-04-13 02:32
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0200_auto_20240327_1914"),
("bookwyrm", "0204_merge_20240409_1042"),
]
operations = []

View file

@ -0,0 +1,13 @@
# Generated by Django 4.2.11 on 2024-04-15 15:37
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0205_merge_20240410_2022"),
("bookwyrm", "0205_merge_20240413_0232"),
]
operations = []

View file

@ -26,6 +26,8 @@ from .federated_server import FederatedServer
from .group import Group, GroupMember, GroupMemberInvitation
from .import_job import ImportJob, ImportItem
+from .bookwyrm_import_job import BookwyrmImportJob
+from .bookwyrm_export_job import BookwyrmExportJob
from .move import MoveUser

View file

@ -152,8 +152,9 @@ class ActivitypubMixin:
# find anyone who's tagged in a status, for example
mentions = self.recipients if hasattr(self, "recipients") else []
-# we always send activities to explicitly mentioned users' inboxes
-recipients = [u.inbox for u in mentions or [] if not u.local]
+# we always send activities to explicitly mentioned users (using shared inboxes
+# where available to avoid duplicate submissions to a given instance)
+recipients = {u.shared_inbox or u.inbox for u in mentions if not u.local}
# unless it's a dm, all the followers should receive the activity
if privacy != "direct":
@ -168,23 +169,23 @@ class ActivitypubMixin:
# filter users first by whether they're using the desired software
# this lets us send book updates only to other bw servers
if software:
-queryset = queryset.filter(bookwyrm_user=(software == "bookwyrm"))
+queryset = queryset.filter(bookwyrm_user=software == "bookwyrm")
# if there's a user, we only want to send to the user's followers
if user:
queryset = queryset.filter(following=user)
-# ideally, we will send to shared inboxes for efficiency
-shared_inboxes = (
-queryset.filter(shared_inbox__isnull=False)
-.values_list("shared_inbox", flat=True)
-.distinct()
-)
-# but not everyone has a shared inbox
-inboxes = queryset.filter(shared_inbox__isnull=True).values_list(
-"inbox", flat=True
-)
-recipients += list(shared_inboxes) + list(inboxes)
-return list(set(recipients))
+# as above, we prefer shared inboxes if available
+recipients.update(
+queryset.filter(shared_inbox__isnull=False).values_list(
+"shared_inbox", flat=True
+)
+)
+recipients.update(
+queryset.filter(shared_inbox__isnull=True).values_list(
+"inbox", flat=True
+)
+)
+return list(recipients)
def to_activity_dataclass(self):
"""convert from a model to an activity"""
@ -205,14 +206,10 @@ class ObjectMixin(ActivitypubMixin):
created: Optional[bool] = None,
software: Any = None,
priority: str = BROADCAST,
+broadcast: bool = True,
**kwargs: Any,
) -> None:
"""broadcast created/updated/deleted objects as appropriate"""
-broadcast = kwargs.get("broadcast", True)
-# this bonus kwarg would cause an error in the base save method
-if "broadcast" in kwargs:
-del kwargs["broadcast"]
created = created or not bool(self.id)
# first off, we want to save normally no matter what
super().save(*args, **kwargs)
@ -602,7 +599,7 @@ def to_ordered_collection_page(
if activity_page.has_next():
next_page = f"{remote_id}?page={activity_page.next_page_number()}"
if activity_page.has_previous():
-prev_page = f"{remote_id}?page=%d{activity_page.previous_page_number()}"
+prev_page = f"{remote_id}?page={activity_page.previous_page_number()}"
return activitypub.OrderedCollectionPage(
id=f"{remote_id}?page={page}",
partOf=remote_id,
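
The recipients change above switches from a list to a set keyed on shared_inbox or inbox, so two followers on the same remote instance collapse to a single delivery target. A small illustrative sketch (hypothetical data, not BookWyrm API):

# two followers on the same instance that advertises a shared inbox
followers = [
    {"inbox": "https://other.example/u/a/inbox", "shared": "https://other.example/inbox"},
    {"inbox": "https://other.example/u/b/inbox", "shared": "https://other.example/inbox"},
]
recipients = {f["shared"] or f["inbox"] for f in followers}
assert recipients == {"https://other.example/inbox"}  # one POST instead of two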

View file

@ -1,20 +1,25 @@
""" database schema for info about authors """ """ database schema for info about authors """
import re
from typing import Tuple, Any
from django.contrib.postgres.indexes import GinIndex import re
from typing import Any
from django.db import models from django.db import models
from django.contrib.postgres.indexes import GinIndex
import pgtrigger
from bookwyrm import activitypub from bookwyrm import activitypub
from bookwyrm.settings import DOMAIN from bookwyrm.settings import BASE_URL
from bookwyrm.utils.db import format_trigger
from .book import BookDataModel from .book import BookDataModel, MergedAuthor
from . import fields from . import fields
class Author(BookDataModel): class Author(BookDataModel):
"""basic biographic info""" """basic biographic info"""
merged_model = MergedAuthor
wikipedia_link = fields.CharField( wikipedia_link = fields.CharField(
max_length=255, blank=True, null=True, deduplication_field=True max_length=255, blank=True, null=True, deduplication_field=True
) )
@ -40,12 +45,12 @@ class Author(BookDataModel):
)
bio = fields.HtmlField(null=True, blank=True)
-def save(self, *args: Tuple[Any, ...], **kwargs: dict[str, Any]) -> None:
+def save(self, *args: Any, **kwargs: Any) -> None:
"""normalize isni format"""
-if self.isni:
+if self.isni is not None:
self.isni = re.sub(r"\s", "", self.isni)
-return super().save(*args, **kwargs)
+super().save(*args, **kwargs)
@property
def isni_link(self):
@ -65,11 +70,48 @@ class Author(BookDataModel):
def get_remote_id(self):
"""editions and works both use "book" instead of model_name"""
-return f"https://{DOMAIN}/author/{self.id}"
+return f"{BASE_URL}/author/{self.id}"
-activity_serializer = activitypub.Author
class Meta:
-"""sets up postgres GIN index field"""
+"""sets up indexes and triggers"""
+# pylint: disable=line-too-long
indexes = (GinIndex(fields=["search_vector"]),)
triggers = [
pgtrigger.Trigger(
name="update_search_vector_on_author_edit",
when=pgtrigger.Before,
operation=pgtrigger.Insert
| pgtrigger.UpdateOf("name", "aliases", "search_vector"),
func=format_trigger(
"""new.search_vector :=
-- author name, with priority A
setweight(to_tsvector('simple', new.name), 'A') ||
-- author aliases, with priority B
setweight(to_tsvector('simple', coalesce(array_to_string(new.aliases, ' '), '')), 'B');
RETURN new;
"""
),
),
pgtrigger.Trigger(
name="reset_book_search_vector_on_author_edit",
when=pgtrigger.After,
operation=pgtrigger.UpdateOf("name", "aliases"),
func=format_trigger(
"""WITH updated_books AS (
SELECT book_id
FROM bookwyrm_book_authors
WHERE author_id = new.id
)
UPDATE bookwyrm_book
SET search_vector = ''
FROM updated_books
WHERE id = updated_books.book_id;
RETURN new;
"""
),
),
]
activity_serializer = activitypub.Author
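
With the trigger above maintaining search_vector (author name weighted 'A', aliases 'B'), author search can rank matches directly in the database. A sketch of how the vector might be queried, assuming the standard Django full-text helpers rather than BookWyrm's own search module:

from django.contrib.postgres.search import SearchQuery, SearchRank
from django.db.models import F
from bookwyrm.models import Author

query = SearchQuery("ursula", config="simple")  # same config the trigger uses
Author.objects.filter(search_vector=query).annotate(
    rank=SearchRank(F("search_vector"), query)
).order_by("-rank")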

View file

@ -10,7 +10,7 @@ from django.http import Http404
from django.utils.translation import gettext_lazy as _
from django.utils.text import slugify
-from bookwyrm.settings import DOMAIN
+from bookwyrm.settings import BASE_URL
from .fields import RemoteIdField
@ -38,7 +38,7 @@ class BookWyrmModel(models.Model):
def get_remote_id(self):
"""generate the url that resolves to the local object, without a slug"""
-base_path = f"https://{DOMAIN}"
+base_path = BASE_URL
if hasattr(self, "user"):
base_path = f"{base_path}{self.user.local_path}"
@ -53,7 +53,7 @@ class BookWyrmModel(models.Model):
@property
def local_path(self):
"""how to link to this object in the local app, with a slug"""
-local = self.get_remote_id().replace(f"https://{DOMAIN}", "")
+local = self.get_remote_id().replace(BASE_URL, "")
name = None
if hasattr(self, "name_field"):

View file

@ -1,29 +1,33 @@
""" database schema for books and shelves """ """ database schema for books and shelves """
from itertools import chain from itertools import chain
import re import re
from typing import Any from typing import Any, Dict, Optional, Iterable
from typing_extensions import Self
from django.contrib.postgres.search import SearchVectorField from django.contrib.postgres.search import SearchVectorField
from django.contrib.postgres.indexes import GinIndex from django.contrib.postgres.indexes import GinIndex
from django.core.cache import cache from django.core.cache import cache
from django.db import models, transaction from django.db import models, transaction
from django.db.models import Prefetch from django.db.models import Prefetch, ManyToManyField
from django.dispatch import receiver from django.dispatch import receiver
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from model_utils import FieldTracker from model_utils import FieldTracker
from model_utils.managers import InheritanceManager from model_utils.managers import InheritanceManager
from imagekit.models import ImageSpecField from imagekit.models import ImageSpecField
import pgtrigger
from bookwyrm import activitypub from bookwyrm import activitypub
from bookwyrm.isbn.isbn import hyphenator_singleton as hyphenator from bookwyrm.isbn.isbn import hyphenator_singleton as hyphenator
from bookwyrm.preview_images import generate_edition_preview_image_task from bookwyrm.preview_images import generate_edition_preview_image_task
from bookwyrm.settings import ( from bookwyrm.settings import (
DOMAIN, BASE_URL,
DEFAULT_LANGUAGE, DEFAULT_LANGUAGE,
LANGUAGE_ARTICLES, LANGUAGE_ARTICLES,
ENABLE_PREVIEW_IMAGES, ENABLE_PREVIEW_IMAGES,
ENABLE_THUMBNAIL_GENERATION, ENABLE_THUMBNAIL_GENERATION,
) )
from bookwyrm.utils.db import format_trigger, add_update_fields
from .activitypub_mixin import OrderedCollectionPageMixin, ObjectMixin from .activitypub_mixin import OrderedCollectionPageMixin, ObjectMixin
from .base_model import BookWyrmModel from .base_model import BookWyrmModel
@ -92,24 +96,134 @@ class BookDataModel(ObjectMixin, BookWyrmModel):
abstract = True
-def save(self, *args: Any, **kwargs: Any) -> None:
+def save(
+self, *args: Any, update_fields: Optional[Iterable[str]] = None, **kwargs: Any
+) -> None:
"""ensure that the remote_id is within this instance"""
if self.id:
self.remote_id = self.get_remote_id()
+update_fields = add_update_fields(update_fields, "remote_id")
else:
self.origin_id = self.remote_id
self.remote_id = None
-return super().save(*args, **kwargs)
+update_fields = add_update_fields(update_fields, "origin_id", "remote_id")
+super().save(*args, update_fields=update_fields, **kwargs)
# pylint: disable=arguments-differ
def broadcast(self, activity, sender, software="bookwyrm", **kwargs):
"""only send book data updates to other bookwyrm instances"""
super().broadcast(activity, sender, software=software, **kwargs)
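
The reworked save() threads update_fields through add_update_fields so that fields assigned inside save() are not silently dropped when a caller passes an explicit update_fields list. The helper itself is imported from bookwyrm.utils.db and not shown in this diff; a plausible sketch of its behaviour (an assumption, the real implementation may differ):

from typing import Iterable, Optional

def add_update_fields(
    update_fields: Optional[Iterable[str]], *fields: str
) -> Optional[set]:
    # None means "save every field", so there is nothing to extend
    return set(update_fields) | set(fields) if update_fields is not None else None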
def merge_into(self, canonical: Self, dry_run=False) -> Dict[str, Any]:
"""merge this entity into another entity"""
if canonical.id == self.id:
raise ValueError(f"Cannot merge {self} into itself")
absorbed_fields = canonical.absorb_data_from(self, dry_run=dry_run)
if dry_run:
return absorbed_fields
canonical.save()
self.merged_model.objects.create(deleted_id=self.id, merged_into=canonical)
# move related models to canonical
related_models = [
(r.remote_field.name, r.related_model) for r in self._meta.related_objects
]
# pylint: disable=protected-access
for related_field, related_model in related_models:
# Skip the ManyToMany fields that aren't auto-created. These
# should have a corresponding OneToMany field in the model for
# the linking table anyway. If we update it through that model
# instead then we won't lose the extra fields in the linking
# table.
# pylint: disable=protected-access
related_field_obj = related_model._meta.get_field(related_field)
if isinstance(related_field_obj, ManyToManyField):
through = related_field_obj.remote_field.through
if not through._meta.auto_created:
continue
related_objs = related_model.objects.filter(**{related_field: self})
for related_obj in related_objs:
try:
setattr(related_obj, related_field, canonical)
related_obj.save()
except TypeError:
getattr(related_obj, related_field).add(canonical)
getattr(related_obj, related_field).remove(self)
self.delete()
return absorbed_fields
def absorb_data_from(self, other: Self, dry_run=False) -> Dict[str, Any]:
"""fill empty fields with values from another entity"""
absorbed_fields = {}
for data_field in self._meta.get_fields():
if not hasattr(data_field, "activitypub_field"):
continue
canonical_value = getattr(self, data_field.name)
other_value = getattr(other, data_field.name)
if not other_value:
continue
if isinstance(data_field, fields.ArrayField):
if new_values := list(set(other_value) - set(canonical_value)):
# append at the end (in no particular order)
if not dry_run:
setattr(self, data_field.name, canonical_value + new_values)
absorbed_fields[data_field.name] = new_values
elif isinstance(data_field, fields.PartialDateField):
if (
(not canonical_value)
or (other_value.has_day and not canonical_value.has_day)
or (other_value.has_month and not canonical_value.has_month)
):
if not dry_run:
setattr(self, data_field.name, other_value)
absorbed_fields[data_field.name] = other_value
else:
if not canonical_value:
if not dry_run:
setattr(self, data_field.name, other_value)
absorbed_fields[data_field.name] = other_value
return absorbed_fields
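As a rough illustration of how the merge methods above might be driven (the duplicate and canonical objects and their ids are hypothetical; BookWyrm may expose this through a management command or admin view instead):

# Hypothetical example: merge a duplicate Author record into the canonical one
duplicate = Author.objects.get(id=123)   # ids made up for illustration
canonical = Author.objects.get(id=456)

# dry_run reports which fields *would* be absorbed without touching the DB
preview = duplicate.merge_into(canonical, dry_run=True)
print(preview)  # e.g. {"aliases": ["A. N. Other"]}

# the real merge copies missing data, repoints related rows, records a
# MergedAuthor redirect stub, and deletes the duplicate
duplicate.merge_into(canonical)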
class MergedBookDataModel(models.Model):
"""a BookDataModel instance that has been merged into another instance. kept
to be able to redirect old URLs"""
deleted_id = models.IntegerField(primary_key=True)
class Meta:
"""abstract just like BookDataModel"""
abstract = True
class MergedBook(MergedBookDataModel):
"""an Book that has been merged into another one"""
merged_into = models.ForeignKey(
"Book", on_delete=models.PROTECT, related_name="absorbed"
)
class MergedAuthor(MergedBookDataModel):
"""an Author that has been merged into another one"""
merged_into = models.ForeignKey(
"Author", on_delete=models.PROTECT, related_name="absorbed"
)
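Since the Merged* rows store only the old primary key and a pointer to the surviving record, redirecting an old URL is presumably just a lookup. A sketch of a view along those lines (the view name and flow are assumptions, not part of this diff):

# Hypothetical view sketch: redirect /book/<id> for a book that was merged away
from django.http import Http404
from django.shortcuts import redirect

def resolve_book(request, book_id):
    book = Book.objects.filter(id=book_id).first()
    if book is None:
        merged = MergedBook.objects.filter(deleted_id=book_id).first()
        if merged is None:
            raise Http404()
        return redirect(merged.merged_into.local_path, permanent=True)
    # ... otherwise render the book page as usual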
class Book(BookDataModel): class Book(BookDataModel):
"""a generic book, which can mean either an edition or a work""" """a generic book, which can mean either an edition or a work"""
merged_model = MergedBook
connector = models.ForeignKey("Connector", on_delete=models.PROTECT, null=True) connector = models.ForeignKey("Connector", on_delete=models.PROTECT, null=True)
# book/work metadata # book/work metadata
@ -190,9 +304,13 @@ class Book(BookDataModel):
"""properties of this edition, as a string""" """properties of this edition, as a string"""
items = [ items = [
self.physical_format if hasattr(self, "physical_format") else None, self.physical_format if hasattr(self, "physical_format") else None,
(
f"{self.languages[0]} language" f"{self.languages[0]} language"
if self.languages and self.languages[0] and self.languages[0] != "English" if self.languages
else None, and self.languages[0]
and self.languages[0] != "English"
else None
),
str(self.published_date.year) if self.published_date else None, str(self.published_date.year) if self.published_date else None,
", ".join(self.publishers) if hasattr(self, "publishers") else None, ", ".join(self.publishers) if hasattr(self, "publishers") else None,
] ]
@ -210,11 +328,11 @@ class Book(BookDataModel):
if not isinstance(self, (Edition, Work)): if not isinstance(self, (Edition, Work)):
raise ValueError("Books should be added as Editions or Works") raise ValueError("Books should be added as Editions or Works")
return super().save(*args, **kwargs) super().save(*args, **kwargs)
def get_remote_id(self): def get_remote_id(self):
"""editions and works both use "book" instead of model_name""" """editions and works both use "book" instead of model_name"""
return f"https://{DOMAIN}/book/{self.id}" return f"{BASE_URL}/book/{self.id}"
def guess_sort_title(self): def guess_sort_title(self):
"""Get a best-guess sort title for the current book""" """Get a best-guess sort title for the current book"""
@ -232,9 +350,49 @@ class Book(BookDataModel):
) )
class Meta: class Meta:
"""sets up postgres GIN index field""" """set up indexes and triggers"""
# pylint: disable=line-too-long
indexes = (GinIndex(fields=["search_vector"]),) indexes = (GinIndex(fields=["search_vector"]),)
triggers = [
pgtrigger.Trigger(
name="update_search_vector_on_book_edit",
when=pgtrigger.Before,
operation=pgtrigger.Insert
| pgtrigger.UpdateOf("title", "subtitle", "series", "search_vector"),
func=format_trigger(
"""
WITH author_names AS (
SELECT array_to_string(bookwyrm_author.name || bookwyrm_author.aliases, ' ') AS name_and_aliases
FROM bookwyrm_author
LEFT JOIN bookwyrm_book_authors
ON bookwyrm_author.id = bookwyrm_book_authors.author_id
WHERE bookwyrm_book_authors.book_id = new.id
)
SELECT
-- title, with priority A (parse in English, default to simple if empty)
setweight(COALESCE(nullif(
to_tsvector('english', new.title), ''),
to_tsvector('simple', new.title)), 'A') ||
-- subtitle, with priority B (always in English?)
setweight(to_tsvector('english', COALESCE(new.subtitle, '')), 'B') ||
-- list of authors' names and aliases (with priority C)
(SELECT setweight(to_tsvector('simple', COALESCE(array_to_string(ARRAY_AGG(name_and_aliases), ' '), '')), 'C')
FROM author_names
) ||
--- last: series name, with lowest priority
setweight(to_tsvector('english', COALESCE(new.series, '')), 'D')
INTO new.search_vector;
RETURN new;
"""
),
)
]
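The trigger keeps search_vector current at the database level, so querying it from Django works with the standard postgres search helpers. A minimal sketch (the search term and ranking choices here are illustrative, not taken from this diff):

from django.contrib.postgres.search import SearchQuery, SearchRank
from django.db.models import F

query = SearchQuery("dune", config="simple")
results = (
    Book.objects.filter(search_vector=query)
    .annotate(rank=SearchRank(F("search_vector"), query))
    .order_by("-rank")
)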
class Work(OrderedCollectionPageMixin, Book): class Work(OrderedCollectionPageMixin, Book):
@ -247,10 +405,11 @@ class Work(OrderedCollectionPageMixin, Book):
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
"""set some fields on the edition object""" """set some fields on the edition object"""
super().save(*args, **kwargs)
# set rank # set rank
for edition in self.editions.all(): for edition in self.editions.all():
edition.save() edition.save()
return super().save(*args, **kwargs)
@property @property
def default_edition(self): def default_edition(self):
@ -356,33 +515,48 @@ class Edition(Book):
# max rank is 9 # max rank is 9
return rank return rank
def save(self, *args: Any, **kwargs: Any) -> None: def save(
self, *args: Any, update_fields: Optional[Iterable[str]] = None, **kwargs: Any
) -> None:
"""set some fields on the edition object""" """set some fields on the edition object"""
# calculate isbn 10/13 # calculate isbn 10/13
if self.isbn_13 and self.isbn_13[:3] == "978" and not self.isbn_10: if (
self.isbn_10 is None
and self.isbn_13 is not None
and self.isbn_13[:3] == "978"
):
self.isbn_10 = isbn_13_to_10(self.isbn_13) self.isbn_10 = isbn_13_to_10(self.isbn_13)
if self.isbn_10 and not self.isbn_13: update_fields = add_update_fields(update_fields, "isbn_10")
if self.isbn_13 is None and self.isbn_10 is not None:
self.isbn_13 = isbn_10_to_13(self.isbn_10) self.isbn_13 = isbn_10_to_13(self.isbn_10)
update_fields = add_update_fields(update_fields, "isbn_13")
# normalize isbn format # normalize isbn format
if self.isbn_10: if self.isbn_10 is not None:
self.isbn_10 = normalize_isbn(self.isbn_10) self.isbn_10 = normalize_isbn(self.isbn_10)
if self.isbn_13: if self.isbn_13 is not None:
self.isbn_13 = normalize_isbn(self.isbn_13) self.isbn_13 = normalize_isbn(self.isbn_13)
# set rank # set rank
self.edition_rank = self.get_rank() if (new := self.get_rank()) != self.edition_rank:
self.edition_rank = new
# clear author cache update_fields = add_update_fields(update_fields, "edition_rank")
if self.id:
for author_id in self.authors.values_list("id", flat=True):
cache.delete(f"author-books-{author_id}")
# Create sort title by removing articles from title # Create sort title by removing articles from title
if self.sort_title in [None, ""]: if self.sort_title in [None, ""]:
self.sort_title = self.guess_sort_title() self.sort_title = self.guess_sort_title()
update_fields = add_update_fields(update_fields, "sort_title")
return super().save(*args, **kwargs) super().save(*args, update_fields=update_fields, **kwargs)
# clear author cache
if self.id:
cache.delete_many(
[
f"author-books-{author_id}"
for author_id in self.authors.values_list("id", flat=True)
]
)
@transaction.atomic @transaction.atomic
def repair(self): def repair(self):


@ -0,0 +1,341 @@
"""Export user account to tar.gz file for import into another Bookwyrm instance"""
import logging
import os
from boto3.session import Session as BotoSession
from s3_tar import S3Tar
from django.db.models import BooleanField, FileField, JSONField
from django.core.serializers.json import DjangoJSONEncoder
from django.core.files.base import ContentFile
from django.core.files.storage import storages
from bookwyrm import settings
from bookwyrm.models import AnnualGoal, ReadThrough, ShelfBook, ListItem
from bookwyrm.models import Review, Comment, Quotation
from bookwyrm.models import Edition
from bookwyrm.models import UserFollows, User, UserBlocks
from bookwyrm.models.job import ParentJob
from bookwyrm.tasks import app, IMPORTS
from bookwyrm.utils.tar import BookwyrmTarFile
logger = logging.getLogger(__name__)
class BookwyrmAwsSession(BotoSession):
"""a boto session that always uses settings.AWS_S3_ENDPOINT_URL"""
def client(self, *args, **kwargs): # pylint: disable=arguments-differ
kwargs["endpoint_url"] = settings.AWS_S3_ENDPOINT_URL
return super().client("s3", *args, **kwargs)
def select_exports_storage():
"""callable to allow for dependency on runtime configuration"""
return storages["exports"]
class BookwyrmExportJob(ParentJob):
"""entry for a specific request to export a bookwyrm user"""
export_data = FileField(null=True, storage=select_exports_storage)
export_json = JSONField(null=True, encoder=DjangoJSONEncoder)
json_completed = BooleanField(default=False)
def start_job(self):
"""schedule the first task"""
task = create_export_json_task.delay(job_id=self.id)
self.task_id = task.id
self.save(update_fields=["task_id"])
@app.task(queue=IMPORTS)
def create_export_json_task(job_id):
"""create the JSON data for the export"""
job = BookwyrmExportJob.objects.get(id=job_id)
# don't start the job if it was stopped from the UI
if job.complete:
return
try:
job.set_status("active")
# generate JSON structure
job.export_json = export_json(job.user)
job.save(update_fields=["export_json"])
# create archive in separate task
create_archive_task.delay(job_id=job.id)
except Exception as err: # pylint: disable=broad-except
logger.exception(
"create_export_json_task for %s failed with error: %s", job, err
)
job.set_status("failed")
def archive_file_location(file, directory="") -> str:
"""get the relative location of a file inside the archive"""
return os.path.join(directory, file.name)
def add_file_to_s3_tar(s3_tar: S3Tar, storage, file, directory=""):
"""
add file to S3Tar inside directory, keeping any directories under its
storage location
"""
s3_tar.add_file(
os.path.join(storage.location, file.name),
folder=os.path.dirname(archive_file_location(file, directory=directory)),
)
@app.task(queue=IMPORTS)
def create_archive_task(job_id):
"""create the archive containing the JSON file and additional files"""
job = BookwyrmExportJob.objects.get(id=job_id)
# don't start the job if it was stopped from the UI
if job.complete:
return
try:
export_task_id = str(job.task_id)
archive_filename = f"{export_task_id}.tar.gz"
export_json_bytes = DjangoJSONEncoder().encode(job.export_json).encode("utf-8")
user = job.user
editions = get_books_for_user(user)
if settings.USE_S3:
# Storage for writing temporary files
exports_storage = storages["exports"]
# Handle for creating the final archive
s3_tar = S3Tar(
exports_storage.bucket_name,
os.path.join(exports_storage.location, archive_filename),
session=BookwyrmAwsSession(),
)
# Save JSON file to a temporary location
export_json_tmp_file = os.path.join(export_task_id, "archive.json")
exports_storage.save(
export_json_tmp_file,
ContentFile(export_json_bytes),
)
s3_tar.add_file(
os.path.join(exports_storage.location, export_json_tmp_file)
)
# Add images to TAR
images_storage = storages["default"]
if user.avatar:
add_file_to_s3_tar(s3_tar, images_storage, user.avatar)
for edition in editions:
if edition.cover:
add_file_to_s3_tar(
s3_tar, images_storage, edition.cover, directory="images"
)
# Create archive and store file name
s3_tar.tar()
job.export_data = archive_filename
job.save(update_fields=["export_data"])
# Delete temporary files
exports_storage.delete(export_json_tmp_file)
else:
job.export_data = archive_filename
with job.export_data.open("wb") as tar_file:
with BookwyrmTarFile.open(mode="w:gz", fileobj=tar_file) as tar:
# save json file
tar.write_bytes(export_json_bytes)
# Add avatar image if present
if user.avatar:
tar.add_image(user.avatar)
for edition in editions:
if edition.cover:
tar.add_image(edition.cover, directory="images")
job.save(update_fields=["export_data"])
job.set_status("completed")
except Exception as err: # pylint: disable=broad-except
logger.exception("create_archive_task for %s failed with error: %s", job, err)
job.set_status("failed")
def export_json(user: User):
"""create export JSON"""
data = export_user(user) # in the root of the JSON structure
data["settings"] = export_settings(user)
data["goals"] = export_goals(user)
data["books"] = export_books(user)
data["saved_lists"] = export_saved_lists(user)
data["follows"] = export_follows(user)
data["blocks"] = export_blocks(user)
return data
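For orientation, the payload returned by export_json() is the user's ActivityPub document at the root with the extra keys assembled above. Roughly (all values below are placeholders, lists trimmed):

# Approximate shape of the export JSON (placeholder values)
example_export = {
    "id": "https://example.net/user/mouse",  # root is the user's AP actor document
    "icon": {"url": "avatar.png"},
    "settings": {
        "show_goal": True,
        "preferred_timezone": "UTC",
        "default_post_privacy": "public",
        "show_suggested_users": False,
    },
    "goals": [{"goal": 12, "year": 2024, "privacy": "public"}],
    "books": [],  # one entry per edition, built by export_book() below
    "saved_lists": ["https://example.net/list/1"],
    "follows": ["https://example.net/user/rat"],
    "blocks": [],
}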
def export_user(user: User):
"""export user data"""
data = user.to_activity()
if user.avatar:
data["icon"]["url"] = archive_file_location(user.avatar)
else:
data["icon"] = {}
return data
def export_settings(user: User):
"""Additional settings - can't be serialized as AP"""
vals = [
"show_goal",
"preferred_timezone",
"default_post_privacy",
"show_suggested_users",
]
return {k: getattr(user, k) for k in vals}
def export_saved_lists(user: User):
"""add user saved lists to export JSON"""
return [l.remote_id for l in user.saved_lists.all()]
def export_follows(user: User):
"""add user follows to export JSON"""
follows = UserFollows.objects.filter(user_subject=user).distinct()
following = User.objects.filter(userfollows_user_object__in=follows).distinct()
return [f.remote_id for f in following]
def export_blocks(user: User):
"""add user blocks to export JSON"""
blocks = UserBlocks.objects.filter(user_subject=user).distinct()
blocking = User.objects.filter(userblocks_user_object__in=blocks).distinct()
return [b.remote_id for b in blocking]
def export_goals(user: User):
"""add user reading goals to export JSON"""
reading_goals = AnnualGoal.objects.filter(user=user).distinct()
return [
{"goal": goal.goal, "year": goal.year, "privacy": goal.privacy}
for goal in reading_goals
]
def export_books(user: User):
"""add books to export JSON"""
editions = get_books_for_user(user)
return [export_book(user, edition) for edition in editions]
def export_book(user: User, edition: Edition):
"""add book to export JSON"""
data = {}
data["work"] = edition.parent_work.to_activity()
data["edition"] = edition.to_activity()
if edition.cover:
data["edition"]["cover"]["url"] = archive_file_location(
edition.cover, directory="images"
)
# authors
data["authors"] = [author.to_activity() for author in edition.authors.all()]
# Shelves this book is on
# Every ShelfItem is this book so we don't bother serializing them
shelf_books = (
ShelfBook.objects.select_related("shelf")
.filter(user=user, book=edition)
.distinct()
)
data["shelves"] = [shelfbook.shelf.to_activity() for shelfbook in shelf_books]
# Lists and ListItems
# ListItems include "notes" and "approved" so we need them
# even though we know it's this book
list_items = ListItem.objects.filter(book=edition, user=user).distinct()
data["lists"] = []
for item in list_items:
list_info = item.book_list.to_activity()
list_info[
"privacy"
] = item.book_list.privacy # this isn't serialized so we add it
list_info["list_item"] = item.to_activity()
data["lists"].append(list_info)
# Statuses
# Can't use select_subclasses here because
# we need to filter on the "book" value,
# which is not available on an ordinary Status
for status in ["comments", "quotations", "reviews"]:
data[status] = []
comments = Comment.objects.filter(user=user, book=edition).all()
for status in comments:
obj = status.to_activity()
obj["progress"] = status.progress
obj["progress_mode"] = status.progress_mode
data["comments"].append(obj)
quotes = Quotation.objects.filter(user=user, book=edition).all()
for status in quotes:
obj = status.to_activity()
obj["position"] = status.position
obj["endposition"] = status.endposition
obj["position_mode"] = status.position_mode
data["quotations"].append(obj)
reviews = Review.objects.filter(user=user, book=edition).all()
data["reviews"] = [status.to_activity() for status in reviews]
# readthroughs can't be serialized to activity
book_readthroughs = (
ReadThrough.objects.filter(user=user, book=edition).distinct().values()
)
data["readthroughs"] = list(book_readthroughs)
return data
def get_books_for_user(user):
"""
Get all the books and editions related to a user.
We use union() instead of Q objects because it creates
multiple simple queries instead of a much more complex DB query
that can time out.
"""
shelf_eds = Edition.objects.select_related("parent_work").filter(shelves__user=user)
rt_eds = Edition.objects.select_related("parent_work").filter(
readthrough__user=user
)
review_eds = Edition.objects.select_related("parent_work").filter(review__user=user)
list_eds = Edition.objects.select_related("parent_work").filter(list__user=user)
comment_eds = Edition.objects.select_related("parent_work").filter(
comment__user=user
)
quote_eds = Edition.objects.select_related("parent_work").filter(
quotation__user=user
)
editions = shelf_eds.union(rt_eds, review_eds, list_eds, comment_eds, quote_eds)
return editions
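For contrast, the single-query version that the docstring warns against would look something like the sketch below; the union() approach issues one simple SELECT per relation instead of one large OR across six joins.

# The Q-object equivalent this function deliberately avoids (illustrative only)
from django.db.models import Q

editions = (
    Edition.objects.select_related("parent_work")
    .filter(
        Q(shelves__user=user)
        | Q(readthrough__user=user)
        | Q(review__user=user)
        | Q(list__user=user)
        | Q(comment__user=user)
        | Q(quotation__user=user)
    )
    .distinct()
)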


@ -0,0 +1,462 @@
"""Import a user from another Bookwyrm instance"""
import json
import logging
from django.db.models import FileField, JSONField, CharField
from django.utils import timezone
from django.utils.html import strip_tags
from django.contrib.postgres.fields import ArrayField as DjangoArrayField
from bookwyrm import activitypub
from bookwyrm import models
from bookwyrm.tasks import app, IMPORTS
from bookwyrm.models.job import ParentJob, ParentTask, SubTask
from bookwyrm.utils.tar import BookwyrmTarFile
logger = logging.getLogger(__name__)
class BookwyrmImportJob(ParentJob):
"""entry for a specific request for importing a bookwyrm user backup"""
archive_file = FileField(null=True, blank=True)
import_data = JSONField(null=True)
required = DjangoArrayField(CharField(max_length=50, blank=True), blank=True)
def start_job(self):
"""Start the job"""
start_import_task.delay(job_id=self.id, no_children=True)
@app.task(queue=IMPORTS, base=ParentTask)
def start_import_task(**kwargs):
"""trigger the child import tasks for each user data"""
job = BookwyrmImportJob.objects.get(id=kwargs["job_id"])
archive_file = job.archive_file
# don't start the job if it was stopped from the UI
if job.complete:
return
try:
archive_file.open("rb")
with BookwyrmTarFile.open(mode="r:gz", fileobj=archive_file) as tar:
json_filename = next(
filter(lambda n: n.startswith("archive"), tar.getnames())
)
job.import_data = json.loads(tar.read(json_filename).decode("utf-8"))
if "include_user_profile" in job.required:
update_user_profile(job.user, tar, job.import_data)
if "include_user_settings" in job.required:
update_user_settings(job.user, job.import_data)
if "include_goals" in job.required:
update_goals(job.user, job.import_data.get("goals", []))
if "include_saved_lists" in job.required:
upsert_saved_lists(job.user, job.import_data.get("saved_lists", []))
if "include_follows" in job.required:
upsert_follows(job.user, job.import_data.get("follows", []))
if "include_blocks" in job.required:
upsert_user_blocks(job.user, job.import_data.get("blocks", []))
process_books(job, tar)
job.set_status("complete")
archive_file.close()
except Exception as err: # pylint: disable=broad-except
logger.exception("User Import Job %s Failed with error: %s", job.id, err)
job.set_status("failed")
def process_books(job, tar):
"""
Process user import data related to books
We always import the books even if not assigning
them to shelves, lists etc
"""
books = job.import_data.get("books")
for data in books:
book = get_or_create_edition(data, tar)
if "include_shelves" in job.required:
upsert_shelves(book, job.user, data)
if "include_readthroughs" in job.required:
upsert_readthroughs(data.get("readthroughs"), job.user, book.id)
if "include_comments" in job.required:
upsert_statuses(
job.user, models.Comment, data.get("comments"), book.remote_id
)
if "include_quotations" in job.required:
upsert_statuses(
job.user, models.Quotation, data.get("quotations"), book.remote_id
)
if "include_reviews" in job.required:
upsert_statuses(
job.user, models.Review, data.get("reviews"), book.remote_id
)
if "include_lists" in job.required:
upsert_lists(job.user, data.get("lists"), book.id)
def get_or_create_edition(book_data, tar):
"""Take a JSON string of work and edition data,
find or create the edition and work in the database and
return an edition instance"""
edition = book_data.get("edition")
existing = models.Edition.find_existing(edition)
if existing:
return existing
# make sure we have the authors in the local DB
# replace the old author ids in the edition JSON
edition["authors"] = []
for author in book_data.get("authors"):
parsed_author = activitypub.parse(author)
instance = parsed_author.to_model(
model=models.Author, save=True, overwrite=True
)
edition["authors"].append(instance.remote_id)
# we will add the cover later from the tar
# don't try to load it from the old server
cover = edition.get("cover", {})
cover_path = cover.get("url", None)
edition["cover"] = {}
# first we need the parent work to exist
work = book_data.get("work")
work["editions"] = []
parsed_work = activitypub.parse(work)
work_instance = parsed_work.to_model(model=models.Work, save=True, overwrite=True)
# now we have a work we can add it to the edition
# and create the edition model instance
edition["work"] = work_instance.remote_id
parsed_edition = activitypub.parse(edition)
book = parsed_edition.to_model(model=models.Edition, save=True, overwrite=True)
# set the cover image from the tar
if cover_path:
tar.write_image_to_file(cover_path, book.cover)
return book
def upsert_readthroughs(data, user, book_id):
"""Take a JSON string of readthroughs and
find or create the instances in the database"""
for read_through in data:
obj = {}
keys = [
"progress_mode",
"start_date",
"finish_date",
"stopped_date",
"is_active",
]
for key in keys:
obj[key] = read_through[key]
obj["user_id"] = user.id
obj["book_id"] = book_id
existing = models.ReadThrough.objects.filter(**obj).first()
if not existing:
models.ReadThrough.objects.create(**obj)
def upsert_statuses(user, cls, data, book_remote_id):
"""Take a JSON string of a status and
find or create the instances in the database"""
for status in data:
if is_alias(
user, status["attributedTo"]
): # don't let l33t hax0rs steal other people's posts
# update ids and remove replies
status["attributedTo"] = user.remote_id
status["to"] = update_followers_address(user, status["to"])
status["cc"] = update_followers_address(user, status["cc"])
status[
"replies"
] = (
{}
) # this parses incorrectly but we can't set it without knowing the new id
status["inReplyToBook"] = book_remote_id
parsed = activitypub.parse(status)
if not status_already_exists(
user, parsed
): # don't duplicate posts on multiple imports
instance = parsed.to_model(model=cls, save=True, overwrite=True)
for val in [
"progress",
"progress_mode",
"position",
"endposition",
"position_mode",
]:
if status.get(val):
setattr(instance, val, status[val])
instance.remote_id = instance.get_remote_id() # update the remote_id
instance.save() # save and broadcast
else:
logger.warning("User does not have permission to import statuses")
def upsert_lists(user, lists, book_id):
"""Take a list of objects each containing
a list and list item as AP objects
Because we are creating new IDs we can't assume the id
will exist or be accurate, so we only use to_model for
adding new items after checking whether they exist.
"""
book = models.Edition.objects.get(id=book_id)
for blist in lists:
booklist = models.List.objects.filter(name=blist["name"], user=user).first()
if not booklist:
blist["owner"] = user.remote_id
parsed = activitypub.parse(blist)
booklist = parsed.to_model(model=models.List, save=True, overwrite=True)
booklist.privacy = blist["privacy"]
booklist.save()
item = models.ListItem.objects.filter(book=book, book_list=booklist).exists()
if not item:
count = booklist.books.count()
models.ListItem.objects.create(
book=book,
book_list=booklist,
user=user,
notes=blist["list_item"]["notes"],
approved=blist["list_item"]["approved"],
order=count + 1,
)
def upsert_shelves(book, user, book_data):
"""Take shelf JSON objects and create
DB entries if they don't already exist"""
shelves = book_data["shelves"]
for shelf in shelves:
book_shelf = models.Shelf.objects.filter(name=shelf["name"], user=user).first()
if not book_shelf:
book_shelf = models.Shelf.objects.create(name=shelf["name"], user=user)
# add the book as a ShelfBook if needed
if not models.ShelfBook.objects.filter(
book=book, shelf=book_shelf, user=user
).exists():
models.ShelfBook.objects.create(
book=book, shelf=book_shelf, user=user, shelved_date=timezone.now()
)
def update_user_profile(user, tar, data):
"""update the user's profile from import data"""
name = data.get("name", None)
username = data.get("preferredUsername")
user.name = name if name else username
user.summary = strip_tags(data.get("summary", None))
user.save(update_fields=["name", "summary"])
if data["icon"].get("url"):
avatar_filename = next(filter(lambda n: n.startswith("avatar"), tar.getnames()))
tar.write_image_to_file(avatar_filename, user.avatar)
def update_user_settings(user, data):
"""update the user's settings from import data"""
update_fields = ["manually_approves_followers", "hide_follows", "discoverable"]
ap_fields = [
("manuallyApprovesFollowers", "manually_approves_followers"),
("hideFollows", "hide_follows"),
("discoverable", "discoverable"),
]
for (ap_field, bw_field) in ap_fields:
setattr(user, bw_field, data[ap_field])
bw_fields = [
"show_goal",
"show_suggested_users",
"default_post_privacy",
"preferred_timezone",
]
for field in bw_fields:
update_fields.append(field)
setattr(user, field, data["settings"][field])
user.save(update_fields=update_fields)
@app.task(queue=IMPORTS, base=SubTask)
def update_user_settings_task(job_id):
"""wrapper task for user's settings import"""
parent_job = BookwyrmImportJob.objects.get(id=job_id)
return update_user_settings(parent_job.user, parent_job.import_data.get("user"))
def update_goals(user, data):
"""update the user's goals from import data"""
for goal in data:
# edit the existing goal if there is one
existing = models.AnnualGoal.objects.filter(
year=goal["year"], user=user
).first()
if existing:
for k in goal.keys():
setattr(existing, k, goal[k])
existing.save()
else:
goal["user"] = user
models.AnnualGoal.objects.create(**goal)
@app.task(queue=IMPORTS, base=SubTask)
def update_goals_task(job_id):
"""wrapper task for user's goals import"""
parent_job = BookwyrmImportJob.objects.get(id=job_id)
return update_goals(parent_job.user, parent_job.import_data.get("goals"))
def upsert_saved_lists(user, values):
"""Take a list of remote ids and add as saved lists"""
for remote_id in values:
book_list = activitypub.resolve_remote_id(remote_id, models.List)
if book_list:
user.saved_lists.add(book_list)
@app.task(queue=IMPORTS, base=SubTask)
def upsert_saved_lists_task(job_id):
"""wrapper task for user's saved lists import"""
parent_job = BookwyrmImportJob.objects.get(id=job_id)
return upsert_saved_lists(
parent_job.user, parent_job.import_data.get("saved_lists")
)
def upsert_follows(user, values):
"""Take a list of remote ids and add as follows"""
for remote_id in values:
followee = activitypub.resolve_remote_id(remote_id, models.User)
if followee:
(follow_request, created,) = models.UserFollowRequest.objects.get_or_create(
user_subject=user,
user_object=followee,
)
if not created:
# this request probably failed to connect with the remote
# and should save to trigger a re-broadcast
follow_request.save()
@app.task(queue=IMPORTS, base=SubTask)
def upsert_follows_task(job_id):
"""wrapper task for user's follows import"""
parent_job = BookwyrmImportJob.objects.get(id=job_id)
return upsert_follows(parent_job.user, parent_job.import_data.get("follows"))
def upsert_user_blocks(user, user_ids):
"""block users"""
for user_id in user_ids:
user_object = activitypub.resolve_remote_id(user_id, models.User)
if user_object:
exists = models.UserBlocks.objects.filter(
user_subject=user, user_object=user_object
).exists()
if not exists:
models.UserBlocks.objects.create(
user_subject=user, user_object=user_object
)
# remove the blocked user's lists from the groups
models.List.remove_from_group(user, user_object)
# remove the blocked user from all of the blocker's owned groups
models.GroupMember.remove(user, user_object)
@app.task(queue=IMPORTS, base=SubTask)
def upsert_user_blocks_task(job_id):
"""wrapper task for user's blocks import"""
parent_job = BookwyrmImportJob.objects.get(id=job_id)
return upsert_user_blocks(
parent_job.user, parent_job.import_data.get("blocked_users")
)
def update_followers_address(user, field):
"""statuses to or cc followers need to have the followers
address updated to the new local user"""
for i, audience in enumerate(field):
if audience.rsplit("/")[-1] == "followers":
field[i] = user.followers_url
return field
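In practice this just swaps any old-instance followers collection URL for the importing user's own followers collection, for example (hostnames are made up):

# Hypothetical before/after for a status addressed to the old account's followers
old_to = ["https://old.example/user/mouse/followers"]
new_to = update_followers_address(user, old_to)
# new_to == [user.followers_url], e.g. ["https://new.example/user/mouse/followers"]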
def is_alias(user, remote_id):
"""check that the user is listed as movedTo or also_known_as
in the remote user's profile"""
remote_user = activitypub.resolve_remote_id(
remote_id=remote_id, model=models.User, save=False
)
if remote_user:
if remote_user.moved_to:
return user.remote_id == remote_user.moved_to
if remote_user.also_known_as:
return user in remote_user.also_known_as.all()
return False
def status_already_exists(user, status):
"""check whether this status has already been published
by this user. We can't rely on to_model() because it
only matches on remote_id, which we have to change
*after* saving because it needs the primary key (id)"""
return models.Status.objects.filter(
user=user, content=status.content, published_date=status.published
).exists()


@ -11,7 +11,7 @@ ConnectorFiles = models.TextChoices("ConnectorFiles", CONNECTORS)
class Connector(BookWyrmModel): class Connector(BookWyrmModel):
"""book data source connectors""" """book data source connectors"""
identifier = models.CharField(max_length=255, unique=True) identifier = models.CharField(max_length=255, unique=True) # domain
priority = models.IntegerField(default=2) priority = models.IntegerField(default=2)
name = models.CharField(max_length=255, null=True, blank=True) name = models.CharField(max_length=255, null=True, blank=True)
connector_file = models.CharField(max_length=255, choices=ConnectorFiles.choices) connector_file = models.CharField(max_length=255, choices=ConnectorFiles.choices)


@ -16,7 +16,7 @@ FederationStatus = [
class FederatedServer(BookWyrmModel): class FederatedServer(BookWyrmModel):
"""store which servers we federate with""" """store which servers we federate with"""
server_name = models.CharField(max_length=255, unique=True) server_name = models.CharField(max_length=255, unique=True) # domain
status = models.CharField( status = models.CharField(
max_length=255, default="federated", choices=FederationStatus max_length=255, default="federated", choices=FederationStatus
) )
@ -64,5 +64,4 @@ class FederatedServer(BookWyrmModel):
def is_blocked(cls, url: str) -> bool: def is_blocked(cls, url: str) -> bool:
"""look up if a domain is blocked""" """look up if a domain is blocked"""
url = urlparse(url) url = urlparse(url)
domain = url.netloc return cls.objects.filter(server_name=url.hostname, status="blocked").exists()
return cls.objects.filter(server_name=domain, status="blocked").exists()


@ -260,12 +260,12 @@ class PrivacyField(ActivitypubFieldMixin, models.CharField):
if to == [self.public]: if to == [self.public]:
setattr(instance, self.name, "public") setattr(instance, self.name, "public")
elif self.public in cc:
setattr(instance, self.name, "unlisted")
elif to == [user.followers_url]: elif to == [user.followers_url]:
setattr(instance, self.name, "followers") setattr(instance, self.name, "followers")
elif cc == []: elif cc == []:
setattr(instance, self.name, "direct") setattr(instance, self.name, "direct")
elif self.public in cc:
setattr(instance, self.name, "unlisted")
else: else:
setattr(instance, self.name, "followers") setattr(instance, self.name, "followers")
return original == getattr(instance, self.name) return original == getattr(instance, self.name)
@ -482,7 +482,7 @@ class ImageField(ActivitypubFieldMixin, models.ImageField):
if not url: if not url:
return None return None
return activitypub.Document(url=url, name=alt) return activitypub.Image(url=url, name=alt)
def field_from_activity(self, value, allow_external_connections=True): def field_from_activity(self, value, allow_external_connections=True):
image_slug = value image_slug = value


@ -1,7 +1,7 @@
""" do book related things with other users """ """ do book related things with other users """
from django.db import models, IntegrityError, transaction from django.db import models, IntegrityError, transaction
from django.db.models import Q from django.db.models import Q
from bookwyrm.settings import DOMAIN from bookwyrm.settings import BASE_URL
from .base_model import BookWyrmModel from .base_model import BookWyrmModel
from . import fields from . import fields
from .relationship import UserBlocks from .relationship import UserBlocks
@ -17,7 +17,7 @@ class Group(BookWyrmModel):
def get_remote_id(self): def get_remote_id(self):
"""don't want the user to be in there in this case""" """don't want the user to be in there in this case"""
return f"https://{DOMAIN}/group/{self.id}" return f"{BASE_URL}/group/{self.id}"
@classmethod @classmethod
def followers_filter(cls, queryset, viewer): def followers_filter(cls, queryset, viewer):


@ -2,18 +2,19 @@
from bookwyrm import activitypub from bookwyrm import activitypub
from .activitypub_mixin import ActivitypubMixin from .activitypub_mixin import ActivitypubMixin
from .base_model import BookWyrmModel from .base_model import BookWyrmModel
from .fields import CICharField from .fields import CharField
class Hashtag(ActivitypubMixin, BookWyrmModel): class Hashtag(ActivitypubMixin, BookWyrmModel):
"a hashtag which can be used in statuses" "a hashtag which can be used in statuses"
name = CICharField( name = CharField(
max_length=256, max_length=256,
blank=False, blank=False,
null=False, null=False,
activitypub_field="name", activitypub_field="name",
deduplication_field=True, deduplication_field=True,
db_collation="case_insensitive",
) )
name_field = "name" name_field = "name"

bookwyrm/models/job.py Normal file

@ -0,0 +1,307 @@
"""Everything needed for Celery to multi-thread complex tasks."""
from django.db import models
from django.db import transaction
from django.utils.translation import gettext_lazy as _
from django.utils import timezone
from bookwyrm.models.user import User
from bookwyrm.tasks import app
class Job(models.Model):
"""Abstract model to store the state of a Task."""
class Status(models.TextChoices):
"""Possible job states."""
PENDING = "pending", _("Pending")
ACTIVE = "active", _("Active")
COMPLETE = "complete", _("Complete")
STOPPED = "stopped", _("Stopped")
FAILED = "failed", _("Failed")
task_id = models.UUIDField(unique=True, null=True, blank=True)
created_date = models.DateTimeField(default=timezone.now)
updated_date = models.DateTimeField(default=timezone.now)
complete = models.BooleanField(default=False)
status = models.CharField(
max_length=50, choices=Status.choices, default=Status.PENDING, null=True
)
class Meta:
"""Make it abstract"""
abstract = True
def complete_job(self):
"""Report that the job has completed"""
if self.complete:
return
self.status = self.Status.COMPLETE
self.complete = True
self.updated_date = timezone.now()
self.save(update_fields=["status", "complete", "updated_date"])
def stop_job(self, reason=None):
"""Stop the job"""
if self.complete:
return
self.__terminate_job()
if reason and reason == "failed":
self.status = self.Status.FAILED
else:
self.status = self.Status.STOPPED
self.complete = True
self.updated_date = timezone.now()
self.save(update_fields=["status", "complete", "updated_date"])
def set_status(self, status):
"""Set job status"""
if self.complete:
return
if self.status == status:
return
if status == self.Status.COMPLETE:
self.complete_job()
return
if status == self.Status.STOPPED:
self.stop_job()
return
if status == self.Status.FAILED:
self.stop_job(reason="failed")
return
self.updated_date = timezone.now()
self.status = status
self.save(update_fields=["status", "updated_date"])
def __terminate_job(self):
"""Tell workers to ignore and not execute this task."""
app.control.revoke(self.task_id, terminate=True)
class ParentJob(Job):
"""Store the state of a Task which can spawn many :model:`ChildJob`s to spread
resource load.
Intended to be sub-classed if necessary via proxy or
multi-table inheritance.
Extends :model:`Job`.
"""
user = models.ForeignKey(User, on_delete=models.CASCADE)
def complete_job(self):
"""Report that the job has completed and stop pending
children. Extend.
"""
super().complete_job()
self.__terminate_pending_child_jobs()
def notify_child_job_complete(self):
"""let the job know when the items get work done"""
if self.complete:
return
self.updated_date = timezone.now()
self.save(update_fields=["updated_date"])
if not self.complete and self.has_completed:
self.complete_job()
def __terminate_job(self): # pylint: disable=unused-private-member
"""Tell workers to ignore and not execute this task
& pending child tasks. Extend.
"""
super().__terminate_job()
self.__terminate_pending_child_jobs()
def __terminate_pending_child_jobs(self):
"""Tell workers to ignore and not execute any pending child tasks."""
tasks = self.pending_child_jobs.filter(task_id__isnull=False).values_list(
"task_id", flat=True
)
app.control.revoke(list(tasks))
self.pending_child_jobs.update(status=self.Status.STOPPED)
@property
def has_completed(self):
"""has this job finished"""
return not self.pending_child_jobs.exists()
@property
def pending_child_jobs(self):
"""items that haven't been processed yet"""
return self.child_jobs.filter(complete=False)
class ChildJob(Job):
"""Stores the state of a Task for the related :model:`ParentJob`.
Intended to be sub-classed if necessary via proxy or
multi-table inheritance.
Extends :model:`Job`.
"""
parent_job = models.ForeignKey(
ParentJob, on_delete=models.CASCADE, related_name="child_jobs"
)
def set_status(self, status):
"""Set job and parent_job status. Extend."""
super().set_status(status)
if (
status == self.Status.ACTIVE
and self.parent_job.status == self.Status.PENDING
):
self.parent_job.set_status(self.Status.ACTIVE)
def complete_job(self):
"""Report to parent_job that the job has completed. Extend."""
super().complete_job()
self.parent_job.notify_child_job_complete()
class ParentTask(app.Task):
"""Used with ParentJob, Abstract Tasks execute code at specific points in
a Task's lifecycle, applying to all Tasks with the same 'base'.
All status & ParentJob.task_id assignment is managed here for you.
Usage e.g. @app.task(base=ParentTask)
"""
def before_start(
self, task_id, args, kwargs
): # pylint: disable=no-self-use, unused-argument
"""Handler called before the task starts. Override.
Prepare ParentJob before the task starts.
Arguments:
task_id (str): Unique id of the task to execute.
args (Tuple): Original arguments for the task to execute.
kwargs (Dict): Original keyword arguments for the task to execute.
Keyword Arguments:
job_id (int): Unique 'id' of the ParentJob.
no_children (bool): If 'True' this is the only Task expected to run
for the given ParentJob.
Returns:
None: The return value of this handler is ignored.
"""
job = ParentJob.objects.get(id=kwargs["job_id"])
job.task_id = task_id
job.save(update_fields=["task_id"])
if kwargs["no_children"]:
job.set_status(ChildJob.Status.ACTIVE)
def on_success(
self, retval, task_id, args, kwargs
): # pylint: disable=no-self-use, unused-argument
"""Run by the worker if the task executes successfully. Override.
Update ParentJob on Task complete.
Arguments:
retval (Any): The return value of the task.
task_id (str): Unique id of the executed task.
args (Tuple): Original arguments for the executed task.
kwargs (Dict): Original keyword arguments for the executed task.
Keyword Arguments:
job_id (int): Unique 'id' of the ParentJob.
no_children (bool): If 'True' this is the only Task expected to run
for the given ParentJob.
Returns:
None: The return value of this handler is ignored.
"""
if kwargs["no_children"]:
job = ParentJob.objects.get(id=kwargs["job_id"])
job.complete_job()
class SubTask(app.Task):
"""Used with ChildJob, Abstract Tasks execute code at specific points in
a Task's lifecycle, applying to all Tasks with the same 'base'.
All status & ChildJob.task_id assignment is managed here for you.
Usage e.g. @app.task(base=SubTask)
"""
def before_start(
self, task_id, *args, **kwargs
): # pylint: disable=no-self-use, unused-argument
"""Handler called before the task starts. Override.
Prepare ChildJob before the task starts.
Arguments:
task_id (str): Unique id of the task to execute.
args (Tuple): Original arguments for the task to execute.
kwargs (Dict): Original keyword arguments for the task to execute.
Keyword Arguments:
job_id (int): Unique 'id' of the ParentJob.
child_id (int): Unique 'id' of the ChildJob.
Returns:
None: The return value of this handler is ignored.
"""
child_job = ChildJob.objects.get(id=kwargs["child_id"])
child_job.task_id = task_id
child_job.save(update_fields=["task_id"])
child_job.set_status(ChildJob.Status.ACTIVE)
def on_success(
self, retval, task_id, *args, **kwargs
): # pylint: disable=no-self-use, unused-argument
"""Run by the worker if the task executes successfully. Override.
Notify ChildJob of task completion.
Arguments:
retval (Any): The return value of the task.
task_id (str): Unique id of the executed task.
args (Tuple): Original arguments for the executed task.
kwargs (Dict): Original keyword arguments for the executed task.
Keyword Arguments:
job_id (int): Unique 'id' of the ParentJob.
child_id (int): Unique 'id' of the ChildJob.
Returns:
None: The return value of this handler is ignored.
"""
subtask = ChildJob.objects.get(id=kwargs["child_id"])
subtask.complete_job()
@transaction.atomic
def create_child_job(parent_job, task_callback):
"""Utility method for creating a ChildJob
and running a task to avoid DB race conditions
"""
child_job = ChildJob.objects.create(parent_job=parent_job)
transaction.on_commit(
lambda: task_callback.delay(job_id=parent_job.id, child_id=child_job.id)
)
return child_job
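Putting the pieces above together, a job that fans work out to children would presumably look something like this; the task names and the fixed loop of three children are invented for illustration.

# Hypothetical fan-out job built on the classes above (names are invented)
@app.task(base=ParentTask)
def start_example_import_task(**kwargs):
    """ParentTask.before_start has already stored task_id on the ParentJob"""
    job = ParentJob.objects.get(id=kwargs["job_id"])
    for _ in range(3):  # one child per unit of work
        create_child_job(job, process_example_item_task)

@app.task(base=SubTask)
def process_example_item_task(**kwargs):
    """SubTask marks the ChildJob active before, and complete after, this runs"""
    child_job = ChildJob.objects.get(id=kwargs["child_id"])
    # ... the actual per-item work would go here ...

The parent would then be kicked off with start_example_import_task.delay(job_id=job.id, no_children=False), matching the keyword arguments ParentTask.before_start expects.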


@ -1,4 +1,5 @@
""" outlink data """ """ outlink data """
from typing import Optional, Iterable
from urllib.parse import urlparse from urllib.parse import urlparse
from django.core.exceptions import PermissionDenied from django.core.exceptions import PermissionDenied
@ -6,6 +7,7 @@ from django.db import models
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from bookwyrm import activitypub from bookwyrm import activitypub
from bookwyrm.utils.db import add_update_fields
from .activitypub_mixin import ActivitypubMixin from .activitypub_mixin import ActivitypubMixin
from .base_model import BookWyrmModel from .base_model import BookWyrmModel
from . import fields from . import fields
@ -34,17 +36,19 @@ class Link(ActivitypubMixin, BookWyrmModel):
"""link name via the associated domain""" """link name via the associated domain"""
return self.domain.name return self.domain.name
def save(self, *args, **kwargs): def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs):
"""create a link""" """create a link"""
# get or create the associated domain # get or create the associated domain
if not self.domain: if not self.domain:
domain = urlparse(self.url).netloc domain = urlparse(self.url).hostname
self.domain, _ = LinkDomain.objects.get_or_create(domain=domain) self.domain, _ = LinkDomain.objects.get_or_create(domain=domain)
update_fields = add_update_fields(update_fields, "domain")
# this is never broadcast, the owning model broadcasts an update # this is never broadcast, the owning model broadcasts an update
if "broadcast" in kwargs: if "broadcast" in kwargs:
del kwargs["broadcast"] del kwargs["broadcast"]
return super().save(*args, **kwargs)
super().save(*args, update_fields=update_fields, **kwargs)
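The switch from netloc to hostname above matters when a URL carries a port or credentials: hostname strips both and lowercases, so LinkDomain rows stay keyed by the bare domain. For example:

from urllib.parse import urlparse

parts = urlparse("https://Example.COM:8080/some/file.pdf")
print(parts.netloc)    # Example.COM:8080
print(parts.hostname)  # example.com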
AvailabilityChoices = [ AvailabilityChoices = [
@ -88,8 +92,10 @@ class LinkDomain(BookWyrmModel):
return return
raise PermissionDenied() raise PermissionDenied()
def save(self, *args, **kwargs): def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs):
"""set a default name""" """set a default name"""
if not self.name: if not self.name:
self.name = self.domain self.name = self.domain
super().save(*args, **kwargs) update_fields = add_update_fields(update_fields, "name")
super().save(*args, update_fields=update_fields, **kwargs)


@ -1,4 +1,5 @@
""" make a list of books!! """ """ make a list of books!! """
from typing import Optional, Iterable
import uuid import uuid
from django.core.exceptions import PermissionDenied from django.core.exceptions import PermissionDenied
@ -7,7 +8,8 @@ from django.db.models import Q
from django.utils import timezone from django.utils import timezone
from bookwyrm import activitypub from bookwyrm import activitypub
from bookwyrm.settings import DOMAIN from bookwyrm.settings import BASE_URL
from bookwyrm.utils.db import add_update_fields
from .activitypub_mixin import CollectionItemMixin, OrderedCollectionMixin from .activitypub_mixin import CollectionItemMixin, OrderedCollectionMixin
from .base_model import BookWyrmModel from .base_model import BookWyrmModel
@ -50,7 +52,7 @@ class List(OrderedCollectionMixin, BookWyrmModel):
def get_remote_id(self): def get_remote_id(self):
"""don't want the user to be in there in this case""" """don't want the user to be in there in this case"""
return f"https://{DOMAIN}/list/{self.id}" return f"{BASE_URL}/list/{self.id}"
@property @property
def collection_queryset(self): def collection_queryset(self):
@ -124,11 +126,13 @@ class List(OrderedCollectionMixin, BookWyrmModel):
group=None, curation="closed" group=None, curation="closed"
) )
def save(self, *args, **kwargs): def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs):
"""on save, update embed_key and avoid clash with existing code""" """on save, update embed_key and avoid clash with existing code"""
if not self.embed_key: if not self.embed_key:
self.embed_key = uuid.uuid4() self.embed_key = uuid.uuid4()
super().save(*args, **kwargs) update_fields = add_update_fields(update_fields, "embed_key")
super().save(*args, update_fields=update_fields, **kwargs)
class ListItem(CollectionItemMixin, BookWyrmModel): class ListItem(CollectionItemMixin, BookWyrmModel):


@ -10,7 +10,7 @@ from .notification import Notification, NotificationType
class Move(ActivityMixin, BookWyrmModel): class Move(ActivityMixin, BookWyrmModel):
"""migrating an activitypub user account""" """migrating an activitypub object"""
user = fields.ForeignKey( user = fields.ForeignKey(
"User", on_delete=models.PROTECT, activitypub_field="actor" "User", on_delete=models.PROTECT, activitypub_field="actor"
@ -48,16 +48,16 @@ class MoveUser(Move):
"""update user info and broadcast it""" """update user info and broadcast it"""
# only allow if the source is listed in the target's alsoKnownAs # only allow if the source is listed in the target's alsoKnownAs
if self.user in self.target.also_known_as.all(): if self.user not in self.target.also_known_as.all():
raise PermissionDenied()
self.user.also_known_as.add(self.target.id) self.user.also_known_as.add(self.target.id)
self.user.update_active_date() self.user.update_active_date()
self.user.moved_to = self.target.remote_id self.user.moved_to = self.target.remote_id
self.user.save(update_fields=["moved_to"]) self.user.save(update_fields=["moved_to"])
if self.user.local: if self.user.local:
kwargs[ kwargs["broadcast"] = True # Only broadcast if we are initiating the Move
"broadcast"
] = True # Only broadcast if we are initiating the Move
super().save(*args, **kwargs) super().save(*args, **kwargs)
@ -66,6 +66,3 @@ class MoveUser(Move):
Notification.notify( Notification.notify(
follower, self.user, notification_type=NotificationType.MOVE follower, self.user, notification_type=NotificationType.MOVE
) )
else:
raise PermissionDenied()


@ -1,8 +1,16 @@
""" alert a user to activity """ """ alert a user to activity """
from django.db import models, transaction from django.db import models, transaction
from django.dispatch import receiver from django.dispatch import receiver
from bookwyrm.models.bookwyrm_export_job import BookwyrmExportJob
from .base_model import BookWyrmModel from .base_model import BookWyrmModel
from . import Boost, Favorite, GroupMemberInvitation, ImportJob, LinkDomain from . import (
Boost,
Favorite,
GroupMemberInvitation,
ImportJob,
BookwyrmImportJob,
LinkDomain,
)
from . import ListItem, Report, Status, User, UserFollowRequest from . import ListItem, Report, Status, User, UserFollowRequest
from .site import InviteRequest from .site import InviteRequest
@ -23,6 +31,8 @@ class NotificationType(models.TextChoices):
# Imports # Imports
IMPORT = "IMPORT" IMPORT = "IMPORT"
USER_IMPORT = "USER_IMPORT"
USER_EXPORT = "USER_EXPORT"
# List activity # List activity
ADD = "ADD" ADD = "ADD"
@ -63,6 +73,9 @@ class Notification(BookWyrmModel):
) )
related_status = models.ForeignKey("Status", on_delete=models.CASCADE, null=True) related_status = models.ForeignKey("Status", on_delete=models.CASCADE, null=True)
related_import = models.ForeignKey("ImportJob", on_delete=models.CASCADE, null=True) related_import = models.ForeignKey("ImportJob", on_delete=models.CASCADE, null=True)
related_user_export = models.ForeignKey(
"BookwyrmExportJob", on_delete=models.CASCADE, null=True
)
related_list_items = models.ManyToManyField( related_list_items = models.ManyToManyField(
"ListItem", symmetrical=False, related_name="notifications" "ListItem", symmetrical=False, related_name="notifications"
) )
@ -226,6 +239,36 @@ def notify_user_on_import_complete(
) )
@receiver(models.signals.post_save, sender=BookwyrmImportJob)
# pylint: disable=unused-argument
def notify_user_on_user_import_complete(
sender, instance, *args, update_fields=None, **kwargs
):
"""we imported your user details! aren't you proud of us"""
update_fields = update_fields or []
if not instance.complete or "complete" not in update_fields:
return
Notification.objects.create(
user=instance.user, notification_type=NotificationType.USER_IMPORT
)
@receiver(models.signals.post_save, sender=BookwyrmExportJob)
# pylint: disable=unused-argument
def notify_user_on_user_export_complete(
sender, instance, *args, update_fields=None, **kwargs
):
"""we exported your user details! aren't you proud of us"""
update_fields = update_fields or []
if not instance.complete or "complete" not in update_fields:
return
Notification.objects.create(
user=instance.user,
notification_type=NotificationType.USER_EXPORT,
related_user_export=instance,
)
@receiver(models.signals.post_save, sender=Report) @receiver(models.signals.post_save, sender=Report)
@transaction.atomic @transaction.atomic
# pylint: disable=unused-argument # pylint: disable=unused-argument


@ -1,9 +1,13 @@
""" progress in a book """ """ progress in a book """
from typing import Optional, Iterable
from django.core import validators from django.core import validators
from django.core.cache import cache from django.core.cache import cache
from django.db import models from django.db import models
from django.db.models import F, Q from django.db.models import F, Q
from bookwyrm.utils.db import add_update_fields
from .base_model import BookWyrmModel from .base_model import BookWyrmModel
@ -30,14 +34,17 @@ class ReadThrough(BookWyrmModel):
stopped_date = models.DateTimeField(blank=True, null=True) stopped_date = models.DateTimeField(blank=True, null=True)
is_active = models.BooleanField(default=True) is_active = models.BooleanField(default=True)
def save(self, *args, **kwargs): def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs):
"""update user active time""" """update user active time"""
cache.delete(f"latest_read_through-{self.user_id}-{self.book_id}")
self.user.update_active_date()
# an active readthrough must have an unset finish date # an active readthrough must have an unset finish date
if self.finish_date or self.stopped_date: if self.finish_date or self.stopped_date:
self.is_active = False self.is_active = False
super().save(*args, **kwargs) update_fields = add_update_fields(update_fields, "is_active")
super().save(*args, update_fields=update_fields, **kwargs)
cache.delete(f"latest_read_through-{self.user_id}-{self.book_id}")
self.user.update_active_date()
def create_update(self): def create_update(self):
"""add update to the readthrough""" """add update to the readthrough"""


@ -38,14 +38,16 @@ class UserRelationship(BookWyrmModel):
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
"""clear the template cache""" """clear the template cache"""
clear_cache(self.user_subject, self.user_object)
super().save(*args, **kwargs) super().save(*args, **kwargs)
clear_cache(self.user_subject, self.user_object)
def delete(self, *args, **kwargs): def delete(self, *args, **kwargs):
"""clear the template cache""" """clear the template cache"""
clear_cache(self.user_subject, self.user_object)
super().delete(*args, **kwargs) super().delete(*args, **kwargs)
clear_cache(self.user_subject, self.user_object)
class Meta: class Meta:
"""relationships should be unique""" """relationships should be unique"""
@ -65,6 +67,13 @@ class UserRelationship(BookWyrmModel):
base_path = self.user_subject.remote_id base_path = self.user_subject.remote_id
return f"{base_path}#follows/{self.id}" return f"{base_path}#follows/{self.id}"
def get_accept_reject_id(self, status):
"""get id for sending an accept or reject of a local user"""
base_path = self.user_object.remote_id
status_id = self.id or 0
return f"{base_path}#{status}/{status_id}"
class UserFollows(ActivityMixin, UserRelationship): class UserFollows(ActivityMixin, UserRelationship):
"""Following a user""" """Following a user"""
@ -105,6 +114,20 @@ class UserFollows(ActivityMixin, UserRelationship):
) )
return obj return obj
def reject(self):
"""generate a Reject for this follow. This would normally happen
when a user deletes a follow they previously accepted"""
if self.user_object.local:
activity = activitypub.Reject(
id=self.get_accept_reject_id(status="rejects"),
actor=self.user_object.remote_id,
object=self.to_activity(),
).serialize()
self.broadcast(activity, self.user_object)
self.delete()
class UserFollowRequest(ActivitypubMixin, UserRelationship): class UserFollowRequest(ActivitypubMixin, UserRelationship):
"""following a user requires manual or automatic confirmation""" """following a user requires manual or automatic confirmation"""
@ -148,13 +171,6 @@ class UserFollowRequest(ActivitypubMixin, UserRelationship):
if not manually_approves: if not manually_approves:
self.accept() self.accept()
def get_accept_reject_id(self, status):
"""get id for sending an accept or reject of a local user"""
base_path = self.user_object.remote_id
status_id = self.id or 0
return f"{base_path}#{status}/{status_id}"
def accept(self, broadcast_only=False): def accept(self, broadcast_only=False):
"""turn this request into the real deal""" """turn this request into the real deal"""
user = self.user_object user = self.user_object


@@ -3,7 +3,7 @@ from django.core.exceptions import PermissionDenied
 from django.db import models
 from django.utils.translation import gettext_lazy as _

-from bookwyrm.settings import DOMAIN
+from bookwyrm.settings import BASE_URL
 from .base_model import BookWyrmModel

@@ -46,7 +46,7 @@ class Report(BookWyrmModel):
         raise PermissionDenied()

     def get_remote_id(self):
-        return f"https://{DOMAIN}/settings/reports/{self.id}"
+        return f"{BASE_URL}/settings/reports/{self.id}"

     def comment(self, user, note):
         """comment on a report"""

View file

@@ -1,13 +1,15 @@
 """ puttin' books on shelves """
 import re
+from typing import Optional, Iterable

 from django.core.cache import cache
 from django.core.exceptions import PermissionDenied
 from django.db import models
 from django.utils import timezone

 from bookwyrm import activitypub
-from bookwyrm.settings import DOMAIN
+from bookwyrm.settings import BASE_URL
 from bookwyrm.tasks import BROADCAST
+from bookwyrm.utils.db import add_update_fields

 from .activitypub_mixin import CollectionItemMixin, OrderedCollectionMixin
 from .base_model import BookWyrmModel
 from . import fields
@@ -44,8 +46,9 @@ class Shelf(OrderedCollectionMixin, BookWyrmModel):
         """set the identifier"""
         super().save(*args, priority=priority, **kwargs)
         if not self.identifier:
+            # this needs the auto increment ID from the save() above
             self.identifier = self.get_identifier()
-            super().save(*args, **kwargs, broadcast=False)
+            super().save(*args, **kwargs, broadcast=False, update_fields={"identifier"})

     def get_identifier(self):
         """custom-shelf-123 for the url"""
@@ -71,7 +74,7 @@ class Shelf(OrderedCollectionMixin, BookWyrmModel):
     @property
     def local_path(self):
         """No slugs"""
-        return self.get_remote_id().replace(f"https://{DOMAIN}", "")
+        return self.get_remote_id().replace(BASE_URL, "")

     def raise_not_deletable(self, viewer):
         """don't let anyone delete a default shelf"""
@@ -100,10 +103,21 @@ class ShelfBook(CollectionItemMixin, BookWyrmModel):
     activity_serializer = activitypub.ShelfItem
     collection_field = "shelf"

-    def save(self, *args, priority=BROADCAST, **kwargs):
+    def save(
+        self,
+        *args,
+        priority=BROADCAST,
+        update_fields: Optional[Iterable[str]] = None,
+        **kwargs,
+    ):
         if not self.user:
             self.user = self.shelf.user
-        if self.id and self.user.local:
+            update_fields = add_update_fields(update_fields, "user")
+
+        is_update = self.id is not None
+
+        super().save(*args, priority=priority, update_fields=update_fields, **kwargs)
+
+        if is_update and self.user.local:
             # remove all caches related to all editions of this book
             cache.delete_many(
                 [
@@ -111,7 +125,6 @@ class ShelfBook(CollectionItemMixin, BookWyrmModel):
                     for book in self.book.parent_work.editions.all()
                 ]
             )
-        super().save(*args, priority=priority, **kwargs)

     def delete(self, *args, **kwargs):
         if self.id and self.user.local:
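The ShelfBook change follows the same save() pattern used throughout this diff: record any fields mutated before the write via update_fields, call super().save() exactly once, and only run side effects such as cache invalidation after the row exists. A stripped-down Django sketch of that pattern, with made-up model and field names:

    from typing import Iterable, Optional

    from django.core.cache import cache
    from django.db import models


    class Widget(models.Model):
        """Invented model, purely to illustrate the pattern."""

        name = models.CharField(max_length=100)
        slug = models.SlugField(blank=True)

        def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs):
            if not self.slug:
                self.slug = self.name.lower().replace(" ", "-")
                # make sure a partial save (update_fields=[...]) still writes slug
                if update_fields is not None:
                    update_fields = set(update_fields) | {"slug"}

            is_update = self.id is not None

            super().save(*args, update_fields=update_fields, **kwargs)

            if is_update:
                # side effects only after the database write succeeds
                cache.delete(f"widget-{self.id}")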

Some files were not shown because too many files have changed in this diff