diff --git a/.env.example b/.env.example index fb0f7308d..c61ceba1e 100644 --- a/.env.example +++ b/.env.example @@ -16,6 +16,11 @@ DEFAULT_LANGUAGE="English" ## Leave unset to allow all hosts # ALLOWED_HOSTS="localhost,127.0.0.1,[::1]" +# Specify when the site is served from a port that is not the default +# for the protocol (80 for HTTP or 443 for HTTPS). +# Probably only necessary in development. +# PORT=1333 + MEDIA_ROOT=images/ # Database configuration @@ -71,14 +76,20 @@ ENABLE_THUMBNAIL_GENERATION=true USE_S3=false AWS_ACCESS_KEY_ID= AWS_SECRET_ACCESS_KEY= +# seconds for signed S3 urls to expire +# this is currently only used for user export files +S3_SIGNED_URL_EXPIRY=900 # Commented are example values if you use a non-AWS, S3-compatible service # AWS S3 should work with only AWS_STORAGE_BUCKET_NAME and AWS_S3_REGION_NAME # non-AWS S3-compatible services will need AWS_STORAGE_BUCKET_NAME, -# along with both AWS_S3_CUSTOM_DOMAIN and AWS_S3_ENDPOINT_URL +# along with both AWS_S3_CUSTOM_DOMAIN and AWS_S3_ENDPOINT_URL. +# AWS_S3_URL_PROTOCOL must end in ":" and defaults to the same protocol as +# the BookWyrm instance ("http:" or "https:", based on USE_SSL). # AWS_STORAGE_BUCKET_NAME= # "example-bucket-name" # AWS_S3_CUSTOM_DOMAIN=None # "example-bucket-name.s3.fr-par.scw.cloud" +# AWS_S3_URL_PROTOCOL=None # "http:" # AWS_S3_REGION_NAME=None # "fr-par" # AWS_S3_ENDPOINT_URL=None # "https://s3.fr-par.scw.cloud" @@ -133,7 +144,14 @@ HTTP_X_FORWARDED_PROTO=false TWO_FACTOR_LOGIN_VALIDITY_WINDOW=2 TWO_FACTOR_LOGIN_MAX_SECONDS=60 -# Additional hosts to allow in the Content-Security-Policy, "self" (should be DOMAIN) -# and AWS_S3_CUSTOM_DOMAIN (if used) are added by default. -# Value should be a comma-separated list of host names. +# Additional hosts to allow in the Content-Security-Policy, "self" (should be +# DOMAIN with optionally ":" + PORT) and AWS_S3_CUSTOM_DOMAIN (if used) are +# added by default. Value should be a comma-separated list of host names. CSP_ADDITIONAL_HOSTS= + +# Time before being logged out (in seconds) +# SESSION_COOKIE_AGE=2592000 # current default: 30 days + +# Maximum allowed memory for file uploads (increase if users are having trouble +# uploading BookWyrm export files). +# DATA_UPLOAD_MAX_MEMORY_MiB=100 diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 000000000..99c92478d --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,78 @@ + + +## Are you finished? + +### Linters + + +- [ ] I have checked my code with `black`, `pylint`, and `mypy`, or `./bw-dev formatters` + +### Tests + + +- [ ] My changes do not need new tests +- [ ] All tests I have added are passing +- [ ] I have written tests but need help to make them pass +- [ ] I have not written tests and need help to write them + +## What type of Pull Request is this? + + +- [ ] Bug Fix +- [ ] Enhancement +- [ ] Plumbing / Internals / Dependencies +- [ ] Refactor + +## Does this PR change settings or dependencies, or break something? 
+ + +- [ ] This PR changes or adds default settings, configuration, or .env values +- [ ] This PR changes or adds dependencies +- [ ] This PR introduces other breaking changes + +### Details of breaking or configuration changes (if any of above checked) + +## Description + + + +- Related Issue # +- Closes # + +## Documentation + + + + +- [ ] New or amended documentation will be required if this PR is merged +- [ ] I have created a matching pull request in the Documentation repository +- [ ] I intend to create a matching pull request in the Documentation repository after this PR is merged + diff --git a/.github/release.yml b/.github/release.yml new file mode 100644 index 000000000..3a347bf51 --- /dev/null +++ b/.github/release.yml @@ -0,0 +1,26 @@ +changelog: + exclude: + labels: + - ignore-for-release + categories: + - title: ‼️ Breaking Changes & New Settings ⚙️ + labels: + - breaking-change + - config-change + - title: Updated Dependencies 🧸 + labels: + - dependencies + - title: New Features 🎉 + labels: + - enhancement + - title: Bug Fixes 🐛 + labels: + - fix + - bug + - title: Internals/Plumbing 👩‍🔧 + - plumbing + - tests + - deployment + - title: Other Changes + labels: + - "*" diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml deleted file mode 100644 index 4e7be4af3..000000000 --- a/.github/workflows/black.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: Python Formatting (run ./bw-dev black to fix) - -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - -jobs: - lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 - - uses: psf/black@22.12.0 - with: - version: 22.12.0 diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 68bb05d7e..014745a52 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -36,11 +36,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -51,7 +51,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v2 + uses: github/codeql-action/autobuild@v3 # ℹ️ Command-line programs to run using the OS shell. 
# 📚 https://git.io/JvXDl @@ -65,4 +65,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/curlylint.yaml b/.github/workflows/curlylint.yaml index 8d5c6b4f7..10ad04ce1 100644 --- a/.github/workflows/curlylint.yaml +++ b/.github/workflows/curlylint.yaml @@ -10,7 +10,7 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install curlylint run: pip install curlylint diff --git a/.github/workflows/django-tests.yml b/.github/workflows/django-tests.yml deleted file mode 100644 index 78b6e142e..000000000 --- a/.github/workflows/django-tests.yml +++ /dev/null @@ -1,70 +0,0 @@ -name: Run Python Tests -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - -jobs: - build: - - runs-on: ubuntu-20.04 - services: - postgres: - image: postgres:13 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: hunter2 - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 - steps: - - uses: actions/checkout@v3 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - name: Install Dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - - name: Check migrations up-to-date - run: | - python ./manage.py makemigrations --check - env: - SECRET_KEY: beepbeep - DOMAIN: your.domain.here - EMAIL_HOST: "" - EMAIL_HOST_USER: "" - EMAIL_HOST_PASSWORD: "" - - name: Run Tests - env: - SECRET_KEY: beepbeep - DEBUG: false - USE_HTTPS: true - DOMAIN: your.domain.here - BOOKWYRM_DATABASE_BACKEND: postgres - MEDIA_ROOT: images/ - POSTGRES_PASSWORD: hunter2 - POSTGRES_USER: postgres - POSTGRES_DB: github_actions - POSTGRES_HOST: 127.0.0.1 - CELERY_BROKER: "" - REDIS_BROKER_PORT: 6379 - REDIS_BROKER_PASSWORD: beep - USE_DUMMY_CACHE: true - FLOWER_PORT: 8888 - EMAIL_HOST: "smtp.mailgun.org" - EMAIL_PORT: 587 - EMAIL_HOST_USER: "" - EMAIL_HOST_PASSWORD: "" - EMAIL_USE_TLS: true - ENABLE_PREVIEW_IMAGES: false - ENABLE_THUMBNAIL_GENERATION: true - HTTP_X_FORWARDED_PROTO: false - run: | - pytest -n 3 diff --git a/.github/workflows/lint-frontend.yaml b/.github/workflows/lint-frontend.yaml index 0d0559e40..b0322f371 100644 --- a/.github/workflows/lint-frontend.yaml +++ b/.github/workflows/lint-frontend.yaml @@ -19,10 +19,11 @@ jobs: steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it. - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install modules - run: npm install stylelint stylelint-config-recommended stylelint-config-standard stylelint-order eslint + # run: npm install stylelint stylelint-config-recommended stylelint-config-standard stylelint-order eslint + run: npm install eslint@^8.9.0 # See .stylelintignore for files that are not linted. 
# - name: Run stylelint diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml deleted file mode 100644 index 1a641edd2..000000000 --- a/.github/workflows/mypy.yml +++ /dev/null @@ -1,50 +0,0 @@ -name: Mypy - -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - -jobs: - build: - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.9 - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - name: Install Dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - - name: Analysing the code with mypy - env: - SECRET_KEY: beepbeep - DEBUG: false - USE_HTTPS: true - DOMAIN: your.domain.here - BOOKWYRM_DATABASE_BACKEND: postgres - MEDIA_ROOT: images/ - POSTGRES_PASSWORD: hunter2 - POSTGRES_USER: postgres - POSTGRES_DB: github_actions - POSTGRES_HOST: 127.0.0.1 - CELERY_BROKER: "" - REDIS_BROKER_PORT: 6379 - REDIS_BROKER_PASSWORD: beep - USE_DUMMY_CACHE: true - FLOWER_PORT: 8888 - EMAIL_HOST: "smtp.mailgun.org" - EMAIL_PORT: 587 - EMAIL_HOST_USER: "" - EMAIL_HOST_PASSWORD: "" - EMAIL_USE_TLS: true - ENABLE_PREVIEW_IMAGES: false - ENABLE_THUMBNAIL_GENERATION: true - HTTP_X_FORWARDED_PROTO: false - run: | - mypy bookwyrm celerywyrm diff --git a/.github/workflows/prettier.yaml b/.github/workflows/prettier.yaml index 501516ae1..9c05c7476 100644 --- a/.github/workflows/prettier.yaml +++ b/.github/workflows/prettier.yaml @@ -14,7 +14,7 @@ jobs: steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it. - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install modules run: npm install prettier@2.5.1 diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml deleted file mode 100644 index 3811c97d3..000000000 --- a/.github/workflows/pylint.yml +++ /dev/null @@ -1,27 +0,0 @@ -name: Pylint - -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - -jobs: - build: - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.9 - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - name: Install Dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - - name: Analysing the code with pylint - run: | - pylint bookwyrm/ - diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml new file mode 100644 index 000000000..01241b467 --- /dev/null +++ b/.github/workflows/python.yml @@ -0,0 +1,99 @@ +name: Python +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +# overrides for .env.example +env: + POSTGRES_HOST: 127.0.0.1 + PGPORT: 5432 + POSTGRES_USER: postgres + POSTGRES_PASSWORD: hunter2 + POSTGRES_DB: github_actions + SECRET_KEY: beepbeep + EMAIL_HOST_USER: "" + EMAIL_HOST_PASSWORD: "" + +jobs: + pytest: + name: Tests (pytest) + runs-on: ubuntu-latest + services: + postgres: + image: postgres:13 + env: # does not inherit from jobs.build.env + POSTGRES_USER: postgres + POSTGRES_PASSWORD: hunter2 + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: 3.11 + cache: pip + - name: Install Dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + pip install pytest-github-actions-annotate-failures + - name: Set up .env + run: cp .env.example .env + - name: Check migrations 
up-to-date + run: python ./manage.py makemigrations --check + - name: Run Tests + run: pytest -n 3 + + pylint: + name: Linting (pylint) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: 3.11 + cache: pip + - name: Install Dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + - name: Analyse code with pylint + run: pylint bookwyrm/ + + mypy: + name: Typing (mypy) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: 3.11 + cache: pip + - name: Install Dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + - name: Set up .env + run: cp .env.example .env + - name: Analyse code with mypy + run: mypy bookwyrm celerywyrm + + black: + name: Formatting (black; run ./bw-dev black to fix) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + - uses: psf/black@stable + with: + version: "22.*" diff --git a/.gitignore b/.gitignore index ec2a08f80..fd6cc7547 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,8 @@ # BookWyrm .env /images/ +/exports/ +/static/ bookwyrm/static/css/bookwyrm.css bookwyrm/static/css/themes/ !bookwyrm/static/css/themes/bookwyrm-*.scss @@ -36,3 +38,6 @@ nginx/default.conf #macOS **/.DS_Store + +# Docker +docker-compose.override.yml diff --git a/Dockerfile b/Dockerfile index b3cd26e88..82b0c92c5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9 +FROM python:3.11 ENV PYTHONUNBUFFERED 1 diff --git a/README.md b/README.md index f8b2eb1f6..7e27d44e6 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,6 @@ BookWyrm is a social network for tracking your reading, talking about books, wri ## Links [![Mastodon Follow](https://img.shields.io/mastodon/follow/000146121?domain=https%3A%2F%2Ftech.lgbt&style=social)](https://tech.lgbt/@bookwyrm) -[![Twitter Follow](https://img.shields.io/twitter/follow/BookWyrmSocial?style=social)](https://twitter.com/BookWyrmSocial) - [Project homepage](https://joinbookwyrm.com/) - [Support](https://patreon.com/bookwyrm) diff --git a/VERSION b/VERSION index faef31a43..f38fc5393 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.7.0 +0.7.3 diff --git a/bookwyrm/activitypub/base_activity.py b/bookwyrm/activitypub/base_activity.py index aa4b5b687..dc4b8f6ae 100644 --- a/bookwyrm/activitypub/base_activity.py +++ b/bookwyrm/activitypub/base_activity.py @@ -20,6 +20,7 @@ from bookwyrm.tasks import app, MISC logger = logging.getLogger(__name__) +# pylint: disable=invalid-name TBookWyrmModel = TypeVar("TBookWyrmModel", bound=base_model.BookWyrmModel) @@ -236,7 +237,7 @@ class ActivityObject: omit = kwargs.get("omit", ()) data = self.__dict__.copy() # recursively serialize - for (k, v) in data.items(): + for k, v in data.items(): try: if issubclass(type(v), ActivityObject): data[k] = v.serialize() @@ -249,7 +250,10 @@ class ActivityObject: pass data = {k: v for (k, v) in data.items() if v is not None and k not in omit} if "@context" not in omit: - data["@context"] = "https://www.w3.org/ns/activitystreams" + data["@context"] = [ + "https://www.w3.org/ns/activitystreams", + {"Hashtag": "as:Hashtag"}, + ] return data @@ -397,18 +401,14 @@ def resolve_remote_id( def get_representative(): """Get or create an actor representing the instance to sign outgoing HTTP GET requests""" - username = f"{INSTANCE_ACTOR_USERNAME}@{DOMAIN}" 
- email = "bookwyrm@localhost" - try: - user = models.User.objects.get(username=username) - except models.User.DoesNotExist: - user = models.User.objects.create_user( - username=username, - email=email, - local=True, - localname=INSTANCE_ACTOR_USERNAME, - ) - return user + return models.User.objects.get_or_create( + username=f"{INSTANCE_ACTOR_USERNAME}@{DOMAIN}", + defaults={ + "email": "bookwyrm@localhost", + "local": True, + "localname": INSTANCE_ACTOR_USERNAME, + }, + )[0] def get_activitypub_data(url): @@ -427,6 +427,7 @@ def get_activitypub_data(url): "Date": now, "Signature": make_signature("get", sender, url, now), }, + timeout=15, ) except requests.RequestException: raise ConnectorException() diff --git a/bookwyrm/activitypub/person.py b/bookwyrm/activitypub/person.py index 85cf44409..dfec92e4c 100644 --- a/bookwyrm/activitypub/person.py +++ b/bookwyrm/activitypub/person.py @@ -1,5 +1,5 @@ """ actor serializer """ -from dataclasses import dataclass, field +from dataclasses import dataclass from typing import Dict from .base_activity import ActivityObject @@ -35,7 +35,7 @@ class Person(ActivityObject): endpoints: Dict = None name: str = None summary: str = None - icon: Image = field(default_factory=lambda: {}) + icon: Image = None bookwyrmUser: bool = False manuallyApprovesFollowers: str = False discoverable: str = False diff --git a/bookwyrm/activitypub/verbs.py b/bookwyrm/activitypub/verbs.py index 00c9524fe..a365f4cc0 100644 --- a/bookwyrm/activitypub/verbs.py +++ b/bookwyrm/activitypub/verbs.py @@ -171,9 +171,19 @@ class Reject(Verb): type: str = "Reject" def action(self, allow_external_connections=True): - """reject a follow request""" - obj = self.object.to_model(save=False, allow_create=False) - obj.reject() + """reject a follow or follow request""" + + for model_name in ["UserFollowRequest", "UserFollows", None]: + model = apps.get_model(f"bookwyrm.{model_name}") if model_name else None + if obj := self.object.to_model( + model=model, + save=False, + allow_create=False, + allow_external_connections=allow_external_connections, + ): + # Reject the first model that can be built. 
+ obj.reject() + break @dataclass(init=False) diff --git a/bookwyrm/activitystreams.py b/bookwyrm/activitystreams.py index 42f99e209..0009ac7a3 100644 --- a/bookwyrm/activitystreams.py +++ b/bookwyrm/activitystreams.py @@ -139,14 +139,14 @@ class ActivityStream(RedisStore): | ( Q(following=status.user) & Q(following=status.reply_parent.user) ) # if the user is following both authors - ).distinct() + ) # only visible to the poster's followers and tagged users elif status.privacy == "followers": audience = audience.filter( Q(following=status.user) # if the user is following the author ) - return audience.distinct() + return audience.distinct("id") @tracer.start_as_current_span("ActivityStream.get_audience") def get_audience(self, status): @@ -156,7 +156,7 @@ class ActivityStream(RedisStore): status_author = models.User.objects.filter( is_active=True, local=True, id=status.user.id ).values_list("id", flat=True) - return list(set(list(audience) + list(status_author))) + return list(set(audience) | set(status_author)) def get_stores_for_users(self, user_ids): """convert a list of user ids into redis store ids""" @@ -183,15 +183,13 @@ class HomeStream(ActivityStream): def get_audience(self, status): trace.get_current_span().set_attribute("stream_id", self.key) audience = super()._get_audience(status) - if not audience: - return [] # if the user is following the author audience = audience.filter(following=status.user).values_list("id", flat=True) # if the user is the post's author status_author = models.User.objects.filter( is_active=True, local=True, id=status.user.id ).values_list("id", flat=True) - return list(set(list(audience) + list(status_author))) + return list(set(audience) | set(status_author)) def get_statuses_for_user(self, user): return models.Status.privacy_filter( @@ -239,9 +237,7 @@ class BooksStream(ActivityStream): ) audience = super()._get_audience(status) - if not audience: - return models.User.objects.none() - return audience.filter(shelfbook__book__parent_work=work).distinct() + return audience.filter(shelfbook__book__parent_work=work) def get_audience(self, status): # only show public statuses on the books feed, diff --git a/bookwyrm/apps.py b/bookwyrm/apps.py index b0c3e3fa4..41b1a17a2 100644 --- a/bookwyrm/apps.py +++ b/bookwyrm/apps.py @@ -1,4 +1,5 @@ """Do further startup configuration and initialization""" + import os import urllib import logging @@ -14,16 +15,16 @@ def download_file(url, destination): """Downloads a file to the given path""" try: # Ensure our destination directory exists - os.makedirs(os.path.dirname(destination)) + os.makedirs(os.path.dirname(destination), exist_ok=True) with urllib.request.urlopen(url) as stream: with open(destination, "b+w") as outfile: outfile.write(stream.read()) - except (urllib.error.HTTPError, urllib.error.URLError): - logger.info("Failed to download file %s", url) - except OSError: - logger.info("Couldn't open font file %s for writing", destination) - except: # pylint: disable=bare-except - logger.info("Unknown error in file download") + except (urllib.error.HTTPError, urllib.error.URLError) as err: + logger.error("Failed to download file %s: %s", url, err) + except OSError as err: + logger.error("Couldn't open font file %s for writing: %s", destination, err) + except Exception as err: # pylint:disable=broad-except + logger.error("Unknown error in file download: %s", err) class BookwyrmConfig(AppConfig): diff --git a/bookwyrm/book_search.py b/bookwyrm/book_search.py index ceb228f40..cf48f4832 100644 --- 
a/bookwyrm/book_search.py +++ b/bookwyrm/book_search.py @@ -43,6 +43,7 @@ def search( min_confidence: float = 0, filters: Optional[list[Any]] = None, return_first: bool = False, + books: Optional[QuerySet[models.Edition]] = None, ) -> Union[Optional[models.Edition], QuerySet[models.Edition]]: """search your local database""" filters = filters or [] @@ -54,13 +55,15 @@ def search( # first, try searching unique identifiers # unique identifiers never have spaces, title/author usually do if not " " in query: - results = search_identifiers(query, *filters, return_first=return_first) + results = search_identifiers( + query, *filters, return_first=return_first, books=books + ) # if there were no identifier results... if not results: # then try searching title/author results = search_title_author( - query, min_confidence, *filters, return_first=return_first + query, min_confidence, *filters, return_first=return_first, books=books ) return results @@ -98,9 +101,17 @@ def format_search_result(search_result): def search_identifiers( - query, *filters, return_first=False + query, + *filters, + return_first=False, + books=None, ) -> Union[Optional[models.Edition], QuerySet[models.Edition]]: - """tries remote_id, isbn; defined as dedupe fields on the model""" + """search Editions by deduplication fields + + Best for cases when we can assume someone is searching for an exact match on + commonly unique data identifiers like isbn or specific library ids. + """ + books = books or models.Edition.objects if connectors.maybe_isbn(query): # Oh did you think the 'S' in ISBN stood for 'standard'? normalized_isbn = query.strip().upper().rjust(10, "0") @@ -111,7 +122,7 @@ def search_identifiers( for f in models.Edition._meta.get_fields() if hasattr(f, "deduplication_field") and f.deduplication_field ] - results = models.Edition.objects.filter( + results = books.filter( *filters, reduce(operator.or_, (Q(**f) for f in or_filters)) ).distinct() @@ -121,12 +132,17 @@ def search_identifiers( def search_title_author( - query, min_confidence, *filters, return_first=False + query, + min_confidence, + *filters, + return_first=False, + books=None, ) -> QuerySet[models.Edition]: """searches for title and author""" + books = books or models.Edition.objects query = SearchQuery(query, config="simple") | SearchQuery(query, config="english") results = ( - models.Edition.objects.filter(*filters, search_vector=query) + books.filter(*filters, search_vector=query) .annotate(rank=SearchRank(F("search_vector"), query)) .filter(rank__gt=min_confidence) .order_by("-rank") @@ -137,7 +153,7 @@ def search_title_author( # filter out multiple editions of the same work list_results = [] - for work_id in set(editions_of_work[:30]): + for work_id in editions_of_work[:30]: result = ( results.filter(parent_work=work_id) .order_by("-rank", "-edition_rank") diff --git a/bookwyrm/connectors/abstract_connector.py b/bookwyrm/connectors/abstract_connector.py index 8b6dcb885..aa8edbeae 100644 --- a/bookwyrm/connectors/abstract_connector.py +++ b/bookwyrm/connectors/abstract_connector.py @@ -3,7 +3,9 @@ from __future__ import annotations from abc import ABC, abstractmethod from typing import Optional, TypedDict, Any, Callable, Union, Iterator from urllib.parse import quote_plus -import imghdr + +# pylint: disable-next=deprecated-module +import imghdr # Deprecated in 3.11 for removal in 3.13; no good alternative yet import logging import re import asyncio diff --git a/bookwyrm/connectors/connector_manager.py b/bookwyrm/connectors/connector_manager.py index 
444a626ba..ad68af1dc 100644 --- a/bookwyrm/connectors/connector_manager.py +++ b/bookwyrm/connectors/connector_manager.py @@ -118,9 +118,11 @@ def get_connectors() -> Iterator[abstract_connector.AbstractConnector]: def get_or_create_connector(remote_id: str) -> abstract_connector.AbstractConnector: """get the connector related to the object's server""" url = urlparse(remote_id) - identifier = url.netloc + identifier = url.hostname if not identifier: - raise ValueError("Invalid remote id") + raise ValueError(f"Invalid remote id: {remote_id}") + + base_url = f"{url.scheme}://{url.netloc}" try: connector_info = models.Connector.objects.get(identifier=identifier) @@ -128,10 +130,10 @@ def get_or_create_connector(remote_id: str) -> abstract_connector.AbstractConnec connector_info = models.Connector.objects.create( identifier=identifier, connector_file="bookwyrm_connector", - base_url=f"https://{identifier}", - books_url=f"https://{identifier}/book", - covers_url=f"https://{identifier}/images/covers", - search_url=f"https://{identifier}/search?q=", + base_url=base_url, + books_url=f"{base_url}/book", + covers_url=f"{base_url}/images/covers", + search_url=f"{base_url}/search?q=", priority=2, ) @@ -143,7 +145,9 @@ def load_more_data(connector_id: str, book_id: str) -> None: """background the work of getting all 10,000 editions of LoTR""" connector_info = models.Connector.objects.get(id=connector_id) connector = load_connector(connector_info) - book = models.Book.objects.select_subclasses().get(id=book_id) + book = models.Book.objects.select_subclasses().get( # type: ignore[no-untyped-call] + id=book_id + ) connector.expand_book_data(book) @@ -154,7 +158,9 @@ def create_edition_task( """separate task for each of the 10,000 editions of LoTR""" connector_info = models.Connector.objects.get(id=connector_id) connector = load_connector(connector_info) - work = models.Work.objects.select_subclasses().get(id=work_id) + work = models.Work.objects.select_subclasses().get( # type: ignore[no-untyped-call] + id=work_id + ) connector.create_edition_from_data(work, data) @@ -188,8 +194,11 @@ def raise_not_valid_url(url: str) -> None: if not parsed.scheme in ["http", "https"]: raise ConnectorException("Invalid scheme: ", url) + if not parsed.hostname: + raise ConnectorException("Hostname missing: ", url) + try: - ipaddress.ip_address(parsed.netloc) + ipaddress.ip_address(parsed.hostname) raise ConnectorException("Provided url is an IP address: ", url) except ValueError: # it's not an IP address, which is good diff --git a/bookwyrm/connectors/inventaire.py b/bookwyrm/connectors/inventaire.py index c08bcdee1..249f6b9ca 100644 --- a/bookwyrm/connectors/inventaire.py +++ b/bookwyrm/connectors/inventaire.py @@ -229,7 +229,7 @@ class Connector(AbstractConnector): data = get_data(url) except ConnectorException: return "" - return data.get("extract", "") + return str(data.get("extract", "")) def get_remote_id_from_model(self, obj: models.BookDataModel) -> str: """use get_remote_id to figure out the link from a model obj""" diff --git a/bookwyrm/emailing.py b/bookwyrm/emailing.py index 5e08ebba1..ccc0aea61 100644 --- a/bookwyrm/emailing.py +++ b/bookwyrm/emailing.py @@ -4,7 +4,7 @@ from django.template.loader import get_template from bookwyrm import models, settings from bookwyrm.tasks import app, EMAIL -from bookwyrm.settings import DOMAIN +from bookwyrm.settings import DOMAIN, BASE_URL def email_data(): @@ -14,6 +14,7 @@ def email_data(): "site_name": site.name, "logo": site.logo_small_url, "domain": DOMAIN, + 
"base_url": BASE_URL, "user": None, } diff --git a/bookwyrm/forms/author.py b/bookwyrm/forms/author.py index 5b54a07b5..a3a759af7 100644 --- a/bookwyrm/forms/author.py +++ b/bookwyrm/forms/author.py @@ -15,6 +15,7 @@ class AuthorForm(CustomForm): "aliases", "bio", "wikipedia_link", + "wikidata", "website", "born", "died", @@ -32,6 +33,7 @@ class AuthorForm(CustomForm): "wikipedia_link": forms.TextInput( attrs={"aria-describedby": "desc_wikipedia_link"} ), + "wikidata": forms.TextInput(attrs={"aria-describedby": "desc_wikidata"}), "website": forms.TextInput(attrs={"aria-describedby": "desc_website"}), "born": forms.SelectDateWidget(attrs={"aria-describedby": "desc_born"}), "died": forms.SelectDateWidget(attrs={"aria-describedby": "desc_died"}), diff --git a/bookwyrm/forms/books.py b/bookwyrm/forms/books.py index 4885dc063..f73ce3f5a 100644 --- a/bookwyrm/forms/books.py +++ b/bookwyrm/forms/books.py @@ -1,8 +1,9 @@ """ using django model forms """ from django import forms +from file_resubmit.widgets import ResubmitImageWidget + from bookwyrm import models -from bookwyrm.models.fields import ClearableFileInputWithWarning from .custom_form import CustomForm from .widgets import ArrayWidget, SelectDateWidget, Select @@ -70,9 +71,7 @@ class EditionForm(CustomForm): "published_date": SelectDateWidget( attrs={"aria-describedby": "desc_published_date"} ), - "cover": ClearableFileInputWithWarning( - attrs={"aria-describedby": "desc_cover"} - ), + "cover": ResubmitImageWidget(attrs={"aria-describedby": "desc_cover"}), "physical_format": Select( attrs={"aria-describedby": "desc_physical_format"} ), diff --git a/bookwyrm/forms/forms.py b/bookwyrm/forms/forms.py index ea6093750..3d555f308 100644 --- a/bookwyrm/forms/forms.py +++ b/bookwyrm/forms/forms.py @@ -25,6 +25,10 @@ class ImportForm(forms.Form): csv_file = forms.FileField() +class ImportUserForm(forms.Form): + archive_file = forms.FileField() + + class ShelfForm(CustomForm): class Meta: model = models.Shelf diff --git a/bookwyrm/forms/links.py b/bookwyrm/forms/links.py index d2fd5f116..5156d2578 100644 --- a/bookwyrm/forms/links.py +++ b/bookwyrm/forms/links.py @@ -1,4 +1,5 @@ """ using django model forms """ + from urllib.parse import urlparse from django.utils.translation import gettext_lazy as _ @@ -25,7 +26,7 @@ class FileLinkForm(CustomForm): url = cleaned_data.get("url") filetype = cleaned_data.get("filetype") book = cleaned_data.get("book") - domain = urlparse(url).netloc + domain = urlparse(url).hostname if models.LinkDomain.objects.filter(domain=domain).exists(): status = models.LinkDomain.objects.get(domain=domain).status if status == "blocked": @@ -37,10 +38,9 @@ class FileLinkForm(CustomForm): ), ) if ( - not self.instance - and models.FileLink.objects.filter( - url=url, book=book, filetype=filetype - ).exists() + models.FileLink.objects.filter(url=url, book=book, filetype=filetype) + .exclude(pk=self.instance) + .exists() ): # pylint: disable=line-too-long self.add_error( diff --git a/bookwyrm/importers/__init__.py b/bookwyrm/importers/__init__.py index daf9bae62..8e92872f2 100644 --- a/bookwyrm/importers/__init__.py +++ b/bookwyrm/importers/__init__.py @@ -1,7 +1,7 @@ """ import classes """ from .importer import Importer -from .bookwyrm_import import BookwyrmBooksImporter +from .bookwyrm_import import BookwyrmImporter from .calibre_import import CalibreImporter from .goodreads_import import GoodreadsImporter from .librarything_import import LibrarythingImporter diff --git a/bookwyrm/importers/bookwyrm_import.py 
b/bookwyrm/importers/bookwyrm_import.py index ed25166f9..206cd6219 100644 --- a/bookwyrm/importers/bookwyrm_import.py +++ b/bookwyrm/importers/bookwyrm_import.py @@ -1,14 +1,24 @@ -""" handle reading a csv from BookWyrm """ +"""Import data from Bookwyrm export files""" +from django.http import QueryDict -from typing import Any -from . import Importer +from bookwyrm.models import User +from bookwyrm.models.bookwyrm_import_job import BookwyrmImportJob -class BookwyrmBooksImporter(Importer): - """Goodreads is the default importer, we basically just use the same structure""" +class BookwyrmImporter: + """Import a Bookwyrm User export file. + This is kind of a combination of an importer and a connector. + """ - service = "BookWyrm" + # pylint: disable=no-self-use + def process_import( + self, user: User, archive_file: bytes, settings: QueryDict + ) -> BookwyrmImportJob: + """import user data from a Bookwyrm export file""" - def __init__(self, *args: Any, **kwargs: Any): - self.row_mappings_guesses.append(("shelf_name", ["shelf_name"])) - super().__init__(*args, **kwargs) + required = [k for k in settings if settings.get(k) == "on"] + + job = BookwyrmImportJob.objects.create( + user=user, archive_file=archive_file, required=required + ) + return job diff --git a/bookwyrm/importers/calibre_import.py b/bookwyrm/importers/calibre_import.py index 5c22a539d..542175dd7 100644 --- a/bookwyrm/importers/calibre_import.py +++ b/bookwyrm/importers/calibre_import.py @@ -14,15 +14,10 @@ class CalibreImporter(Importer): def __init__(self, *args: Any, **kwargs: Any): # Add timestamp to row_mappings_guesses for date_added to avoid # integrity error - row_mappings_guesses = [] - - for field, mapping in self.row_mappings_guesses: - if field in ("date_added",): - row_mappings_guesses.append((field, mapping + ["timestamp"])) - else: - row_mappings_guesses.append((field, mapping)) - - self.row_mappings_guesses = row_mappings_guesses + self.row_mappings_guesses = [ + (field, mapping + (["timestamp"] if field == "date_added" else [])) + for field, mapping in self.row_mappings_guesses + ] super().__init__(*args, **kwargs) def get_shelf(self, normalized_row: dict[str, Optional[str]]) -> Optional[str]: diff --git a/bookwyrm/isbn/isbn.py b/bookwyrm/isbn/isbn.py index 56062ff7b..d14dc2619 100644 --- a/bookwyrm/isbn/isbn.py +++ b/bookwyrm/isbn/isbn.py @@ -26,7 +26,7 @@ class IsbnHyphenator: def update_range_message(self) -> None: """Download the range message xml file and save it locally""" - response = requests.get(self.__range_message_url) + response = requests.get(self.__range_message_url, timeout=15) with open(self.__range_file_path, "w", encoding="utf-8") as file: file.write(response.text) self.__element_tree = None diff --git a/bookwyrm/management/commands/deduplicate_book_data.py b/bookwyrm/management/commands/deduplicate_book_data.py index dde7d133c..c2d897ce3 100644 --- a/bookwyrm/management/commands/deduplicate_book_data.py +++ b/bookwyrm/management/commands/deduplicate_book_data.py @@ -1,13 +1,14 @@ """ PROCEED WITH CAUTION: uses deduplication fields to permanently merge book data objects """ + from django.core.management.base import BaseCommand from django.db.models import Count from bookwyrm import models -from bookwyrm.management.merge import merge_objects -def dedupe_model(model): +def dedupe_model(model, dry_run=False): """combine duplicate editions and update related models""" + print(f"deduplicating {model.__name__}:") fields = model._meta.get_fields() dedupe_fields = [ f for f in fields if hasattr(f, 
"deduplication_field") and f.deduplication_field @@ -16,30 +17,42 @@ def dedupe_model(model): dupes = ( model.objects.values(field.name) .annotate(Count(field.name)) - .filter(**{"%s__count__gt" % field.name: 1}) + .filter(**{f"{field.name}__count__gt": 1}) + .exclude(**{field.name: ""}) + .exclude(**{f"{field.name}__isnull": True}) ) for dupe in dupes: value = dupe[field.name] - if not value or value == "": - continue print("----------") - print(dupe) objs = model.objects.filter(**{field.name: value}).order_by("id") canonical = objs.first() - print("keeping", canonical.remote_id) + action = "would merge" if dry_run else "merging" + print( + f"{action} into {model.__name__} {canonical.remote_id} based on {field.name} {value}:" + ) for obj in objs[1:]: - print(obj.remote_id) - merge_objects(canonical, obj) + print(f"- {obj.remote_id}") + absorbed_fields = obj.merge_into(canonical, dry_run=dry_run) + print(f" absorbed fields: {absorbed_fields}") class Command(BaseCommand): """deduplicate allllll the book data models""" help = "merges duplicate book data" + + def add_arguments(self, parser): + """add the arguments for this command""" + parser.add_argument( + "--dry_run", + action="store_true", + help="don't actually merge, only print what would happen", + ) + # pylint: disable=no-self-use,unused-argument def handle(self, *args, **options): """run deduplications""" - dedupe_model(models.Edition) - dedupe_model(models.Work) - dedupe_model(models.Author) + dedupe_model(models.Edition, dry_run=options["dry_run"]) + dedupe_model(models.Work, dry_run=options["dry_run"]) + dedupe_model(models.Author, dry_run=options["dry_run"]) diff --git a/bookwyrm/management/commands/erase_deleted_user_data.py b/bookwyrm/management/commands/erase_deleted_user_data.py new file mode 100644 index 000000000..40c3f042b --- /dev/null +++ b/bookwyrm/management/commands/erase_deleted_user_data.py @@ -0,0 +1,43 @@ +""" Erase any data stored about deleted users """ +import sys +from django.core.management.base import BaseCommand, CommandError +from bookwyrm import models +from bookwyrm.models.user import erase_user_data + +# pylint: disable=missing-function-docstring +class Command(BaseCommand): + """command-line options""" + + help = "Remove Two Factor Authorisation from user" + + def add_arguments(self, parser): # pylint: disable=no-self-use + parser.add_argument( + "--dryrun", + action="store_true", + help="Preview users to be cleared without altering the database", + ) + + def handle(self, *args, **options): # pylint: disable=unused-argument + + # Check for anything fishy + bad_state = models.User.objects.filter(is_deleted=True, is_active=True) + if bad_state.exists(): + raise CommandError( + f"{bad_state.count()} user(s) marked as both active and deleted" + ) + + deleted_users = models.User.objects.filter(is_deleted=True) + self.stdout.write(f"Found {deleted_users.count()} deleted users") + if options["dryrun"]: + self.stdout.write("\n".join(u.username for u in deleted_users[:5])) + if deleted_users.count() > 5: + self.stdout.write("... 
and more") + sys.exit() + + self.stdout.write("Erasing user data:") + for user_id in deleted_users.values_list("id", flat=True): + erase_user_data.delay(user_id) + self.stdout.write(".", ending="") + + self.stdout.write("") + self.stdout.write("Tasks created successfully") diff --git a/bookwyrm/management/commands/instance_version.py b/bookwyrm/management/commands/instance_version.py deleted file mode 100644 index ca150d640..000000000 --- a/bookwyrm/management/commands/instance_version.py +++ /dev/null @@ -1,54 +0,0 @@ -""" Get your admin code to allow install """ -from django.core.management.base import BaseCommand - -from bookwyrm import models -from bookwyrm.settings import VERSION - - -# pylint: disable=no-self-use -class Command(BaseCommand): - """command-line options""" - - help = "What version is this?" - - def add_arguments(self, parser): - """specify which function to run""" - parser.add_argument( - "--current", - action="store_true", - help="Version stored in database", - ) - parser.add_argument( - "--target", - action="store_true", - help="Version stored in settings", - ) - parser.add_argument( - "--update", - action="store_true", - help="Update database version", - ) - - # pylint: disable=unused-argument - def handle(self, *args, **options): - """execute init""" - site = models.SiteSettings.objects.get() - current = site.version or "0.0.1" - target = VERSION - if options.get("current"): - print(current) - return - - if options.get("target"): - print(target) - return - - if options.get("update"): - site.version = target - site.save() - return - - if current != target: - print(f"{current}/{target}") - else: - print(current) diff --git a/bookwyrm/management/merge.py b/bookwyrm/management/merge.py deleted file mode 100644 index f55229f18..000000000 --- a/bookwyrm/management/merge.py +++ /dev/null @@ -1,50 +0,0 @@ -from django.db.models import ManyToManyField - - -def update_related(canonical, obj): - """update all the models with fk to the object being removed""" - # move related models to canonical - related_models = [ - (r.remote_field.name, r.related_model) for r in canonical._meta.related_objects - ] - for (related_field, related_model) in related_models: - # Skip the ManyToMany fields that aren’t auto-created. These - # should have a corresponding OneToMany field in the model for - # the linking table anyway. If we update it through that model - # instead then we won’t lose the extra fields in the linking - # table. 
- related_field_obj = related_model._meta.get_field(related_field) - if isinstance(related_field_obj, ManyToManyField): - through = related_field_obj.remote_field.through - if not through._meta.auto_created: - continue - related_objs = related_model.objects.filter(**{related_field: obj}) - for related_obj in related_objs: - print("replacing in", related_model.__name__, related_field, related_obj.id) - try: - setattr(related_obj, related_field, canonical) - related_obj.save() - except TypeError: - getattr(related_obj, related_field).add(canonical) - getattr(related_obj, related_field).remove(obj) - - -def copy_data(canonical, obj): - """try to get the most data possible""" - for data_field in obj._meta.get_fields(): - if not hasattr(data_field, "activitypub_field"): - continue - data_value = getattr(obj, data_field.name) - if not data_value: - continue - if not getattr(canonical, data_field.name): - print("setting data field", data_field.name, data_value) - setattr(canonical, data_field.name, data_value) - canonical.save() - - -def merge_objects(canonical, obj): - copy_data(canonical, obj) - update_related(canonical, obj) - # remove the outdated entry - obj.delete() diff --git a/bookwyrm/management/merge_command.py b/bookwyrm/management/merge_command.py index 805dc73fa..66e60814a 100644 --- a/bookwyrm/management/merge_command.py +++ b/bookwyrm/management/merge_command.py @@ -1,4 +1,3 @@ -from bookwyrm.management.merge import merge_objects from django.core.management.base import BaseCommand @@ -9,6 +8,11 @@ class MergeCommand(BaseCommand): """add the arguments for this command""" parser.add_argument("--canonical", type=int, required=True) parser.add_argument("--other", type=int, required=True) + parser.add_argument( + "--dry_run", + action="store_true", + help="don't actually merge, only print what would happen", + ) # pylint: disable=no-self-use,unused-argument def handle(self, *args, **options): @@ -26,4 +30,8 @@ class MergeCommand(BaseCommand): print("other book doesn’t exist!") return - merge_objects(canonical, other) + absorbed_fields = other.merge_into(canonical, dry_run=options["dry_run"]) + + action = "would be" if options["dry_run"] else "has been" + print(f"{other.remote_id} {action} merged into {canonical.remote_id}") + print(f"absorbed fields: {absorbed_fields}") diff --git a/bookwyrm/middleware/__init__.py b/bookwyrm/middleware/__init__.py index 03843c5a3..85c3a56fe 100644 --- a/bookwyrm/middleware/__init__.py +++ b/bookwyrm/middleware/__init__.py @@ -1,3 +1,4 @@ """ look at all this nice middleware! 
""" from .timezone_middleware import TimezoneMiddleware from .ip_middleware import IPBlocklistMiddleware +from .file_too_big import FileTooBig diff --git a/bookwyrm/middleware/file_too_big.py b/bookwyrm/middleware/file_too_big.py new file mode 100644 index 000000000..de1349d96 --- /dev/null +++ b/bookwyrm/middleware/file_too_big.py @@ -0,0 +1,30 @@ +"""Middleware to display a custom 413 error page""" + +from django.http import HttpResponse +from django.shortcuts import render +from django.core.exceptions import RequestDataTooBig + + +class FileTooBig: + """Middleware to display a custom page when a + RequestDataTooBig exception is thrown""" + + def __init__(self, get_response): + """boilerplate __init__ from Django docs""" + + self.get_response = get_response + + def __call__(self, request): + """If RequestDataTooBig is thrown, render the 413 error page""" + + try: + body = request.body # pylint: disable=unused-variable + + except RequestDataTooBig: + + rendered = render(request, "413.html") + response = HttpResponse(rendered) + return response + + response = self.get_response(request) + return response diff --git a/bookwyrm/middleware/timezone_middleware.py b/bookwyrm/middleware/timezone_middleware.py index 5033397a5..3cf084154 100644 --- a/bookwyrm/middleware/timezone_middleware.py +++ b/bookwyrm/middleware/timezone_middleware.py @@ -1,5 +1,5 @@ """ Makes the app aware of the users timezone """ -import pytz +import zoneinfo from django.utils import timezone @@ -12,9 +12,7 @@ class TimezoneMiddleware: def __call__(self, request): if request.user.is_authenticated: - timezone.activate(pytz.timezone(request.user.preferred_timezone)) + timezone.activate(zoneinfo.ZoneInfo(request.user.preferred_timezone)) else: - timezone.activate(pytz.utc) - response = self.get_response(request) - timezone.deactivate() - return response + timezone.deactivate() + return self.get_response(request) diff --git a/bookwyrm/migrations/0171_alter_user_preferred_timezone.py b/bookwyrm/migrations/0171_alter_user_preferred_timezone.py index 7dcd9546c..8d1dff553 100644 --- a/bookwyrm/migrations/0171_alter_user_preferred_timezone.py +++ b/bookwyrm/migrations/0171_alter_user_preferred_timezone.py @@ -10,6 +10,7 @@ class Migration(migrations.Migration): ] operations = [ + # The new timezones are "Factory" and "localtime" migrations.AlterField( model_name="user", name="preferred_timezone", diff --git a/bookwyrm/migrations/0184_auto_20231106_0421.py b/bookwyrm/migrations/0184_auto_20231106_0421.py index e8197dea1..23bacc502 100644 --- a/bookwyrm/migrations/0184_auto_20231106_0421.py +++ b/bookwyrm/migrations/0184_auto_20231106_0421.py @@ -22,17 +22,6 @@ def update_deleted_users(apps, schema_editor): ).update(is_deleted=True) -def erase_deleted_user_data(apps, schema_editor): - """Retroactively clear user data""" - for user in User.objects.filter(is_deleted=True): - user.erase_user_data() - user.save( - broadcast=False, - update_fields=["email", "avatar", "preview_image", "summary", "name"], - ) - user.erase_user_statuses(broadcast=False) - - class Migration(migrations.Migration): dependencies = [ @@ -43,7 +32,4 @@ class Migration(migrations.Migration): migrations.RunPython( update_deleted_users, reverse_code=migrations.RunPython.noop ), - migrations.RunPython( - erase_deleted_user_data, reverse_code=migrations.RunPython.noop - ), ] diff --git a/bookwyrm/migrations/0186_auto_20231116_0048.py b/bookwyrm/migrations/0186_auto_20231116_0048.py new file mode 100644 index 000000000..e3b9da4fe --- /dev/null +++ 
b/bookwyrm/migrations/0186_auto_20231116_0048.py @@ -0,0 +1,212 @@ +# Generated by Django 3.2.20 on 2023-11-16 00:48 + +from django.conf import settings +import django.contrib.postgres.fields +from django.db import migrations, models +import django.db.models.deletion +import django.utils.timezone + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0185_alter_notification_notification_type"), + ] + + operations = [ + migrations.CreateModel( + name="ParentJob", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("task_id", models.UUIDField(blank=True, null=True, unique=True)), + ( + "created_date", + models.DateTimeField(default=django.utils.timezone.now), + ), + ( + "updated_date", + models.DateTimeField(default=django.utils.timezone.now), + ), + ("complete", models.BooleanField(default=False)), + ( + "status", + models.CharField( + choices=[ + ("pending", "Pending"), + ("active", "Active"), + ("complete", "Complete"), + ("stopped", "Stopped"), + ("failed", "Failed"), + ], + default="pending", + max_length=50, + null=True, + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "abstract": False, + }, + ), + migrations.AddField( + model_name="sitesettings", + name="user_import_time_limit", + field=models.IntegerField(default=48), + ), + migrations.AlterField( + model_name="notification", + name="notification_type", + field=models.CharField( + choices=[ + ("FAVORITE", "Favorite"), + ("BOOST", "Boost"), + ("REPLY", "Reply"), + ("MENTION", "Mention"), + ("TAG", "Tag"), + ("FOLLOW", "Follow"), + ("FOLLOW_REQUEST", "Follow Request"), + ("IMPORT", "Import"), + ("USER_IMPORT", "User Import"), + ("USER_EXPORT", "User Export"), + ("ADD", "Add"), + ("REPORT", "Report"), + ("LINK_DOMAIN", "Link Domain"), + ("INVITE", "Invite"), + ("ACCEPT", "Accept"), + ("JOIN", "Join"), + ("LEAVE", "Leave"), + ("REMOVE", "Remove"), + ("GROUP_PRIVACY", "Group Privacy"), + ("GROUP_NAME", "Group Name"), + ("GROUP_DESCRIPTION", "Group Description"), + ("MOVE", "Move"), + ], + max_length=255, + ), + ), + migrations.CreateModel( + name="BookwyrmExportJob", + fields=[ + ( + "parentjob_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="bookwyrm.parentjob", + ), + ), + ("export_data", models.FileField(null=True, upload_to="")), + ], + options={ + "abstract": False, + }, + bases=("bookwyrm.parentjob",), + ), + migrations.CreateModel( + name="BookwyrmImportJob", + fields=[ + ( + "parentjob_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="bookwyrm.parentjob", + ), + ), + ("archive_file", models.FileField(blank=True, null=True, upload_to="")), + ("import_data", models.JSONField(null=True)), + ( + "required", + django.contrib.postgres.fields.ArrayField( + base_field=models.CharField(blank=True, max_length=50), + blank=True, + size=None, + ), + ), + ], + options={ + "abstract": False, + }, + bases=("bookwyrm.parentjob",), + ), + migrations.CreateModel( + name="ChildJob", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("task_id", models.UUIDField(blank=True, null=True, unique=True)), + ( + "created_date", + 
models.DateTimeField(default=django.utils.timezone.now), + ), + ( + "updated_date", + models.DateTimeField(default=django.utils.timezone.now), + ), + ("complete", models.BooleanField(default=False)), + ( + "status", + models.CharField( + choices=[ + ("pending", "Pending"), + ("active", "Active"), + ("complete", "Complete"), + ("stopped", "Stopped"), + ("failed", "Failed"), + ], + default="pending", + max_length=50, + null=True, + ), + ), + ( + "parent_job", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="child_jobs", + to="bookwyrm.parentjob", + ), + ), + ], + options={ + "abstract": False, + }, + ), + migrations.AddField( + model_name="notification", + name="related_user_export", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="bookwyrm.bookwyrmexportjob", + ), + ), + ] diff --git a/bookwyrm/migrations/0189_alter_user_preferred_language.py b/bookwyrm/migrations/0189_alter_user_preferred_language.py new file mode 100644 index 000000000..d9d9777c7 --- /dev/null +++ b/bookwyrm/migrations/0189_alter_user_preferred_language.py @@ -0,0 +1,45 @@ +# Generated by Django 3.2.23 on 2023-12-12 23:42 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0188_theme_loads"), + ] + + operations = [ + migrations.AlterField( + model_name="user", + name="preferred_language", + field=models.CharField( + blank=True, + choices=[ + ("en-us", "English"), + ("ca-es", "Català (Catalan)"), + ("de-de", "Deutsch (German)"), + ("eo-uy", "Esperanto (Esperanto)"), + ("es-es", "Español (Spanish)"), + ("eu-es", "Euskara (Basque)"), + ("gl-es", "Galego (Galician)"), + ("it-it", "Italiano (Italian)"), + ("fi-fi", "Suomi (Finnish)"), + ("fr-fr", "Français (French)"), + ("lt-lt", "Lietuvių (Lithuanian)"), + ("nl-nl", "Nederlands (Dutch)"), + ("no-no", "Norsk (Norwegian)"), + ("pl-pl", "Polski (Polish)"), + ("pt-br", "Português do Brasil (Brazilian Portuguese)"), + ("pt-pt", "Português Europeu (European Portuguese)"), + ("ro-ro", "Română (Romanian)"), + ("sv-se", "Svenska (Swedish)"), + ("uk-ua", "Українська (Ukrainian)"), + ("zh-hans", "简体中文 (Simplified Chinese)"), + ("zh-hant", "繁體中文 (Traditional Chinese)"), + ], + max_length=255, + null=True, + ), + ), + ] diff --git a/bookwyrm/migrations/0189_merge_0186_auto_20231116_0048_0188_theme_loads.py b/bookwyrm/migrations/0189_merge_0186_auto_20231116_0048_0188_theme_loads.py new file mode 100644 index 000000000..eb6238f6e --- /dev/null +++ b/bookwyrm/migrations/0189_merge_0186_auto_20231116_0048_0188_theme_loads.py @@ -0,0 +1,13 @@ +# Generated by Django 3.2.23 on 2023-11-22 10:16 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0186_auto_20231116_0048"), + ("bookwyrm", "0188_theme_loads"), + ] + + operations = [] diff --git a/bookwyrm/migrations/0190_alter_notification_notification_type.py b/bookwyrm/migrations/0190_alter_notification_notification_type.py new file mode 100644 index 000000000..aff54c77b --- /dev/null +++ b/bookwyrm/migrations/0190_alter_notification_notification_type.py @@ -0,0 +1,45 @@ +# Generated by Django 3.2.23 on 2023-11-23 19:49 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0189_merge_0186_auto_20231116_0048_0188_theme_loads"), + ] + + operations = [ + migrations.AlterField( + model_name="notification", + name="notification_type", + field=models.CharField( + 
choices=[ + ("FAVORITE", "Favorite"), + ("BOOST", "Boost"), + ("REPLY", "Reply"), + ("MENTION", "Mention"), + ("TAG", "Tag"), + ("FOLLOW", "Follow"), + ("FOLLOW_REQUEST", "Follow Request"), + ("IMPORT", "Import"), + ("USER_IMPORT", "User Import"), + ("USER_EXPORT", "User Export"), + ("ADD", "Add"), + ("REPORT", "Report"), + ("LINK_DOMAIN", "Link Domain"), + ("INVITE_REQUEST", "Invite Request"), + ("INVITE", "Invite"), + ("ACCEPT", "Accept"), + ("JOIN", "Join"), + ("LEAVE", "Leave"), + ("REMOVE", "Remove"), + ("GROUP_PRIVACY", "Group Privacy"), + ("GROUP_NAME", "Group Name"), + ("GROUP_DESCRIPTION", "Group Description"), + ("MOVE", "Move"), + ], + max_length=255, + ), + ), + ] diff --git a/bookwyrm/migrations/0190_book_search_updates.py b/bookwyrm/migrations/0190_book_search_updates.py new file mode 100644 index 000000000..52d80fcb9 --- /dev/null +++ b/bookwyrm/migrations/0190_book_search_updates.py @@ -0,0 +1,16 @@ +# Generated by Django 3.2.20 on 2023-11-24 17:11 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("bookwyrm", "0188_theme_loads"), + ] + + operations = [ + migrations.RemoveIndex( + model_name="author", + name="bookwyrm_au_search__b050a8_gin", + ), + ] diff --git a/bookwyrm/migrations/0191_merge_20240102_0326.py b/bookwyrm/migrations/0191_merge_20240102_0326.py new file mode 100644 index 000000000..485c14af8 --- /dev/null +++ b/bookwyrm/migrations/0191_merge_20240102_0326.py @@ -0,0 +1,13 @@ +# Generated by Django 3.2.23 on 2024-01-02 03:26 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0189_alter_user_preferred_language"), + ("bookwyrm", "0190_alter_notification_notification_type"), + ] + + operations = [] diff --git a/bookwyrm/migrations/0191_migrate_search_vec_triggers_to_pgtriggers.py b/bookwyrm/migrations/0191_migrate_search_vec_triggers_to_pgtriggers.py new file mode 100644 index 000000000..03442298f --- /dev/null +++ b/bookwyrm/migrations/0191_migrate_search_vec_triggers_to_pgtriggers.py @@ -0,0 +1,76 @@ +# Generated by Django 3.2.20 on 2023-11-25 00:47 + +from importlib import import_module +import re + +from django.db import migrations +import pgtrigger.compiler +import pgtrigger.migrations + +trigger_migration = import_module("bookwyrm.migrations.0077_auto_20210623_2155") + +# it's _very_ convenient for development that this migration be reversible +search_vector_trigger = trigger_migration.Migration.operations[4] +author_search_vector_trigger = trigger_migration.Migration.operations[5] + + +assert re.search(r"\bCREATE TRIGGER search_vector_trigger\b", search_vector_trigger.sql) +assert re.search( + r"\bCREATE TRIGGER author_search_vector_trigger\b", + author_search_vector_trigger.sql, +) + + +class Migration(migrations.Migration): + dependencies = [ + ("bookwyrm", "0190_book_search_updates"), + ] + + operations = [ + pgtrigger.migrations.AddTrigger( + model_name="book", + trigger=pgtrigger.compiler.Trigger( + name="update_search_vector_on_book_edit", + sql=pgtrigger.compiler.UpsertTriggerSql( + func="new.search_vector := setweight(coalesce(nullif(to_tsvector('english', new.title), ''), to_tsvector('simple', new.title)), 'A') || setweight(to_tsvector('english', coalesce(new.subtitle, '')), 'B') || (SELECT setweight(to_tsvector('simple', coalesce(array_to_string(array_agg(bookwyrm_author.name), ' '), '')), 'C') FROM bookwyrm_author LEFT JOIN bookwyrm_book_authors ON bookwyrm_author.id = bookwyrm_book_authors.author_id WHERE 
bookwyrm_book_authors.book_id = new.id ) || setweight(to_tsvector('english', coalesce(new.series, '')), 'D');RETURN NEW;", + hash="77d6399497c0a89b0bf09d296e33c396da63705c", + operation='INSERT OR UPDATE OF "title", "subtitle", "series", "search_vector"', + pgid="pgtrigger_update_search_vector_on_book_edit_bec58", + table="bookwyrm_book", + when="BEFORE", + ), + ), + ), + pgtrigger.migrations.AddTrigger( + model_name="author", + trigger=pgtrigger.compiler.Trigger( + name="reset_search_vector_on_author_edit", + sql=pgtrigger.compiler.UpsertTriggerSql( + func="WITH updated_books AS (SELECT book_id FROM bookwyrm_book_authors WHERE author_id = new.id ) UPDATE bookwyrm_book SET search_vector = '' FROM updated_books WHERE id = updated_books.book_id;RETURN NEW;", + hash="e7bbf08711ff3724c58f4d92fb7a082ffb3d7826", + operation='UPDATE OF "name"', + pgid="pgtrigger_reset_search_vector_on_author_edit_a447c", + table="bookwyrm_author", + when="AFTER", + ), + ), + ), + migrations.RunSQL( + sql="""DROP TRIGGER IF EXISTS search_vector_trigger ON bookwyrm_book; + DROP FUNCTION IF EXISTS book_trigger; + """, + reverse_sql=search_vector_trigger.sql, + ), + migrations.RunSQL( + sql="""DROP TRIGGER IF EXISTS author_search_vector_trigger ON bookwyrm_author; + DROP FUNCTION IF EXISTS author_trigger; + """, + reverse_sql=author_search_vector_trigger.sql, + ), + migrations.RunSQL( + # Recalculate book search vector for any missed author name changes + # due to bug in JOIN in the old trigger. + sql="UPDATE bookwyrm_book SET search_vector = NULL;", + reverse_sql=migrations.RunSQL.noop, + ), + ] diff --git a/bookwyrm/migrations/0192_make_page_positions_text.py b/bookwyrm/migrations/0192_make_page_positions_text.py new file mode 100644 index 000000000..940a9e941 --- /dev/null +++ b/bookwyrm/migrations/0192_make_page_positions_text.py @@ -0,0 +1,23 @@ +# Generated by Django 3.2.23 on 2024-01-04 23:56 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0191_merge_20240102_0326"), + ] + + operations = [ + migrations.AlterField( + model_name="quotation", + name="endposition", + field=models.TextField(blank=True, null=True), + ), + migrations.AlterField( + model_name="quotation", + name="position", + field=models.TextField(blank=True, null=True), + ), + ] diff --git a/bookwyrm/migrations/0192_rename_version_sitesettings_available_version.py b/bookwyrm/migrations/0192_rename_version_sitesettings_available_version.py new file mode 100644 index 000000000..db67b4e92 --- /dev/null +++ b/bookwyrm/migrations/0192_rename_version_sitesettings_available_version.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.23 on 2024-01-02 19:36 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0191_merge_20240102_0326"), + ] + + operations = [ + migrations.RenameField( + model_name="sitesettings", + old_name="version", + new_name="available_version", + ), + ] diff --git a/bookwyrm/migrations/0192_sitesettings_user_exports_enabled.py b/bookwyrm/migrations/0192_sitesettings_user_exports_enabled.py new file mode 100644 index 000000000..ec5b411e2 --- /dev/null +++ b/bookwyrm/migrations/0192_sitesettings_user_exports_enabled.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.23 on 2024-01-16 10:28 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0191_merge_20240102_0326"), + ] + + operations = [ + migrations.AddField( + 
model_name="sitesettings", + name="user_exports_enabled", + field=models.BooleanField(default=False), + ), + ] diff --git a/bookwyrm/migrations/0193_auto_20240128_0249.py b/bookwyrm/migrations/0193_auto_20240128_0249.py new file mode 100644 index 000000000..82e32ee48 --- /dev/null +++ b/bookwyrm/migrations/0193_auto_20240128_0249.py @@ -0,0 +1,92 @@ +# Generated by Django 3.2.23 on 2024-01-28 02:49 + +import django.core.serializers.json +from django.db import migrations, models +import django.db.models.deletion +from django.core.files.storage import storages + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0192_sitesettings_user_exports_enabled"), + ] + + operations = [ + migrations.AddField( + model_name="bookwyrmexportjob", + name="export_json", + field=models.JSONField( + encoder=django.core.serializers.json.DjangoJSONEncoder, null=True + ), + ), + migrations.AddField( + model_name="bookwyrmexportjob", + name="json_completed", + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name="bookwyrmexportjob", + name="export_data", + field=models.FileField( + null=True, + storage=storages["exports"], + upload_to="", + ), + ), + migrations.CreateModel( + name="AddFileToTar", + fields=[ + ( + "childjob_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="bookwyrm.childjob", + ), + ), + ( + "parent_export_job", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="child_edition_export_jobs", + to="bookwyrm.bookwyrmexportjob", + ), + ), + ], + options={ + "abstract": False, + }, + bases=("bookwyrm.childjob",), + ), + migrations.CreateModel( + name="AddBookToUserExportJob", + fields=[ + ( + "childjob_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="bookwyrm.childjob", + ), + ), + ( + "edition", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="bookwyrm.edition", + ), + ), + ], + options={ + "abstract": False, + }, + bases=("bookwyrm.childjob",), + ), + ] diff --git a/bookwyrm/migrations/0193_merge_20240203_1539.py b/bookwyrm/migrations/0193_merge_20240203_1539.py new file mode 100644 index 000000000..a88568ba1 --- /dev/null +++ b/bookwyrm/migrations/0193_merge_20240203_1539.py @@ -0,0 +1,13 @@ +# Generated by Django 3.2.23 on 2024-02-03 15:39 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0192_make_page_positions_text"), + ("bookwyrm", "0192_sitesettings_user_exports_enabled"), + ] + + operations = [] diff --git a/bookwyrm/migrations/0194_merge_20240203_1619.py b/bookwyrm/migrations/0194_merge_20240203_1619.py new file mode 100644 index 000000000..a5c18e300 --- /dev/null +++ b/bookwyrm/migrations/0194_merge_20240203_1619.py @@ -0,0 +1,13 @@ +# Generated by Django 3.2.23 on 2024-02-03 16:19 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0192_rename_version_sitesettings_available_version"), + ("bookwyrm", "0193_merge_20240203_1539"), + ] + + operations = [] diff --git a/bookwyrm/migrations/0195_alter_user_preferred_language.py b/bookwyrm/migrations/0195_alter_user_preferred_language.py new file mode 100644 index 000000000..1fbfa7304 --- /dev/null +++ b/bookwyrm/migrations/0195_alter_user_preferred_language.py @@ -0,0 +1,46 @@ +# 
Generated by Django 3.2.23 on 2024-02-21 00:45 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0194_merge_20240203_1619"), + ] + + operations = [ + migrations.AlterField( + model_name="user", + name="preferred_language", + field=models.CharField( + blank=True, + choices=[ + ("en-us", "English"), + ("ca-es", "Català (Catalan)"), + ("de-de", "Deutsch (German)"), + ("eo-uy", "Esperanto (Esperanto)"), + ("es-es", "Español (Spanish)"), + ("eu-es", "Euskara (Basque)"), + ("gl-es", "Galego (Galician)"), + ("it-it", "Italiano (Italian)"), + ("ko-kr", "한국어 (Korean)"), + ("fi-fi", "Suomi (Finnish)"), + ("fr-fr", "Français (French)"), + ("lt-lt", "Lietuvių (Lithuanian)"), + ("nl-nl", "Nederlands (Dutch)"), + ("no-no", "Norsk (Norwegian)"), + ("pl-pl", "Polski (Polish)"), + ("pt-br", "Português do Brasil (Brazilian Portuguese)"), + ("pt-pt", "Português Europeu (European Portuguese)"), + ("ro-ro", "Română (Romanian)"), + ("sv-se", "Svenska (Swedish)"), + ("uk-ua", "Українська (Ukrainian)"), + ("zh-hans", "简体中文 (Simplified Chinese)"), + ("zh-hant", "繁體中文 (Traditional Chinese)"), + ], + max_length=255, + null=True, + ), + ), + ] diff --git a/bookwyrm/migrations/0196_merge_20240318_1737.py b/bookwyrm/migrations/0196_merge_20240318_1737.py new file mode 100644 index 000000000..2d80b2e58 --- /dev/null +++ b/bookwyrm/migrations/0196_merge_20240318_1737.py @@ -0,0 +1,13 @@ +# Generated by Django 3.2.23 on 2024-03-18 17:37 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0193_auto_20240128_0249"), + ("bookwyrm", "0195_alter_user_preferred_language"), + ] + + operations = [] diff --git a/bookwyrm/migrations/0196_merge_pr3134_into_main.py b/bookwyrm/migrations/0196_merge_pr3134_into_main.py new file mode 100644 index 000000000..0862f11b2 --- /dev/null +++ b/bookwyrm/migrations/0196_merge_pr3134_into_main.py @@ -0,0 +1,13 @@ +# Generated by Django 3.2.23 on 2024-03-18 00:48 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0191_migrate_search_vec_triggers_to_pgtriggers"), + ("bookwyrm", "0195_alter_user_preferred_language"), + ] + + operations = [] diff --git a/bookwyrm/migrations/0197_author_search_vector.py b/bookwyrm/migrations/0197_author_search_vector.py new file mode 100644 index 000000000..baa540cc0 --- /dev/null +++ b/bookwyrm/migrations/0197_author_search_vector.py @@ -0,0 +1,41 @@ +# Generated by Django 3.2.25 on 2024-03-20 15:15 + +import django.contrib.postgres.indexes +from django.db import migrations +import pgtrigger.compiler +import pgtrigger.migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0196_merge_pr3134_into_main"), + ] + + operations = [ + migrations.AddIndex( + model_name="author", + index=django.contrib.postgres.indexes.GinIndex( + fields=["search_vector"], name="bookwyrm_au_search__b050a8_gin" + ), + ), + pgtrigger.migrations.AddTrigger( + model_name="author", + trigger=pgtrigger.compiler.Trigger( + name="update_search_vector_on_author_edit", + sql=pgtrigger.compiler.UpsertTriggerSql( + func="new.search_vector := setweight(to_tsvector('simple', new.name), 'A') || setweight(to_tsvector('simple', coalesce(array_to_string(new.aliases, ' '), '')), 'B');RETURN NEW;", + hash="b97919016236d74d0ade51a0769a173ea269da64", + operation='INSERT OR UPDATE OF "name", "aliases", "search_vector"', + 
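+                    # "search_vector" is listed in UPDATE OF so that the data
+                    # migration below (SET search_vector = NULL) re-fires this
+                    # trigger and rebuilds the vector for every author.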
pgid="pgtrigger_update_search_vector_on_author_edit_c61cb", + table="bookwyrm_author", + when="BEFORE", + ), + ), + ), + migrations.RunSQL( + # Calculate search vector for all Authors. + sql="UPDATE bookwyrm_author SET search_vector = NULL;", + reverse_sql="UPDATE bookwyrm_author SET search_vector = NULL;", + ), + ] diff --git a/bookwyrm/migrations/0197_merge_20240324_0235.py b/bookwyrm/migrations/0197_merge_20240324_0235.py new file mode 100644 index 000000000..a7c01a955 --- /dev/null +++ b/bookwyrm/migrations/0197_merge_20240324_0235.py @@ -0,0 +1,13 @@ +# Generated by Django 3.2.25 on 2024-03-24 02:35 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0196_merge_20240318_1737"), + ("bookwyrm", "0196_merge_pr3134_into_main"), + ] + + operations = [] diff --git a/bookwyrm/migrations/0197_mergedauthor_mergedbook.py b/bookwyrm/migrations/0197_mergedauthor_mergedbook.py new file mode 100644 index 000000000..23ca38ab2 --- /dev/null +++ b/bookwyrm/migrations/0197_mergedauthor_mergedbook.py @@ -0,0 +1,48 @@ +# Generated by Django 3.2.24 on 2024-02-28 21:30 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0196_merge_pr3134_into_main"), + ] + + operations = [ + migrations.CreateModel( + name="MergedBook", + fields=[ + ("deleted_id", models.IntegerField(primary_key=True, serialize=False)), + ( + "merged_into", + models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + related_name="absorbed", + to="bookwyrm.book", + ), + ), + ], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="MergedAuthor", + fields=[ + ("deleted_id", models.IntegerField(primary_key=True, serialize=False)), + ( + "merged_into", + models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + related_name="absorbed", + to="bookwyrm.author", + ), + ), + ], + options={ + "abstract": False, + }, + ), + ] diff --git a/bookwyrm/migrations/0198_alter_bookwyrmexportjob_export_data.py b/bookwyrm/migrations/0198_alter_bookwyrmexportjob_export_data.py new file mode 100644 index 000000000..552584d2b --- /dev/null +++ b/bookwyrm/migrations/0198_alter_bookwyrmexportjob_export_data.py @@ -0,0 +1,23 @@ +# Generated by Django 3.2.25 on 2024-03-26 11:37 + +import bookwyrm.models.bookwyrm_export_job +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0197_merge_20240324_0235"), + ] + + operations = [ + migrations.AlterField( + model_name="bookwyrmexportjob", + name="export_data", + field=models.FileField( + null=True, + storage=bookwyrm.models.bookwyrm_export_job.select_exports_storage, + upload_to="", + ), + ), + ] diff --git a/bookwyrm/migrations/0198_book_search_vector_author_aliases.py b/bookwyrm/migrations/0198_book_search_vector_author_aliases.py new file mode 100644 index 000000000..491cb64bb --- /dev/null +++ b/bookwyrm/migrations/0198_book_search_vector_author_aliases.py @@ -0,0 +1,57 @@ +# Generated by Django 3.2.25 on 2024-03-20 15:52 + +from django.db import migrations +import pgtrigger.compiler +import pgtrigger.migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0197_author_search_vector"), + ] + + operations = [ + pgtrigger.migrations.RemoveTrigger( + model_name="author", + name="reset_search_vector_on_author_edit", + ), + pgtrigger.migrations.RemoveTrigger( + model_name="book", + 
name="update_search_vector_on_book_edit", + ), + pgtrigger.migrations.AddTrigger( + model_name="author", + trigger=pgtrigger.compiler.Trigger( + name="reset_book_search_vector_on_author_edit", + sql=pgtrigger.compiler.UpsertTriggerSql( + func="WITH updated_books AS (SELECT book_id FROM bookwyrm_book_authors WHERE author_id = new.id ) UPDATE bookwyrm_book SET search_vector = '' FROM updated_books WHERE id = updated_books.book_id;RETURN NEW;", + hash="68422c0f29879c5802b82159dde45297eff53e73", + operation='UPDATE OF "name", "aliases"', + pgid="pgtrigger_reset_book_search_vector_on_author_edit_a50c7", + table="bookwyrm_author", + when="AFTER", + ), + ), + ), + pgtrigger.migrations.AddTrigger( + model_name="book", + trigger=pgtrigger.compiler.Trigger( + name="update_search_vector_on_book_edit", + sql=pgtrigger.compiler.UpsertTriggerSql( + func="WITH author_names AS (SELECT array_to_string(bookwyrm_author.name || bookwyrm_author.aliases, ' ') AS name_and_aliases FROM bookwyrm_author LEFT JOIN bookwyrm_book_authors ON bookwyrm_author.id = bookwyrm_book_authors.author_id WHERE bookwyrm_book_authors.book_id = new.id ) SELECT setweight(coalesce(nullif(to_tsvector('english', new.title), ''), to_tsvector('simple', new.title)), 'A') || setweight(to_tsvector('english', coalesce(new.subtitle, '')), 'B') || (SELECT setweight(to_tsvector('simple', coalesce(array_to_string(array_agg(name_and_aliases), ' '), '')), 'C') FROM author_names) || setweight(to_tsvector('english', coalesce(new.series, '')), 'D') INTO new.search_vector;RETURN NEW;", + hash="9324f5ca76a6f5e63931881d62d11da11f595b2c", + operation='INSERT OR UPDATE OF "title", "subtitle", "series", "search_vector"', + pgid="pgtrigger_update_search_vector_on_book_edit_bec58", + table="bookwyrm_book", + when="BEFORE", + ), + ), + ), + migrations.RunSQL( + # Recalculate search vector for all Books because it now includes + # Author aliases. 
+ sql="UPDATE bookwyrm_book SET search_vector = NULL;", + reverse_sql="UPDATE bookwyrm_book SET search_vector = NULL;", + ), + ] diff --git a/bookwyrm/migrations/0199_alter_userblocks_user_object_and_more.py b/bookwyrm/migrations/0199_alter_userblocks_user_object_and_more.py new file mode 100644 index 000000000..bde1f25c1 --- /dev/null +++ b/bookwyrm/migrations/0199_alter_userblocks_user_object_and_more.py @@ -0,0 +1,70 @@ +# Generated by Django 4.2.11 on 2024-03-29 19:25 + +import bookwyrm.models.fields +from django.conf import settings +from django.db import migrations +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0198_book_search_vector_author_aliases"), + ] + + operations = [ + migrations.AlterField( + model_name="userblocks", + name="user_object", + field=bookwyrm.models.fields.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + related_name="%(class)s_user_object", + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.AlterField( + model_name="userblocks", + name="user_subject", + field=bookwyrm.models.fields.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + related_name="%(class)s_user_subject", + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.AlterField( + model_name="userfollowrequest", + name="user_object", + field=bookwyrm.models.fields.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + related_name="%(class)s_user_object", + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.AlterField( + model_name="userfollowrequest", + name="user_subject", + field=bookwyrm.models.fields.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + related_name="%(class)s_user_subject", + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.AlterField( + model_name="userfollows", + name="user_object", + field=bookwyrm.models.fields.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + related_name="%(class)s_user_object", + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.AlterField( + model_name="userfollows", + name="user_subject", + field=bookwyrm.models.fields.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + related_name="%(class)s_user_subject", + to=settings.AUTH_USER_MODEL, + ), + ), + ] diff --git a/bookwyrm/migrations/0199_merge_20240326_1217.py b/bookwyrm/migrations/0199_merge_20240326_1217.py new file mode 100644 index 000000000..7794af54a --- /dev/null +++ b/bookwyrm/migrations/0199_merge_20240326_1217.py @@ -0,0 +1,13 @@ +# Generated by Django 3.2.25 on 2024-03-26 12:17 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0198_alter_bookwyrmexportjob_export_data"), + ("bookwyrm", "0198_book_search_vector_author_aliases"), + ] + + operations = [] diff --git a/bookwyrm/migrations/0199_status_bookwyrm_st_remote__06aeba_idx.py b/bookwyrm/migrations/0199_status_bookwyrm_st_remote__06aeba_idx.py new file mode 100644 index 000000000..5d2513698 --- /dev/null +++ b/bookwyrm/migrations/0199_status_bookwyrm_st_remote__06aeba_idx.py @@ -0,0 +1,19 @@ +# Generated by Django 3.2.25 on 2024-04-02 19:53 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0198_book_search_vector_author_aliases"), + ] + + operations = [ + migrations.AddIndex( + model_name="status", + index=models.Index( + fields=["remote_id"], name="bookwyrm_st_remote__06aeba_idx" + ), + ), + ] diff --git a/bookwyrm/migrations/0200_alter_user_preferred_timezone.py 
b/bookwyrm/migrations/0200_alter_user_preferred_timezone.py new file mode 100644 index 000000000..1b21c0f94 --- /dev/null +++ b/bookwyrm/migrations/0200_alter_user_preferred_timezone.py @@ -0,0 +1,633 @@ +# Generated by Django 4.2.11 on 2024-04-01 20:19 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0199_alter_userblocks_user_object_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="user", + name="preferred_timezone", + field=models.CharField( + choices=[ + ("Africa/Abidjan", "Africa/Abidjan"), + ("Africa/Accra", "Africa/Accra"), + ("Africa/Addis_Ababa", "Africa/Addis_Ababa"), + ("Africa/Algiers", "Africa/Algiers"), + ("Africa/Asmara", "Africa/Asmara"), + ("Africa/Asmera", "Africa/Asmera"), + ("Africa/Bamako", "Africa/Bamako"), + ("Africa/Bangui", "Africa/Bangui"), + ("Africa/Banjul", "Africa/Banjul"), + ("Africa/Bissau", "Africa/Bissau"), + ("Africa/Blantyre", "Africa/Blantyre"), + ("Africa/Brazzaville", "Africa/Brazzaville"), + ("Africa/Bujumbura", "Africa/Bujumbura"), + ("Africa/Cairo", "Africa/Cairo"), + ("Africa/Casablanca", "Africa/Casablanca"), + ("Africa/Ceuta", "Africa/Ceuta"), + ("Africa/Conakry", "Africa/Conakry"), + ("Africa/Dakar", "Africa/Dakar"), + ("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "Africa/Djibouti"), + ("Africa/Douala", "Africa/Douala"), + ("Africa/El_Aaiun", "Africa/El_Aaiun"), + ("Africa/Freetown", "Africa/Freetown"), + ("Africa/Gaborone", "Africa/Gaborone"), + ("Africa/Harare", "Africa/Harare"), + ("Africa/Johannesburg", "Africa/Johannesburg"), + ("Africa/Juba", "Africa/Juba"), + ("Africa/Kampala", "Africa/Kampala"), + ("Africa/Khartoum", "Africa/Khartoum"), + ("Africa/Kigali", "Africa/Kigali"), + ("Africa/Kinshasa", "Africa/Kinshasa"), + ("Africa/Lagos", "Africa/Lagos"), + ("Africa/Libreville", "Africa/Libreville"), + ("Africa/Lome", "Africa/Lome"), + ("Africa/Luanda", "Africa/Luanda"), + ("Africa/Lubumbashi", "Africa/Lubumbashi"), + ("Africa/Lusaka", "Africa/Lusaka"), + ("Africa/Malabo", "Africa/Malabo"), + ("Africa/Maputo", "Africa/Maputo"), + ("Africa/Maseru", "Africa/Maseru"), + ("Africa/Mbabane", "Africa/Mbabane"), + ("Africa/Mogadishu", "Africa/Mogadishu"), + ("Africa/Monrovia", "Africa/Monrovia"), + ("Africa/Nairobi", "Africa/Nairobi"), + ("Africa/Ndjamena", "Africa/Ndjamena"), + ("Africa/Niamey", "Africa/Niamey"), + ("Africa/Nouakchott", "Africa/Nouakchott"), + ("Africa/Ouagadougou", "Africa/Ouagadougou"), + ("Africa/Porto-Novo", "Africa/Porto-Novo"), + ("Africa/Sao_Tome", "Africa/Sao_Tome"), + ("Africa/Timbuktu", "Africa/Timbuktu"), + ("Africa/Tripoli", "Africa/Tripoli"), + ("Africa/Tunis", "Africa/Tunis"), + ("Africa/Windhoek", "Africa/Windhoek"), + ("America/Adak", "America/Adak"), + ("America/Anchorage", "America/Anchorage"), + ("America/Anguilla", "America/Anguilla"), + ("America/Antigua", "America/Antigua"), + ("America/Araguaina", "America/Araguaina"), + ( + "America/Argentina/Buenos_Aires", + "America/Argentina/Buenos_Aires", + ), + ("America/Argentina/Catamarca", "America/Argentina/Catamarca"), + ( + "America/Argentina/ComodRivadavia", + "America/Argentina/ComodRivadavia", + ), + ("America/Argentina/Cordoba", "America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "America/Argentina/Mendoza"), + ( + "America/Argentina/Rio_Gallegos", + "America/Argentina/Rio_Gallegos", + ), + 
("America/Argentina/Salta", "America/Argentina/Salta"), + ("America/Argentina/San_Juan", "America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"), + ("America/Aruba", "America/Aruba"), + ("America/Asuncion", "America/Asuncion"), + ("America/Atikokan", "America/Atikokan"), + ("America/Atka", "America/Atka"), + ("America/Bahia", "America/Bahia"), + ("America/Bahia_Banderas", "America/Bahia_Banderas"), + ("America/Barbados", "America/Barbados"), + ("America/Belem", "America/Belem"), + ("America/Belize", "America/Belize"), + ("America/Blanc-Sablon", "America/Blanc-Sablon"), + ("America/Boa_Vista", "America/Boa_Vista"), + ("America/Bogota", "America/Bogota"), + ("America/Boise", "America/Boise"), + ("America/Buenos_Aires", "America/Buenos_Aires"), + ("America/Cambridge_Bay", "America/Cambridge_Bay"), + ("America/Campo_Grande", "America/Campo_Grande"), + ("America/Cancun", "America/Cancun"), + ("America/Caracas", "America/Caracas"), + ("America/Catamarca", "America/Catamarca"), + ("America/Cayenne", "America/Cayenne"), + ("America/Cayman", "America/Cayman"), + ("America/Chicago", "America/Chicago"), + ("America/Chihuahua", "America/Chihuahua"), + ("America/Ciudad_Juarez", "America/Ciudad_Juarez"), + ("America/Coral_Harbour", "America/Coral_Harbour"), + ("America/Cordoba", "America/Cordoba"), + ("America/Costa_Rica", "America/Costa_Rica"), + ("America/Creston", "America/Creston"), + ("America/Cuiaba", "America/Cuiaba"), + ("America/Curacao", "America/Curacao"), + ("America/Danmarkshavn", "America/Danmarkshavn"), + ("America/Dawson", "America/Dawson"), + ("America/Dawson_Creek", "America/Dawson_Creek"), + ("America/Denver", "America/Denver"), + ("America/Detroit", "America/Detroit"), + ("America/Dominica", "America/Dominica"), + ("America/Edmonton", "America/Edmonton"), + ("America/Eirunepe", "America/Eirunepe"), + ("America/El_Salvador", "America/El_Salvador"), + ("America/Ensenada", "America/Ensenada"), + ("America/Fort_Nelson", "America/Fort_Nelson"), + ("America/Fort_Wayne", "America/Fort_Wayne"), + ("America/Fortaleza", "America/Fortaleza"), + ("America/Glace_Bay", "America/Glace_Bay"), + ("America/Godthab", "America/Godthab"), + ("America/Goose_Bay", "America/Goose_Bay"), + ("America/Grand_Turk", "America/Grand_Turk"), + ("America/Grenada", "America/Grenada"), + ("America/Guadeloupe", "America/Guadeloupe"), + ("America/Guatemala", "America/Guatemala"), + ("America/Guayaquil", "America/Guayaquil"), + ("America/Guyana", "America/Guyana"), + ("America/Halifax", "America/Halifax"), + ("America/Havana", "America/Havana"), + ("America/Hermosillo", "America/Hermosillo"), + ("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "America/Indiana/Knox"), + ("America/Indiana/Marengo", "America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "America/Indiana/Winamac"), + ("America/Indianapolis", "America/Indianapolis"), + ("America/Inuvik", "America/Inuvik"), + ("America/Iqaluit", "America/Iqaluit"), + ("America/Jamaica", "America/Jamaica"), + ("America/Jujuy", "America/Jujuy"), + ("America/Juneau", "America/Juneau"), + ("America/Kentucky/Louisville", 
"America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "America/Kentucky/Monticello"), + ("America/Knox_IN", "America/Knox_IN"), + ("America/Kralendijk", "America/Kralendijk"), + ("America/La_Paz", "America/La_Paz"), + ("America/Lima", "America/Lima"), + ("America/Los_Angeles", "America/Los_Angeles"), + ("America/Louisville", "America/Louisville"), + ("America/Lower_Princes", "America/Lower_Princes"), + ("America/Maceio", "America/Maceio"), + ("America/Managua", "America/Managua"), + ("America/Manaus", "America/Manaus"), + ("America/Marigot", "America/Marigot"), + ("America/Martinique", "America/Martinique"), + ("America/Matamoros", "America/Matamoros"), + ("America/Mazatlan", "America/Mazatlan"), + ("America/Mendoza", "America/Mendoza"), + ("America/Menominee", "America/Menominee"), + ("America/Merida", "America/Merida"), + ("America/Metlakatla", "America/Metlakatla"), + ("America/Mexico_City", "America/Mexico_City"), + ("America/Miquelon", "America/Miquelon"), + ("America/Moncton", "America/Moncton"), + ("America/Monterrey", "America/Monterrey"), + ("America/Montevideo", "America/Montevideo"), + ("America/Montreal", "America/Montreal"), + ("America/Montserrat", "America/Montserrat"), + ("America/Nassau", "America/Nassau"), + ("America/New_York", "America/New_York"), + ("America/Nipigon", "America/Nipigon"), + ("America/Nome", "America/Nome"), + ("America/Noronha", "America/Noronha"), + ("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "America/North_Dakota/Center"), + ( + "America/North_Dakota/New_Salem", + "America/North_Dakota/New_Salem", + ), + ("America/Nuuk", "America/Nuuk"), + ("America/Ojinaga", "America/Ojinaga"), + ("America/Panama", "America/Panama"), + ("America/Pangnirtung", "America/Pangnirtung"), + ("America/Paramaribo", "America/Paramaribo"), + ("America/Phoenix", "America/Phoenix"), + ("America/Port-au-Prince", "America/Port-au-Prince"), + ("America/Port_of_Spain", "America/Port_of_Spain"), + ("America/Porto_Acre", "America/Porto_Acre"), + ("America/Porto_Velho", "America/Porto_Velho"), + ("America/Puerto_Rico", "America/Puerto_Rico"), + ("America/Punta_Arenas", "America/Punta_Arenas"), + ("America/Rainy_River", "America/Rainy_River"), + ("America/Rankin_Inlet", "America/Rankin_Inlet"), + ("America/Recife", "America/Recife"), + ("America/Regina", "America/Regina"), + ("America/Resolute", "America/Resolute"), + ("America/Rio_Branco", "America/Rio_Branco"), + ("America/Rosario", "America/Rosario"), + ("America/Santa_Isabel", "America/Santa_Isabel"), + ("America/Santarem", "America/Santarem"), + ("America/Santiago", "America/Santiago"), + ("America/Santo_Domingo", "America/Santo_Domingo"), + ("America/Sao_Paulo", "America/Sao_Paulo"), + ("America/Scoresbysund", "America/Scoresbysund"), + ("America/Shiprock", "America/Shiprock"), + ("America/Sitka", "America/Sitka"), + ("America/St_Barthelemy", "America/St_Barthelemy"), + ("America/St_Johns", "America/St_Johns"), + ("America/St_Kitts", "America/St_Kitts"), + ("America/St_Lucia", "America/St_Lucia"), + ("America/St_Thomas", "America/St_Thomas"), + ("America/St_Vincent", "America/St_Vincent"), + ("America/Swift_Current", "America/Swift_Current"), + ("America/Tegucigalpa", "America/Tegucigalpa"), + ("America/Thule", "America/Thule"), + ("America/Thunder_Bay", "America/Thunder_Bay"), + ("America/Tijuana", "America/Tijuana"), + ("America/Toronto", "America/Toronto"), + ("America/Tortola", "America/Tortola"), + ("America/Vancouver", "America/Vancouver"), + 
("America/Virgin", "America/Virgin"), + ("America/Whitehorse", "America/Whitehorse"), + ("America/Winnipeg", "America/Winnipeg"), + ("America/Yakutat", "America/Yakutat"), + ("America/Yellowknife", "America/Yellowknife"), + ("Antarctica/Casey", "Antarctica/Casey"), + ("Antarctica/Davis", "Antarctica/Davis"), + ("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "Antarctica/Macquarie"), + ("Antarctica/Mawson", "Antarctica/Mawson"), + ("Antarctica/McMurdo", "Antarctica/McMurdo"), + ("Antarctica/Palmer", "Antarctica/Palmer"), + ("Antarctica/Rothera", "Antarctica/Rothera"), + ("Antarctica/South_Pole", "Antarctica/South_Pole"), + ("Antarctica/Syowa", "Antarctica/Syowa"), + ("Antarctica/Troll", "Antarctica/Troll"), + ("Antarctica/Vostok", "Antarctica/Vostok"), + ("Arctic/Longyearbyen", "Arctic/Longyearbyen"), + ("Asia/Aden", "Asia/Aden"), + ("Asia/Almaty", "Asia/Almaty"), + ("Asia/Amman", "Asia/Amman"), + ("Asia/Anadyr", "Asia/Anadyr"), + ("Asia/Aqtau", "Asia/Aqtau"), + ("Asia/Aqtobe", "Asia/Aqtobe"), + ("Asia/Ashgabat", "Asia/Ashgabat"), + ("Asia/Ashkhabad", "Asia/Ashkhabad"), + ("Asia/Atyrau", "Asia/Atyrau"), + ("Asia/Baghdad", "Asia/Baghdad"), + ("Asia/Bahrain", "Asia/Bahrain"), + ("Asia/Baku", "Asia/Baku"), + ("Asia/Bangkok", "Asia/Bangkok"), + ("Asia/Barnaul", "Asia/Barnaul"), + ("Asia/Beirut", "Asia/Beirut"), + ("Asia/Bishkek", "Asia/Bishkek"), + ("Asia/Brunei", "Asia/Brunei"), + ("Asia/Calcutta", "Asia/Calcutta"), + ("Asia/Chita", "Asia/Chita"), + ("Asia/Choibalsan", "Asia/Choibalsan"), + ("Asia/Chongqing", "Asia/Chongqing"), + ("Asia/Chungking", "Asia/Chungking"), + ("Asia/Colombo", "Asia/Colombo"), + ("Asia/Dacca", "Asia/Dacca"), + ("Asia/Damascus", "Asia/Damascus"), + ("Asia/Dhaka", "Asia/Dhaka"), + ("Asia/Dili", "Asia/Dili"), + ("Asia/Dubai", "Asia/Dubai"), + ("Asia/Dushanbe", "Asia/Dushanbe"), + ("Asia/Famagusta", "Asia/Famagusta"), + ("Asia/Gaza", "Asia/Gaza"), + ("Asia/Harbin", "Asia/Harbin"), + ("Asia/Hebron", "Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "Asia/Hong_Kong"), + ("Asia/Hovd", "Asia/Hovd"), + ("Asia/Irkutsk", "Asia/Irkutsk"), + ("Asia/Istanbul", "Asia/Istanbul"), + ("Asia/Jakarta", "Asia/Jakarta"), + ("Asia/Jayapura", "Asia/Jayapura"), + ("Asia/Jerusalem", "Asia/Jerusalem"), + ("Asia/Kabul", "Asia/Kabul"), + ("Asia/Kamchatka", "Asia/Kamchatka"), + ("Asia/Karachi", "Asia/Karachi"), + ("Asia/Kashgar", "Asia/Kashgar"), + ("Asia/Kathmandu", "Asia/Kathmandu"), + ("Asia/Katmandu", "Asia/Katmandu"), + ("Asia/Khandyga", "Asia/Khandyga"), + ("Asia/Kolkata", "Asia/Kolkata"), + ("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"), + ("Asia/Kuching", "Asia/Kuching"), + ("Asia/Kuwait", "Asia/Kuwait"), + ("Asia/Macao", "Asia/Macao"), + ("Asia/Macau", "Asia/Macau"), + ("Asia/Magadan", "Asia/Magadan"), + ("Asia/Makassar", "Asia/Makassar"), + ("Asia/Manila", "Asia/Manila"), + ("Asia/Muscat", "Asia/Muscat"), + ("Asia/Nicosia", "Asia/Nicosia"), + ("Asia/Novokuznetsk", "Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "Asia/Novosibirsk"), + ("Asia/Omsk", "Asia/Omsk"), + ("Asia/Oral", "Asia/Oral"), + ("Asia/Phnom_Penh", "Asia/Phnom_Penh"), + ("Asia/Pontianak", "Asia/Pontianak"), + ("Asia/Pyongyang", "Asia/Pyongyang"), + ("Asia/Qatar", "Asia/Qatar"), + ("Asia/Qostanay", "Asia/Qostanay"), + ("Asia/Qyzylorda", "Asia/Qyzylorda"), + ("Asia/Rangoon", "Asia/Rangoon"), + ("Asia/Riyadh", "Asia/Riyadh"), + ("Asia/Saigon", "Asia/Saigon"), + ("Asia/Sakhalin", "Asia/Sakhalin"), + ("Asia/Samarkand", 
"Asia/Samarkand"), + ("Asia/Seoul", "Asia/Seoul"), + ("Asia/Shanghai", "Asia/Shanghai"), + ("Asia/Singapore", "Asia/Singapore"), + ("Asia/Srednekolymsk", "Asia/Srednekolymsk"), + ("Asia/Taipei", "Asia/Taipei"), + ("Asia/Tashkent", "Asia/Tashkent"), + ("Asia/Tbilisi", "Asia/Tbilisi"), + ("Asia/Tehran", "Asia/Tehran"), + ("Asia/Tel_Aviv", "Asia/Tel_Aviv"), + ("Asia/Thimbu", "Asia/Thimbu"), + ("Asia/Thimphu", "Asia/Thimphu"), + ("Asia/Tokyo", "Asia/Tokyo"), + ("Asia/Tomsk", "Asia/Tomsk"), + ("Asia/Ujung_Pandang", "Asia/Ujung_Pandang"), + ("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"), + ("Asia/Ulan_Bator", "Asia/Ulan_Bator"), + ("Asia/Urumqi", "Asia/Urumqi"), + ("Asia/Ust-Nera", "Asia/Ust-Nera"), + ("Asia/Vientiane", "Asia/Vientiane"), + ("Asia/Vladivostok", "Asia/Vladivostok"), + ("Asia/Yakutsk", "Asia/Yakutsk"), + ("Asia/Yangon", "Asia/Yangon"), + ("Asia/Yekaterinburg", "Asia/Yekaterinburg"), + ("Asia/Yerevan", "Asia/Yerevan"), + ("Atlantic/Azores", "Atlantic/Azores"), + ("Atlantic/Bermuda", "Atlantic/Bermuda"), + ("Atlantic/Canary", "Atlantic/Canary"), + ("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"), + ("Atlantic/Faeroe", "Atlantic/Faeroe"), + ("Atlantic/Faroe", "Atlantic/Faroe"), + ("Atlantic/Jan_Mayen", "Atlantic/Jan_Mayen"), + ("Atlantic/Madeira", "Atlantic/Madeira"), + ("Atlantic/Reykjavik", "Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "Atlantic/St_Helena"), + ("Atlantic/Stanley", "Atlantic/Stanley"), + ("Australia/ACT", "Australia/ACT"), + ("Australia/Adelaide", "Australia/Adelaide"), + ("Australia/Brisbane", "Australia/Brisbane"), + ("Australia/Broken_Hill", "Australia/Broken_Hill"), + ("Australia/Canberra", "Australia/Canberra"), + ("Australia/Currie", "Australia/Currie"), + ("Australia/Darwin", "Australia/Darwin"), + ("Australia/Eucla", "Australia/Eucla"), + ("Australia/Hobart", "Australia/Hobart"), + ("Australia/LHI", "Australia/LHI"), + ("Australia/Lindeman", "Australia/Lindeman"), + ("Australia/Lord_Howe", "Australia/Lord_Howe"), + ("Australia/Melbourne", "Australia/Melbourne"), + ("Australia/NSW", "Australia/NSW"), + ("Australia/North", "Australia/North"), + ("Australia/Perth", "Australia/Perth"), + ("Australia/Queensland", "Australia/Queensland"), + ("Australia/South", "Australia/South"), + ("Australia/Sydney", "Australia/Sydney"), + ("Australia/Tasmania", "Australia/Tasmania"), + ("Australia/Victoria", "Australia/Victoria"), + ("Australia/West", "Australia/West"), + ("Australia/Yancowinna", "Australia/Yancowinna"), + ("Brazil/Acre", "Brazil/Acre"), + ("Brazil/DeNoronha", "Brazil/DeNoronha"), + ("Brazil/East", "Brazil/East"), + ("Brazil/West", "Brazil/West"), + ("CET", "CET"), + ("CST6CDT", "CST6CDT"), + ("Canada/Atlantic", "Canada/Atlantic"), + ("Canada/Central", "Canada/Central"), + ("Canada/Eastern", "Canada/Eastern"), + ("Canada/Mountain", "Canada/Mountain"), + ("Canada/Newfoundland", "Canada/Newfoundland"), + ("Canada/Pacific", "Canada/Pacific"), + ("Canada/Saskatchewan", "Canada/Saskatchewan"), + ("Canada/Yukon", "Canada/Yukon"), + ("Chile/Continental", "Chile/Continental"), + ("Chile/EasterIsland", "Chile/EasterIsland"), + ("Cuba", "Cuba"), + ("EET", "EET"), + ("EST", "EST"), + ("EST5EDT", "EST5EDT"), + ("Egypt", "Egypt"), + ("Eire", "Eire"), + ("Etc/GMT", "Etc/GMT"), + ("Etc/GMT+0", "Etc/GMT+0"), + ("Etc/GMT+1", "Etc/GMT+1"), + ("Etc/GMT+10", "Etc/GMT+10"), + ("Etc/GMT+11", "Etc/GMT+11"), + ("Etc/GMT+12", "Etc/GMT+12"), + ("Etc/GMT+2", "Etc/GMT+2"), + ("Etc/GMT+3", "Etc/GMT+3"), + ("Etc/GMT+4", "Etc/GMT+4"), + 
("Etc/GMT+5", "Etc/GMT+5"), + ("Etc/GMT+6", "Etc/GMT+6"), + ("Etc/GMT+7", "Etc/GMT+7"), + ("Etc/GMT+8", "Etc/GMT+8"), + ("Etc/GMT+9", "Etc/GMT+9"), + ("Etc/GMT-0", "Etc/GMT-0"), + ("Etc/GMT-1", "Etc/GMT-1"), + ("Etc/GMT-10", "Etc/GMT-10"), + ("Etc/GMT-11", "Etc/GMT-11"), + ("Etc/GMT-12", "Etc/GMT-12"), + ("Etc/GMT-13", "Etc/GMT-13"), + ("Etc/GMT-14", "Etc/GMT-14"), + ("Etc/GMT-2", "Etc/GMT-2"), + ("Etc/GMT-3", "Etc/GMT-3"), + ("Etc/GMT-4", "Etc/GMT-4"), + ("Etc/GMT-5", "Etc/GMT-5"), + ("Etc/GMT-6", "Etc/GMT-6"), + ("Etc/GMT-7", "Etc/GMT-7"), + ("Etc/GMT-8", "Etc/GMT-8"), + ("Etc/GMT-9", "Etc/GMT-9"), + ("Etc/GMT0", "Etc/GMT0"), + ("Etc/Greenwich", "Etc/Greenwich"), + ("Etc/UCT", "Etc/UCT"), + ("Etc/UTC", "Etc/UTC"), + ("Etc/Universal", "Etc/Universal"), + ("Etc/Zulu", "Etc/Zulu"), + ("Europe/Amsterdam", "Europe/Amsterdam"), + ("Europe/Andorra", "Europe/Andorra"), + ("Europe/Astrakhan", "Europe/Astrakhan"), + ("Europe/Athens", "Europe/Athens"), + ("Europe/Belfast", "Europe/Belfast"), + ("Europe/Belgrade", "Europe/Belgrade"), + ("Europe/Berlin", "Europe/Berlin"), + ("Europe/Bratislava", "Europe/Bratislava"), + ("Europe/Brussels", "Europe/Brussels"), + ("Europe/Bucharest", "Europe/Bucharest"), + ("Europe/Budapest", "Europe/Budapest"), + ("Europe/Busingen", "Europe/Busingen"), + ("Europe/Chisinau", "Europe/Chisinau"), + ("Europe/Copenhagen", "Europe/Copenhagen"), + ("Europe/Dublin", "Europe/Dublin"), + ("Europe/Gibraltar", "Europe/Gibraltar"), + ("Europe/Guernsey", "Europe/Guernsey"), + ("Europe/Helsinki", "Europe/Helsinki"), + ("Europe/Isle_of_Man", "Europe/Isle_of_Man"), + ("Europe/Istanbul", "Europe/Istanbul"), + ("Europe/Jersey", "Europe/Jersey"), + ("Europe/Kaliningrad", "Europe/Kaliningrad"), + ("Europe/Kiev", "Europe/Kiev"), + ("Europe/Kirov", "Europe/Kirov"), + ("Europe/Kyiv", "Europe/Kyiv"), + ("Europe/Lisbon", "Europe/Lisbon"), + ("Europe/Ljubljana", "Europe/Ljubljana"), + ("Europe/London", "Europe/London"), + ("Europe/Luxembourg", "Europe/Luxembourg"), + ("Europe/Madrid", "Europe/Madrid"), + ("Europe/Malta", "Europe/Malta"), + ("Europe/Mariehamn", "Europe/Mariehamn"), + ("Europe/Minsk", "Europe/Minsk"), + ("Europe/Monaco", "Europe/Monaco"), + ("Europe/Moscow", "Europe/Moscow"), + ("Europe/Nicosia", "Europe/Nicosia"), + ("Europe/Oslo", "Europe/Oslo"), + ("Europe/Paris", "Europe/Paris"), + ("Europe/Podgorica", "Europe/Podgorica"), + ("Europe/Prague", "Europe/Prague"), + ("Europe/Riga", "Europe/Riga"), + ("Europe/Rome", "Europe/Rome"), + ("Europe/Samara", "Europe/Samara"), + ("Europe/San_Marino", "Europe/San_Marino"), + ("Europe/Sarajevo", "Europe/Sarajevo"), + ("Europe/Saratov", "Europe/Saratov"), + ("Europe/Simferopol", "Europe/Simferopol"), + ("Europe/Skopje", "Europe/Skopje"), + ("Europe/Sofia", "Europe/Sofia"), + ("Europe/Stockholm", "Europe/Stockholm"), + ("Europe/Tallinn", "Europe/Tallinn"), + ("Europe/Tirane", "Europe/Tirane"), + ("Europe/Tiraspol", "Europe/Tiraspol"), + ("Europe/Ulyanovsk", "Europe/Ulyanovsk"), + ("Europe/Uzhgorod", "Europe/Uzhgorod"), + ("Europe/Vaduz", "Europe/Vaduz"), + ("Europe/Vatican", "Europe/Vatican"), + ("Europe/Vienna", "Europe/Vienna"), + ("Europe/Vilnius", "Europe/Vilnius"), + ("Europe/Volgograd", "Europe/Volgograd"), + ("Europe/Warsaw", "Europe/Warsaw"), + ("Europe/Zagreb", "Europe/Zagreb"), + ("Europe/Zaporozhye", "Europe/Zaporozhye"), + ("Europe/Zurich", "Europe/Zurich"), + ("Factory", "Factory"), + ("GB", "GB"), + ("GB-Eire", "GB-Eire"), + ("GMT", "GMT"), + ("GMT+0", "GMT+0"), + ("GMT-0", "GMT-0"), + ("GMT0", "GMT0"), + ("Greenwich", 
"Greenwich"), + ("HST", "HST"), + ("Hongkong", "Hongkong"), + ("Iceland", "Iceland"), + ("Indian/Antananarivo", "Indian/Antananarivo"), + ("Indian/Chagos", "Indian/Chagos"), + ("Indian/Christmas", "Indian/Christmas"), + ("Indian/Cocos", "Indian/Cocos"), + ("Indian/Comoro", "Indian/Comoro"), + ("Indian/Kerguelen", "Indian/Kerguelen"), + ("Indian/Mahe", "Indian/Mahe"), + ("Indian/Maldives", "Indian/Maldives"), + ("Indian/Mauritius", "Indian/Mauritius"), + ("Indian/Mayotte", "Indian/Mayotte"), + ("Indian/Reunion", "Indian/Reunion"), + ("Iran", "Iran"), + ("Israel", "Israel"), + ("Jamaica", "Jamaica"), + ("Japan", "Japan"), + ("Kwajalein", "Kwajalein"), + ("Libya", "Libya"), + ("MET", "MET"), + ("MST", "MST"), + ("MST7MDT", "MST7MDT"), + ("Mexico/BajaNorte", "Mexico/BajaNorte"), + ("Mexico/BajaSur", "Mexico/BajaSur"), + ("Mexico/General", "Mexico/General"), + ("NZ", "NZ"), + ("NZ-CHAT", "NZ-CHAT"), + ("Navajo", "Navajo"), + ("PRC", "PRC"), + ("PST8PDT", "PST8PDT"), + ("Pacific/Apia", "Pacific/Apia"), + ("Pacific/Auckland", "Pacific/Auckland"), + ("Pacific/Bougainville", "Pacific/Bougainville"), + ("Pacific/Chatham", "Pacific/Chatham"), + ("Pacific/Chuuk", "Pacific/Chuuk"), + ("Pacific/Easter", "Pacific/Easter"), + ("Pacific/Efate", "Pacific/Efate"), + ("Pacific/Enderbury", "Pacific/Enderbury"), + ("Pacific/Fakaofo", "Pacific/Fakaofo"), + ("Pacific/Fiji", "Pacific/Fiji"), + ("Pacific/Funafuti", "Pacific/Funafuti"), + ("Pacific/Galapagos", "Pacific/Galapagos"), + ("Pacific/Gambier", "Pacific/Gambier"), + ("Pacific/Guadalcanal", "Pacific/Guadalcanal"), + ("Pacific/Guam", "Pacific/Guam"), + ("Pacific/Honolulu", "Pacific/Honolulu"), + ("Pacific/Johnston", "Pacific/Johnston"), + ("Pacific/Kanton", "Pacific/Kanton"), + ("Pacific/Kiritimati", "Pacific/Kiritimati"), + ("Pacific/Kosrae", "Pacific/Kosrae"), + ("Pacific/Kwajalein", "Pacific/Kwajalein"), + ("Pacific/Majuro", "Pacific/Majuro"), + ("Pacific/Marquesas", "Pacific/Marquesas"), + ("Pacific/Midway", "Pacific/Midway"), + ("Pacific/Nauru", "Pacific/Nauru"), + ("Pacific/Niue", "Pacific/Niue"), + ("Pacific/Norfolk", "Pacific/Norfolk"), + ("Pacific/Noumea", "Pacific/Noumea"), + ("Pacific/Pago_Pago", "Pacific/Pago_Pago"), + ("Pacific/Palau", "Pacific/Palau"), + ("Pacific/Pitcairn", "Pacific/Pitcairn"), + ("Pacific/Pohnpei", "Pacific/Pohnpei"), + ("Pacific/Ponape", "Pacific/Ponape"), + ("Pacific/Port_Moresby", "Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "Pacific/Rarotonga"), + ("Pacific/Saipan", "Pacific/Saipan"), + ("Pacific/Samoa", "Pacific/Samoa"), + ("Pacific/Tahiti", "Pacific/Tahiti"), + ("Pacific/Tarawa", "Pacific/Tarawa"), + ("Pacific/Tongatapu", "Pacific/Tongatapu"), + ("Pacific/Truk", "Pacific/Truk"), + ("Pacific/Wake", "Pacific/Wake"), + ("Pacific/Wallis", "Pacific/Wallis"), + ("Pacific/Yap", "Pacific/Yap"), + ("Poland", "Poland"), + ("Portugal", "Portugal"), + ("ROC", "ROC"), + ("ROK", "ROK"), + ("Singapore", "Singapore"), + ("Turkey", "Turkey"), + ("UCT", "UCT"), + ("US/Alaska", "US/Alaska"), + ("US/Aleutian", "US/Aleutian"), + ("US/Arizona", "US/Arizona"), + ("US/Central", "US/Central"), + ("US/East-Indiana", "US/East-Indiana"), + ("US/Eastern", "US/Eastern"), + ("US/Hawaii", "US/Hawaii"), + ("US/Indiana-Starke", "US/Indiana-Starke"), + ("US/Michigan", "US/Michigan"), + ("US/Mountain", "US/Mountain"), + ("US/Pacific", "US/Pacific"), + ("US/Samoa", "US/Samoa"), + ("UTC", "UTC"), + ("Universal", "Universal"), + ("W-SU", "W-SU"), + ("WET", "WET"), + ("Zulu", "Zulu"), + ("localtime", "localtime"), + ], + default="UTC", + max_length=255, + 
), + ), + ] diff --git a/bookwyrm/migrations/0200_auto_20240327_1914.py b/bookwyrm/migrations/0200_auto_20240327_1914.py new file mode 100644 index 000000000..38180b3f9 --- /dev/null +++ b/bookwyrm/migrations/0200_auto_20240327_1914.py @@ -0,0 +1,27 @@ +# Generated by Django 3.2.25 on 2024-03-27 19:14 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0199_merge_20240326_1217"), + ] + + operations = [ + migrations.RemoveField( + model_name="addfiletotar", + name="childjob_ptr", + ), + migrations.RemoveField( + model_name="addfiletotar", + name="parent_export_job", + ), + migrations.DeleteModel( + name="AddBookToUserExportJob", + ), + migrations.DeleteModel( + name="AddFileToTar", + ), + ] diff --git a/bookwyrm/migrations/0200_status_bookwyrm_st_thread__cf064f_idx.py b/bookwyrm/migrations/0200_status_bookwyrm_st_thread__cf064f_idx.py new file mode 100644 index 000000000..daca654c7 --- /dev/null +++ b/bookwyrm/migrations/0200_status_bookwyrm_st_thread__cf064f_idx.py @@ -0,0 +1,19 @@ +# Generated by Django 3.2.25 on 2024-04-03 19:05 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0199_status_bookwyrm_st_remote__06aeba_idx"), + ] + + operations = [ + migrations.AddIndex( + model_name="status", + index=models.Index( + fields=["thread_id"], name="bookwyrm_st_thread__cf064f_idx" + ), + ), + ] diff --git a/bookwyrm/migrations/0201_alter_hashtag_name_alter_user_localname.py b/bookwyrm/migrations/0201_alter_hashtag_name_alter_user_localname.py new file mode 100644 index 000000000..4fe41ec35 --- /dev/null +++ b/bookwyrm/migrations/0201_alter_hashtag_name_alter_user_localname.py @@ -0,0 +1,39 @@ +# Generated by Django 4.2.11 on 2024-04-01 21:09 + +import bookwyrm.models.fields +from django.db import migrations, models +from django.contrib.postgres.operations import CreateCollation + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0200_alter_user_preferred_timezone"), + ] + + operations = [ + CreateCollation( + "case_insensitive", + provider="icu", + locale="und-u-ks-level2", + deterministic=False, + ), + migrations.AlterField( + model_name="hashtag", + name="name", + field=bookwyrm.models.fields.CharField( + db_collation="case_insensitive", max_length=256 + ), + ), + migrations.AlterField( + model_name="user", + name="localname", + field=models.CharField( + db_collation="case_insensitive", + max_length=255, + null=True, + unique=True, + validators=[bookwyrm.models.fields.validate_localname], + ), + ), + ] diff --git a/bookwyrm/migrations/0201_keypair_bookwyrm_ke_remote__472927_idx.py b/bookwyrm/migrations/0201_keypair_bookwyrm_ke_remote__472927_idx.py new file mode 100644 index 000000000..e3d27a11b --- /dev/null +++ b/bookwyrm/migrations/0201_keypair_bookwyrm_ke_remote__472927_idx.py @@ -0,0 +1,19 @@ +# Generated by Django 3.2.25 on 2024-04-03 19:10 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0200_status_bookwyrm_st_thread__cf064f_idx"), + ] + + operations = [ + migrations.AddIndex( + model_name="keypair", + index=models.Index( + fields=["remote_id"], name="bookwyrm_ke_remote__472927_idx" + ), + ), + ] diff --git a/bookwyrm/migrations/0202_user_bookwyrm_us_usernam_b2546d_idx.py b/bookwyrm/migrations/0202_user_bookwyrm_us_usernam_b2546d_idx.py new file mode 100644 index 000000000..d8666fe3f --- /dev/null +++ 
b/bookwyrm/migrations/0202_user_bookwyrm_us_usernam_b2546d_idx.py @@ -0,0 +1,19 @@ +# Generated by Django 3.2.25 on 2024-04-03 19:14 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0201_keypair_bookwyrm_ke_remote__472927_idx"), + ] + + operations = [ + migrations.AddIndex( + model_name="user", + index=models.Index( + fields=["username"], name="bookwyrm_us_usernam_b2546d_idx" + ), + ), + ] diff --git a/bookwyrm/migrations/0203_user_bookwyrm_us_is_acti_972dc4_idx.py b/bookwyrm/migrations/0203_user_bookwyrm_us_is_acti_972dc4_idx.py new file mode 100644 index 000000000..b07f1c8a9 --- /dev/null +++ b/bookwyrm/migrations/0203_user_bookwyrm_us_is_acti_972dc4_idx.py @@ -0,0 +1,19 @@ +# Generated by Django 3.2.25 on 2024-04-03 19:22 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0202_user_bookwyrm_us_usernam_b2546d_idx"), + ] + + operations = [ + migrations.AddIndex( + model_name="user", + index=models.Index( + fields=["is_active", "local"], name="bookwyrm_us_is_acti_972dc4_idx" + ), + ), + ] diff --git a/bookwyrm/migrations/0204_merge_20240409_1042.py b/bookwyrm/migrations/0204_merge_20240409_1042.py new file mode 100644 index 000000000..5656ac586 --- /dev/null +++ b/bookwyrm/migrations/0204_merge_20240409_1042.py @@ -0,0 +1,13 @@ +# Generated by Django 3.2.25 on 2024-04-09 10:42 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0197_mergedauthor_mergedbook"), + ("bookwyrm", "0203_user_bookwyrm_us_is_acti_972dc4_idx"), + ] + + operations = [] diff --git a/bookwyrm/migrations/0205_merge_20240410_2022.py b/bookwyrm/migrations/0205_merge_20240410_2022.py new file mode 100644 index 000000000..294f48487 --- /dev/null +++ b/bookwyrm/migrations/0205_merge_20240410_2022.py @@ -0,0 +1,13 @@ +# Generated by Django 4.2.11 on 2024-04-10 20:22 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0201_alter_hashtag_name_alter_user_localname"), + ("bookwyrm", "0204_merge_20240409_1042"), + ] + + operations = [] diff --git a/bookwyrm/migrations/0205_merge_20240413_0232.py b/bookwyrm/migrations/0205_merge_20240413_0232.py new file mode 100644 index 000000000..9cca29c45 --- /dev/null +++ b/bookwyrm/migrations/0205_merge_20240413_0232.py @@ -0,0 +1,13 @@ +# Generated by Django 3.2.25 on 2024-04-13 02:32 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0200_auto_20240327_1914"), + ("bookwyrm", "0204_merge_20240409_1042"), + ] + + operations = [] diff --git a/bookwyrm/migrations/0206_merge_20240415_1537.py b/bookwyrm/migrations/0206_merge_20240415_1537.py new file mode 100644 index 000000000..454e69880 --- /dev/null +++ b/bookwyrm/migrations/0206_merge_20240415_1537.py @@ -0,0 +1,13 @@ +# Generated by Django 4.2.11 on 2024-04-15 15:37 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("bookwyrm", "0205_merge_20240410_2022"), + ("bookwyrm", "0205_merge_20240413_0232"), + ] + + operations = [] diff --git a/bookwyrm/models/__init__.py b/bookwyrm/models/__init__.py index e1c862f2b..6bb99c7f2 100644 --- a/bookwyrm/models/__init__.py +++ b/bookwyrm/models/__init__.py @@ -26,6 +26,8 @@ from .federated_server import FederatedServer from .group import Group, GroupMember, GroupMemberInvitation from .import_job import ImportJob, 
ImportItem +from .bookwyrm_import_job import BookwyrmImportJob +from .bookwyrm_export_job import BookwyrmExportJob from .move import MoveUser diff --git a/bookwyrm/models/activitypub_mixin.py b/bookwyrm/models/activitypub_mixin.py index 36317ad4e..06ef373e6 100644 --- a/bookwyrm/models/activitypub_mixin.py +++ b/bookwyrm/models/activitypub_mixin.py @@ -152,8 +152,9 @@ class ActivitypubMixin: # find anyone who's tagged in a status, for example mentions = self.recipients if hasattr(self, "recipients") else [] - # we always send activities to explicitly mentioned users' inboxes - recipients = [u.inbox for u in mentions or [] if not u.local] + # we always send activities to explicitly mentioned users (using shared inboxes + # where available to avoid duplicate submissions to a given instance) + recipients = {u.shared_inbox or u.inbox for u in mentions if not u.local} # unless it's a dm, all the followers should receive the activity if privacy != "direct": @@ -168,23 +169,23 @@ class ActivitypubMixin: # filter users first by whether they're using the desired software # this lets us send book updates only to other bw servers if software: - queryset = queryset.filter(bookwyrm_user=(software == "bookwyrm")) + queryset = queryset.filter(bookwyrm_user=software == "bookwyrm") # if there's a user, we only want to send to the user's followers if user: queryset = queryset.filter(following=user) - # ideally, we will send to shared inboxes for efficiency - shared_inboxes = ( - queryset.filter(shared_inbox__isnull=False) - .values_list("shared_inbox", flat=True) - .distinct() + # as above, we prefer shared inboxes if available + recipients.update( + queryset.filter(shared_inbox__isnull=False).values_list( + "shared_inbox", flat=True + ) ) - # but not everyone has a shared inbox - inboxes = queryset.filter(shared_inbox__isnull=True).values_list( - "inbox", flat=True + recipients.update( + queryset.filter(shared_inbox__isnull=True).values_list( + "inbox", flat=True + ) ) - recipients += list(shared_inboxes) + list(inboxes) - return list(set(recipients)) + return list(recipients) def to_activity_dataclass(self): """convert from a model to an activity""" @@ -205,14 +206,10 @@ class ObjectMixin(ActivitypubMixin): created: Optional[bool] = None, software: Any = None, priority: str = BROADCAST, + broadcast: bool = True, **kwargs: Any, ) -> None: """broadcast created/updated/deleted objects as appropriate""" - broadcast = kwargs.get("broadcast", True) - # this bonus kwarg would cause an error in the base save method - if "broadcast" in kwargs: - del kwargs["broadcast"] - created = created or not bool(self.id) # first off, we want to save normally no matter what super().save(*args, **kwargs) @@ -602,7 +599,7 @@ def to_ordered_collection_page( if activity_page.has_next(): next_page = f"{remote_id}?page={activity_page.next_page_number()}" if activity_page.has_previous(): - prev_page = f"{remote_id}?page=%d{activity_page.previous_page_number()}" + prev_page = f"{remote_id}?page={activity_page.previous_page_number()}" return activitypub.OrderedCollectionPage( id=f"{remote_id}?page={page}", partOf=remote_id, diff --git a/bookwyrm/models/author.py b/bookwyrm/models/author.py index 981e3c0cc..20c4e9e00 100644 --- a/bookwyrm/models/author.py +++ b/bookwyrm/models/author.py @@ -1,20 +1,25 @@ """ database schema for info about authors """ -import re -from typing import Tuple, Any -from django.contrib.postgres.indexes import GinIndex +import re +from typing import Any + from django.db import models +from 
django.contrib.postgres.indexes import GinIndex +import pgtrigger from bookwyrm import activitypub -from bookwyrm.settings import DOMAIN +from bookwyrm.settings import BASE_URL +from bookwyrm.utils.db import format_trigger -from .book import BookDataModel +from .book import BookDataModel, MergedAuthor from . import fields class Author(BookDataModel): """basic biographic info""" + merged_model = MergedAuthor + wikipedia_link = fields.CharField( max_length=255, blank=True, null=True, deduplication_field=True ) @@ -40,12 +45,12 @@ class Author(BookDataModel): ) bio = fields.HtmlField(null=True, blank=True) - def save(self, *args: Tuple[Any, ...], **kwargs: dict[str, Any]) -> None: + def save(self, *args: Any, **kwargs: Any) -> None: """normalize isni format""" - if self.isni: + if self.isni is not None: self.isni = re.sub(r"\s", "", self.isni) - return super().save(*args, **kwargs) + super().save(*args, **kwargs) @property def isni_link(self): @@ -65,11 +70,48 @@ class Author(BookDataModel): def get_remote_id(self): """editions and works both use "book" instead of model_name""" - return f"https://{DOMAIN}/author/{self.id}" - - activity_serializer = activitypub.Author + return f"{BASE_URL}/author/{self.id}" class Meta: - """sets up postgres GIN index field""" + """sets up indexes and triggers""" + + # pylint: disable=line-too-long indexes = (GinIndex(fields=["search_vector"]),) + triggers = [ + pgtrigger.Trigger( + name="update_search_vector_on_author_edit", + when=pgtrigger.Before, + operation=pgtrigger.Insert + | pgtrigger.UpdateOf("name", "aliases", "search_vector"), + func=format_trigger( + """new.search_vector := + -- author name, with priority A + setweight(to_tsvector('simple', new.name), 'A') || + -- author aliases, with priority B + setweight(to_tsvector('simple', coalesce(array_to_string(new.aliases, ' '), '')), 'B'); + RETURN new; + """ + ), + ), + pgtrigger.Trigger( + name="reset_book_search_vector_on_author_edit", + when=pgtrigger.After, + operation=pgtrigger.UpdateOf("name", "aliases"), + func=format_trigger( + """WITH updated_books AS ( + SELECT book_id + FROM bookwyrm_book_authors + WHERE author_id = new.id + ) + UPDATE bookwyrm_book + SET search_vector = '' + FROM updated_books + WHERE id = updated_books.book_id; + RETURN new; + """ + ), + ), + ] + + activity_serializer = activitypub.Author diff --git a/bookwyrm/models/base_model.py b/bookwyrm/models/base_model.py index 2d39e2a6f..ca13d9553 100644 --- a/bookwyrm/models/base_model.py +++ b/bookwyrm/models/base_model.py @@ -10,7 +10,7 @@ from django.http import Http404 from django.utils.translation import gettext_lazy as _ from django.utils.text import slugify -from bookwyrm.settings import DOMAIN +from bookwyrm.settings import BASE_URL from .fields import RemoteIdField @@ -38,7 +38,7 @@ class BookWyrmModel(models.Model): def get_remote_id(self): """generate the url that resolves to the local object, without a slug""" - base_path = f"https://{DOMAIN}" + base_path = BASE_URL if hasattr(self, "user"): base_path = f"{base_path}{self.user.local_path}" @@ -53,7 +53,7 @@ class BookWyrmModel(models.Model): @property def local_path(self): """how to link to this object in the local app, with a slug""" - local = self.get_remote_id().replace(f"https://{DOMAIN}", "") + local = self.get_remote_id().replace(BASE_URL, "") name = None if hasattr(self, "name_field"): diff --git a/bookwyrm/models/book.py b/bookwyrm/models/book.py index 6893b9da1..4ff377dbb 100644 --- a/bookwyrm/models/book.py +++ b/bookwyrm/models/book.py @@ -1,29 +1,33 @@ """ 
database schema for books and shelves """ + from itertools import chain import re -from typing import Any +from typing import Any, Dict, Optional, Iterable +from typing_extensions import Self from django.contrib.postgres.search import SearchVectorField from django.contrib.postgres.indexes import GinIndex from django.core.cache import cache from django.db import models, transaction -from django.db.models import Prefetch +from django.db.models import Prefetch, ManyToManyField from django.dispatch import receiver from django.utils.translation import gettext_lazy as _ from model_utils import FieldTracker from model_utils.managers import InheritanceManager from imagekit.models import ImageSpecField +import pgtrigger from bookwyrm import activitypub from bookwyrm.isbn.isbn import hyphenator_singleton as hyphenator from bookwyrm.preview_images import generate_edition_preview_image_task from bookwyrm.settings import ( - DOMAIN, + BASE_URL, DEFAULT_LANGUAGE, LANGUAGE_ARTICLES, ENABLE_PREVIEW_IMAGES, ENABLE_THUMBNAIL_GENERATION, ) +from bookwyrm.utils.db import format_trigger, add_update_fields from .activitypub_mixin import OrderedCollectionPageMixin, ObjectMixin from .base_model import BookWyrmModel @@ -92,24 +96,134 @@ class BookDataModel(ObjectMixin, BookWyrmModel): abstract = True - def save(self, *args: Any, **kwargs: Any) -> None: + def save( + self, *args: Any, update_fields: Optional[Iterable[str]] = None, **kwargs: Any + ) -> None: """ensure that the remote_id is within this instance""" if self.id: self.remote_id = self.get_remote_id() + update_fields = add_update_fields(update_fields, "remote_id") else: self.origin_id = self.remote_id self.remote_id = None - return super().save(*args, **kwargs) + update_fields = add_update_fields(update_fields, "origin_id", "remote_id") + + super().save(*args, update_fields=update_fields, **kwargs) # pylint: disable=arguments-differ def broadcast(self, activity, sender, software="bookwyrm", **kwargs): """only send book data updates to other bookwyrm instances""" super().broadcast(activity, sender, software=software, **kwargs) + def merge_into(self, canonical: Self, dry_run=False) -> Dict[str, Any]: + """merge this entity into another entity""" + if canonical.id == self.id: + raise ValueError(f"Cannot merge {self} into itself") + + absorbed_fields = canonical.absorb_data_from(self, dry_run=dry_run) + + if dry_run: + return absorbed_fields + + canonical.save() + + self.merged_model.objects.create(deleted_id=self.id, merged_into=canonical) + + # move related models to canonical + related_models = [ + (r.remote_field.name, r.related_model) for r in self._meta.related_objects + ] + # pylint: disable=protected-access + for related_field, related_model in related_models: + # Skip the ManyToMany fields that aren’t auto-created. These + # should have a corresponding OneToMany field in the model for + # the linking table anyway. If we update it through that model + # instead then we won’t lose the extra fields in the linking + # table. 
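+            # (For example, a Shelf's or List's "books" relation goes through
+            # ShelfBook / ListItem, which carry extra data such as shelved_date,
+            # notes and approved; those rows point at the book with their own
+            # foreign keys and are reassigned through that path instead.)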
+ # pylint: disable=protected-access + related_field_obj = related_model._meta.get_field(related_field) + if isinstance(related_field_obj, ManyToManyField): + through = related_field_obj.remote_field.through + if not through._meta.auto_created: + continue + related_objs = related_model.objects.filter(**{related_field: self}) + for related_obj in related_objs: + try: + setattr(related_obj, related_field, canonical) + related_obj.save() + except TypeError: + getattr(related_obj, related_field).add(canonical) + getattr(related_obj, related_field).remove(self) + + self.delete() + return absorbed_fields + + def absorb_data_from(self, other: Self, dry_run=False) -> Dict[str, Any]: + """fill empty fields with values from another entity""" + absorbed_fields = {} + for data_field in self._meta.get_fields(): + if not hasattr(data_field, "activitypub_field"): + continue + canonical_value = getattr(self, data_field.name) + other_value = getattr(other, data_field.name) + if not other_value: + continue + if isinstance(data_field, fields.ArrayField): + if new_values := list(set(other_value) - set(canonical_value)): + # append at the end (in no particular order) + if not dry_run: + setattr(self, data_field.name, canonical_value + new_values) + absorbed_fields[data_field.name] = new_values + elif isinstance(data_field, fields.PartialDateField): + if ( + (not canonical_value) + or (other_value.has_day and not canonical_value.has_day) + or (other_value.has_month and not canonical_value.has_month) + ): + if not dry_run: + setattr(self, data_field.name, other_value) + absorbed_fields[data_field.name] = other_value + else: + if not canonical_value: + if not dry_run: + setattr(self, data_field.name, other_value) + absorbed_fields[data_field.name] = other_value + return absorbed_fields + + +class MergedBookDataModel(models.Model): + """a BookDataModel instance that has been merged into another instance. 
Kept
+    to be able to redirect old URLs"""
+
+    deleted_id = models.IntegerField(primary_key=True)
+
+    class Meta:
+        """abstract just like BookDataModel"""
+
+        abstract = True
+
+
+class MergedBook(MergedBookDataModel):
+    """a Book that has been merged into another one"""
+
+    merged_into = models.ForeignKey(
+        "Book", on_delete=models.PROTECT, related_name="absorbed"
+    )
+
+
+class MergedAuthor(MergedBookDataModel):
+    """an Author that has been merged into another one"""
+
+    merged_into = models.ForeignKey(
+        "Author", on_delete=models.PROTECT, related_name="absorbed"
+    )
+
 
 class Book(BookDataModel):
     """a generic book, which can mean either an edition or a work"""
 
+    merged_model = MergedBook
+
     connector = models.ForeignKey("Connector", on_delete=models.PROTECT, null=True)
 
     # book/work metadata
@@ -190,9 +304,13 @@ class Book(BookDataModel):
         """properties of this edition, as a string"""
         items = [
             self.physical_format if hasattr(self, "physical_format") else None,
-            f"{self.languages[0]} language"
-            if self.languages and self.languages[0] and self.languages[0] != "English"
-            else None,
+            (
+                f"{self.languages[0]} language"
+                if self.languages
+                and self.languages[0]
+                and self.languages[0] != "English"
+                else None
+            ),
             str(self.published_date.year) if self.published_date else None,
             ", ".join(self.publishers) if hasattr(self, "publishers") else None,
         ]
@@ -210,11 +328,11 @@ class Book(BookDataModel):
         if not isinstance(self, (Edition, Work)):
             raise ValueError("Books should be added as Editions or Works")
 
-        return super().save(*args, **kwargs)
+        super().save(*args, **kwargs)
 
     def get_remote_id(self):
         """editions and works both use "book" instead of model_name"""
-        return f"https://{DOMAIN}/book/{self.id}"
+        return f"{BASE_URL}/book/{self.id}"
 
     def guess_sort_title(self):
         """Get a best-guess sort title for the current book"""
@@ -232,9 +350,49 @@ class Book(BookDataModel):
         )
 
     class Meta:
-        """sets up postgres GIN index field"""
+        """set up indexes and triggers"""
+
+        # pylint: disable=line-too-long
         indexes = (GinIndex(fields=["search_vector"]),)
+        triggers = [
+            pgtrigger.Trigger(
+                name="update_search_vector_on_book_edit",
+                when=pgtrigger.Before,
+                operation=pgtrigger.Insert
+                | pgtrigger.UpdateOf("title", "subtitle", "series", "search_vector"),
+                func=format_trigger(
+                    """
+                    WITH author_names AS (
+                        SELECT array_to_string(bookwyrm_author.name || bookwyrm_author.aliases, ' ') AS name_and_aliases
+                        FROM bookwyrm_author
+                        LEFT JOIN bookwyrm_book_authors
+                            ON bookwyrm_author.id = bookwyrm_book_authors.author_id
+                        WHERE bookwyrm_book_authors.book_id = new.id
+                    )
+                    SELECT
+                        -- title, with priority A (parse in English, default to simple if empty)
+                        setweight(COALESCE(nullif(
+                            to_tsvector('english', new.title), ''),
+                            to_tsvector('simple', new.title)), 'A') ||
+
+                        -- subtitle, with priority B (always in English?)
+ setweight(to_tsvector('english', COALESCE(new.subtitle, '')), 'B') || + + -- list of authors names and aliases (with priority C) + (SELECT setweight(to_tsvector('simple', COALESCE(array_to_string(ARRAY_AGG(name_and_aliases), ' '), '')), 'C') + FROM author_names + ) || + + --- last: series name, with lowest priority + setweight(to_tsvector('english', COALESCE(new.series, '')), 'D') + + INTO new.search_vector; + RETURN new; + """ + ), + ) + ] class Work(OrderedCollectionPageMixin, Book): @@ -247,10 +405,11 @@ class Work(OrderedCollectionPageMixin, Book): def save(self, *args, **kwargs): """set some fields on the edition object""" + super().save(*args, **kwargs) + # set rank for edition in self.editions.all(): edition.save() - return super().save(*args, **kwargs) @property def default_edition(self): @@ -356,33 +515,48 @@ class Edition(Book): # max rank is 9 return rank - def save(self, *args: Any, **kwargs: Any) -> None: + def save( + self, *args: Any, update_fields: Optional[Iterable[str]] = None, **kwargs: Any + ) -> None: """set some fields on the edition object""" # calculate isbn 10/13 - if self.isbn_13 and self.isbn_13[:3] == "978" and not self.isbn_10: + if ( + self.isbn_10 is None + and self.isbn_13 is not None + and self.isbn_13[:3] == "978" + ): self.isbn_10 = isbn_13_to_10(self.isbn_13) - if self.isbn_10 and not self.isbn_13: + update_fields = add_update_fields(update_fields, "isbn_10") + if self.isbn_13 is None and self.isbn_10 is not None: self.isbn_13 = isbn_10_to_13(self.isbn_10) + update_fields = add_update_fields(update_fields, "isbn_13") # normalize isbn format - if self.isbn_10: + if self.isbn_10 is not None: self.isbn_10 = normalize_isbn(self.isbn_10) - if self.isbn_13: + if self.isbn_13 is not None: self.isbn_13 = normalize_isbn(self.isbn_13) # set rank - self.edition_rank = self.get_rank() - - # clear author cache - if self.id: - for author_id in self.authors.values_list("id", flat=True): - cache.delete(f"author-books-{author_id}") + if (new := self.get_rank()) != self.edition_rank: + self.edition_rank = new + update_fields = add_update_fields(update_fields, "edition_rank") # Create sort title by removing articles from title if self.sort_title in [None, ""]: self.sort_title = self.guess_sort_title() + update_fields = add_update_fields(update_fields, "sort_title") - return super().save(*args, **kwargs) + super().save(*args, update_fields=update_fields, **kwargs) + + # clear author cache + if self.id: + cache.delete_many( + [ + f"author-books-{author_id}" + for author_id in self.authors.values_list("id", flat=True) + ] + ) @transaction.atomic def repair(self): diff --git a/bookwyrm/models/bookwyrm_export_job.py b/bookwyrm/models/bookwyrm_export_job.py new file mode 100644 index 000000000..870910c00 --- /dev/null +++ b/bookwyrm/models/bookwyrm_export_job.py @@ -0,0 +1,341 @@ +"""Export user account to tar.gz file for import into another Bookwyrm instance""" + +import logging +import os + +from boto3.session import Session as BotoSession +from s3_tar import S3Tar + +from django.db.models import BooleanField, FileField, JSONField +from django.core.serializers.json import DjangoJSONEncoder +from django.core.files.base import ContentFile +from django.core.files.storage import storages + +from bookwyrm import settings + +from bookwyrm.models import AnnualGoal, ReadThrough, ShelfBook, ListItem +from bookwyrm.models import Review, Comment, Quotation +from bookwyrm.models import Edition +from bookwyrm.models import UserFollows, User, UserBlocks +from bookwyrm.models.job import 
ParentJob +from bookwyrm.tasks import app, IMPORTS +from bookwyrm.utils.tar import BookwyrmTarFile + +logger = logging.getLogger(__name__) + + +class BookwyrmAwsSession(BotoSession): + """a boto session that always uses settings.AWS_S3_ENDPOINT_URL""" + + def client(self, *args, **kwargs): # pylint: disable=arguments-differ + kwargs["endpoint_url"] = settings.AWS_S3_ENDPOINT_URL + return super().client("s3", *args, **kwargs) + + +def select_exports_storage(): + """callable to allow for dependency on runtime configuration""" + return storages["exports"] + + +class BookwyrmExportJob(ParentJob): + """entry for a specific request to export a bookwyrm user""" + + export_data = FileField(null=True, storage=select_exports_storage) + export_json = JSONField(null=True, encoder=DjangoJSONEncoder) + json_completed = BooleanField(default=False) + + def start_job(self): + """schedule the first task""" + + task = create_export_json_task.delay(job_id=self.id) + self.task_id = task.id + self.save(update_fields=["task_id"]) + + +@app.task(queue=IMPORTS) +def create_export_json_task(job_id): + """create the JSON data for the export""" + + job = BookwyrmExportJob.objects.get(id=job_id) + + # don't start the job if it was stopped from the UI + if job.complete: + return + + try: + job.set_status("active") + + # generate JSON structure + job.export_json = export_json(job.user) + job.save(update_fields=["export_json"]) + + # create archive in separate task + create_archive_task.delay(job_id=job.id) + except Exception as err: # pylint: disable=broad-except + logger.exception( + "create_export_json_task for %s failed with error: %s", job, err + ) + job.set_status("failed") + + +def archive_file_location(file, directory="") -> str: + """get the relative location of a file inside the archive""" + return os.path.join(directory, file.name) + + +def add_file_to_s3_tar(s3_tar: S3Tar, storage, file, directory=""): + """ + add file to S3Tar inside directory, keeping any directories under its + storage location + """ + s3_tar.add_file( + os.path.join(storage.location, file.name), + folder=os.path.dirname(archive_file_location(file, directory=directory)), + ) + + +@app.task(queue=IMPORTS) +def create_archive_task(job_id): + """create the archive containing the JSON file and additional files""" + + job = BookwyrmExportJob.objects.get(id=job_id) + + # don't start the job if it was stopped from the UI + if job.complete: + return + + try: + export_task_id = str(job.task_id) + archive_filename = f"{export_task_id}.tar.gz" + export_json_bytes = DjangoJSONEncoder().encode(job.export_json).encode("utf-8") + + user = job.user + editions = get_books_for_user(user) + + if settings.USE_S3: + # Storage for writing temporary files + exports_storage = storages["exports"] + + # Handle for creating the final archive + s3_tar = S3Tar( + exports_storage.bucket_name, + os.path.join(exports_storage.location, archive_filename), + session=BookwyrmAwsSession(), + ) + + # Save JSON file to a temporary location + export_json_tmp_file = os.path.join(export_task_id, "archive.json") + exports_storage.save( + export_json_tmp_file, + ContentFile(export_json_bytes), + ) + s3_tar.add_file( + os.path.join(exports_storage.location, export_json_tmp_file) + ) + + # Add images to TAR + images_storage = storages["default"] + + if user.avatar: + add_file_to_s3_tar(s3_tar, images_storage, user.avatar) + + for edition in editions: + if edition.cover: + add_file_to_s3_tar( + s3_tar, images_storage, edition.cover, directory="images" + ) + + # Create archive and store 
file name
+            s3_tar.tar()
+            job.export_data = archive_filename
+            job.save(update_fields=["export_data"])
+
+            # Delete temporary files
+            exports_storage.delete(export_json_tmp_file)
+
+        else:
+            job.export_data = archive_filename
+            with job.export_data.open("wb") as tar_file:
+                with BookwyrmTarFile.open(mode="w:gz", fileobj=tar_file) as tar:
+                    # save json file
+                    tar.write_bytes(export_json_bytes)
+
+                    # Add avatar image if present
+                    if user.avatar:
+                        tar.add_image(user.avatar)
+
+                    for edition in editions:
+                        if edition.cover:
+                            tar.add_image(edition.cover, directory="images")
+            job.save(update_fields=["export_data"])
+
+        job.set_status("completed")
+
+    except Exception as err:  # pylint: disable=broad-except
+        logger.exception("create_archive_task for %s failed with error: %s", job, err)
+        job.set_status("failed")
+
+
+def export_json(user: User):
+    """create export JSON"""
+    data = export_user(user)  # in the root of the JSON structure
+    data["settings"] = export_settings(user)
+    data["goals"] = export_goals(user)
+    data["books"] = export_books(user)
+    data["saved_lists"] = export_saved_lists(user)
+    data["follows"] = export_follows(user)
+    data["blocks"] = export_blocks(user)
+    return data
+
+
+def export_user(user: User):
+    """export user data"""
+    data = user.to_activity()
+    if user.avatar:
+        data["icon"]["url"] = archive_file_location(user.avatar)
+    else:
+        data["icon"] = {}
+    return data
+
+
+def export_settings(user: User):
+    """Additional settings - can't be serialized as AP"""
+    vals = [
+        "show_goal",
+        "preferred_timezone",
+        "default_post_privacy",
+        "show_suggested_users",
+    ]
+    return {k: getattr(user, k) for k in vals}
+
+
+def export_saved_lists(user: User):
+    """add user saved lists to export JSON"""
+    return [l.remote_id for l in user.saved_lists.all()]
+
+
+def export_follows(user: User):
+    """add user follows to export JSON"""
+    follows = UserFollows.objects.filter(user_subject=user).distinct()
+    following = User.objects.filter(userfollows_user_object__in=follows).distinct()
+    return [f.remote_id for f in following]
+
+
+def export_blocks(user: User):
+    """add user blocks to export JSON"""
+    blocks = UserBlocks.objects.filter(user_subject=user).distinct()
+    blocking = User.objects.filter(userblocks_user_object__in=blocks).distinct()
+    return [b.remote_id for b in blocking]
+
+
+def export_goals(user: User):
+    """add user reading goals to export JSON"""
+    reading_goals = AnnualGoal.objects.filter(user=user).distinct()
+    return [
+        {"goal": goal.goal, "year": goal.year, "privacy": goal.privacy}
+        for goal in reading_goals
+    ]
+
+
+def export_books(user: User):
+    """add books to export JSON"""
+    editions = get_books_for_user(user)
+    return [export_book(user, edition) for edition in editions]
+
+
+def export_book(user: User, edition: Edition):
+    """add book to export JSON"""
+    data = {}
+    data["work"] = edition.parent_work.to_activity()
+    data["edition"] = edition.to_activity()
+
+    if edition.cover:
+        data["edition"]["cover"]["url"] = archive_file_location(
+            edition.cover, directory="images"
+        )
+
+    # authors
+    data["authors"] = [author.to_activity() for author in edition.authors.all()]
+
+    # Shelves this book is on
+    # Every ShelfItem is this book so we don't bother serializing it
+    shelf_books = (
+        ShelfBook.objects.select_related("shelf")
+        .filter(user=user, book=edition)
+        .distinct()
+    )
+    data["shelves"] = [shelfbook.shelf.to_activity() for shelfbook in shelf_books]
+
+    # Lists and ListItems
+    # ListItems include "notes" and "approved" so we need them
+    # even though we know 
it's this book
+    list_items = ListItem.objects.filter(book=edition, user=user).distinct()
+
+    data["lists"] = []
+    for item in list_items:
+        list_info = item.book_list.to_activity()
+        list_info[
+            "privacy"
+        ] = item.book_list.privacy  # this isn't serialized so we add it
+        list_info["list_item"] = item.to_activity()
+        data["lists"].append(list_info)
+
+    # Statuses
+    # Can't use select_subclasses here because
+    # we need to filter on the "book" value,
+    # which is not available on an ordinary Status
+    for status in ["comments", "quotations", "reviews"]:
+        data[status] = []
+
+    comments = Comment.objects.filter(user=user, book=edition).all()
+    for status in comments:
+        obj = status.to_activity()
+        obj["progress"] = status.progress
+        obj["progress_mode"] = status.progress_mode
+        data["comments"].append(obj)
+
+    quotes = Quotation.objects.filter(user=user, book=edition).all()
+    for status in quotes:
+        obj = status.to_activity()
+        obj["position"] = status.position
+        obj["endposition"] = status.endposition
+        obj["position_mode"] = status.position_mode
+        data["quotations"].append(obj)
+
+    reviews = Review.objects.filter(user=user, book=edition).all()
+    data["reviews"] = [status.to_activity() for status in reviews]
+
+    # readthroughs can't be serialized to activity
+    book_readthroughs = (
+        ReadThrough.objects.filter(user=user, book=edition).distinct().values()
+    )
+    data["readthroughs"] = list(book_readthroughs)
+    return data
+
+
+def get_books_for_user(user):
+    """
+    Get all the books and editions related to a user.
+
+    We use union() instead of Q objects because it creates
+    multiple simple queries instead of a much more complex DB query
+    that can time out.
+
+    """
+
+    shelf_eds = Edition.objects.select_related("parent_work").filter(shelves__user=user)
+    rt_eds = Edition.objects.select_related("parent_work").filter(
+        readthrough__user=user
+    )
+    review_eds = Edition.objects.select_related("parent_work").filter(review__user=user)
+    list_eds = Edition.objects.select_related("parent_work").filter(list__user=user)
+    comment_eds = Edition.objects.select_related("parent_work").filter(
+        comment__user=user
+    )
+    quote_eds = Edition.objects.select_related("parent_work").filter(
+        quotation__user=user
+    )
+
+    editions = shelf_eds.union(rt_eds, review_eds, list_eds, comment_eds, quote_eds)
+
+    return editions
diff --git a/bookwyrm/models/bookwyrm_import_job.py b/bookwyrm/models/bookwyrm_import_job.py
new file mode 100644
index 000000000..5229430eb
--- /dev/null
+++ b/bookwyrm/models/bookwyrm_import_job.py
@@ -0,0 +1,462 @@
+"""Import a user from another Bookwyrm instance"""
+
+import json
+import logging
+
+from django.db.models import FileField, JSONField, CharField
+from django.utils import timezone
+from django.utils.html import strip_tags
+from django.contrib.postgres.fields import ArrayField as DjangoArrayField
+
+from bookwyrm import activitypub
+from bookwyrm import models
+from bookwyrm.tasks import app, IMPORTS
+from bookwyrm.models.job import ParentJob, ParentTask, SubTask
+from bookwyrm.utils.tar import BookwyrmTarFile
+
+logger = logging.getLogger(__name__)
+
+
+class BookwyrmImportJob(ParentJob):
+    """entry for a specific request for importing a bookwyrm user backup"""
+
+    archive_file = FileField(null=True, blank=True)
+    import_data = JSONField(null=True)
+    required = DjangoArrayField(CharField(max_length=50, blank=True), blank=True)
+
+    def start_job(self):
+        """Start the job"""
+        start_import_task.delay(job_id=self.id, no_children=True)
+
+
+@app.task(queue=IMPORTS, base=ParentTask)
+def start_import_task(**kwargs): + """trigger the child import tasks for each user data""" + job = BookwyrmImportJob.objects.get(id=kwargs["job_id"]) + archive_file = job.archive_file + + # don't start the job if it was stopped from the UI + if job.complete: + return + + try: + archive_file.open("rb") + with BookwyrmTarFile.open(mode="r:gz", fileobj=archive_file) as tar: + json_filename = next( + filter(lambda n: n.startswith("archive"), tar.getnames()) + ) + job.import_data = json.loads(tar.read(json_filename).decode("utf-8")) + + if "include_user_profile" in job.required: + update_user_profile(job.user, tar, job.import_data) + if "include_user_settings" in job.required: + update_user_settings(job.user, job.import_data) + if "include_goals" in job.required: + update_goals(job.user, job.import_data.get("goals", [])) + if "include_saved_lists" in job.required: + upsert_saved_lists(job.user, job.import_data.get("saved_lists", [])) + if "include_follows" in job.required: + upsert_follows(job.user, job.import_data.get("follows", [])) + if "include_blocks" in job.required: + upsert_user_blocks(job.user, job.import_data.get("blocks", [])) + + process_books(job, tar) + + job.set_status("complete") + archive_file.close() + + except Exception as err: # pylint: disable=broad-except + logger.exception("User Import Job %s Failed with error: %s", job.id, err) + job.set_status("failed") + + +def process_books(job, tar): + """ + Process user import data related to books + We always import the books even if not assigning + them to shelves, lists etc + """ + + books = job.import_data.get("books") + + for data in books: + book = get_or_create_edition(data, tar) + + if "include_shelves" in job.required: + upsert_shelves(book, job.user, data) + + if "include_readthroughs" in job.required: + upsert_readthroughs(data.get("readthroughs"), job.user, book.id) + + if "include_comments" in job.required: + upsert_statuses( + job.user, models.Comment, data.get("comments"), book.remote_id + ) + if "include_quotations" in job.required: + upsert_statuses( + job.user, models.Quotation, data.get("quotations"), book.remote_id + ) + + if "include_reviews" in job.required: + upsert_statuses( + job.user, models.Review, data.get("reviews"), book.remote_id + ) + + if "include_lists" in job.required: + upsert_lists(job.user, data.get("lists"), book.id) + + +def get_or_create_edition(book_data, tar): + """Take a JSON string of work and edition data, + find or create the edition and work in the database and + return an edition instance""" + + edition = book_data.get("edition") + existing = models.Edition.find_existing(edition) + if existing: + return existing + + # make sure we have the authors in the local DB + # replace the old author ids in the edition JSON + edition["authors"] = [] + for author in book_data.get("authors"): + parsed_author = activitypub.parse(author) + instance = parsed_author.to_model( + model=models.Author, save=True, overwrite=True + ) + + edition["authors"].append(instance.remote_id) + + # we will add the cover later from the tar + # don't try to load it from the old server + cover = edition.get("cover", {}) + cover_path = cover.get("url", None) + edition["cover"] = {} + + # first we need the parent work to exist + work = book_data.get("work") + work["editions"] = [] + parsed_work = activitypub.parse(work) + work_instance = parsed_work.to_model(model=models.Work, save=True, overwrite=True) + + # now we have a work we can add it to the edition + # and create the edition model instance + edition["work"] = 
work_instance.remote_id
+    parsed_edition = activitypub.parse(edition)
+    book = parsed_edition.to_model(model=models.Edition, save=True, overwrite=True)
+
+    # set the cover image from the tar
+    if cover_path:
+        tar.write_image_to_file(cover_path, book.cover)
+
+    return book
+
+
+def upsert_readthroughs(data, user, book_id):
+    """Take a JSON string of readthroughs and
+    find or create the instances in the database"""
+
+    for read_through in data:
+
+        obj = {}
+        keys = [
+            "progress_mode",
+            "start_date",
+            "finish_date",
+            "stopped_date",
+            "is_active",
+        ]
+        for key in keys:
+            obj[key] = read_through[key]
+        obj["user_id"] = user.id
+        obj["book_id"] = book_id
+
+        existing = models.ReadThrough.objects.filter(**obj).first()
+        if not existing:
+            models.ReadThrough.objects.create(**obj)
+
+
+def upsert_statuses(user, cls, data, book_remote_id):
+    """Take a JSON string of a status and
+    find or create the instances in the database"""
+
+    for status in data:
+        if is_alias(
+            user, status["attributedTo"]
+        ):  # don't let l33t hax0rs steal other people's posts
+            # update ids and remove replies
+            status["attributedTo"] = user.remote_id
+            status["to"] = update_followers_address(user, status["to"])
+            status["cc"] = update_followers_address(user, status["cc"])
+            status[
+                "replies"
+            ] = (
+                {}
+            )  # this parses incorrectly but we can't set it without knowing the new id
+            status["inReplyToBook"] = book_remote_id
+            parsed = activitypub.parse(status)
+            if not status_already_exists(
+                user, parsed
+            ):  # don't duplicate posts on multiple imports
+
+                instance = parsed.to_model(model=cls, save=True, overwrite=True)
+
+                for val in [
+                    "progress",
+                    "progress_mode",
+                    "position",
+                    "endposition",
+                    "position_mode",
+                ]:
+                    if status.get(val):
+                        # copy any additional progress/position fields from the exported status
+                        setattr(instance, val, status[val])
+
+                instance.remote_id = instance.get_remote_id()  # update the remote_id
+                instance.save()  # save and broadcast
+
+        else:
+            logger.warning("User does not have permission to import statuses")
+
+
+def upsert_lists(user, lists, book_id):
+    """Take a list of objects each containing
+    a list and list item as AP objects
+
+    Because we are creating new IDs we can't assume the id
+    will exist or be accurate, so we only use to_model for
+    adding new items after checking whether they exist.
+ + """ + + book = models.Edition.objects.get(id=book_id) + + for blist in lists: + booklist = models.List.objects.filter(name=blist["name"], user=user).first() + if not booklist: + + blist["owner"] = user.remote_id + parsed = activitypub.parse(blist) + booklist = parsed.to_model(model=models.List, save=True, overwrite=True) + + booklist.privacy = blist["privacy"] + booklist.save() + + item = models.ListItem.objects.filter(book=book, book_list=booklist).exists() + if not item: + count = booklist.books.count() + models.ListItem.objects.create( + book=book, + book_list=booklist, + user=user, + notes=blist["list_item"]["notes"], + approved=blist["list_item"]["approved"], + order=count + 1, + ) + + +def upsert_shelves(book, user, book_data): + """Take shelf JSON objects and create + DB entries if they don't already exist""" + + shelves = book_data["shelves"] + for shelf in shelves: + + book_shelf = models.Shelf.objects.filter(name=shelf["name"], user=user).first() + + if not book_shelf: + book_shelf = models.Shelf.objects.create(name=shelf["name"], user=user) + + # add the book as a ShelfBook if needed + if not models.ShelfBook.objects.filter( + book=book, shelf=book_shelf, user=user + ).exists(): + models.ShelfBook.objects.create( + book=book, shelf=book_shelf, user=user, shelved_date=timezone.now() + ) + + +def update_user_profile(user, tar, data): + """update the user's profile from import data""" + name = data.get("name", None) + username = data.get("preferredUsername") + user.name = name if name else username + user.summary = strip_tags(data.get("summary", None)) + user.save(update_fields=["name", "summary"]) + if data["icon"].get("url"): + avatar_filename = next(filter(lambda n: n.startswith("avatar"), tar.getnames())) + tar.write_image_to_file(avatar_filename, user.avatar) + + +def update_user_settings(user, data): + """update the user's settings from import data""" + + update_fields = ["manually_approves_followers", "hide_follows", "discoverable"] + + ap_fields = [ + ("manuallyApprovesFollowers", "manually_approves_followers"), + ("hideFollows", "hide_follows"), + ("discoverable", "discoverable"), + ] + + for (ap_field, bw_field) in ap_fields: + setattr(user, bw_field, data[ap_field]) + + bw_fields = [ + "show_goal", + "show_suggested_users", + "default_post_privacy", + "preferred_timezone", + ] + + for field in bw_fields: + update_fields.append(field) + setattr(user, field, data["settings"][field]) + + user.save(update_fields=update_fields) + + +@app.task(queue=IMPORTS, base=SubTask) +def update_user_settings_task(job_id): + """wrapper task for user's settings import""" + parent_job = BookwyrmImportJob.objects.get(id=job_id) + + return update_user_settings(parent_job.user, parent_job.import_data.get("user")) + + +def update_goals(user, data): + """update the user's goals from import data""" + + for goal in data: + # edit the existing goal if there is one + existing = models.AnnualGoal.objects.filter( + year=goal["year"], user=user + ).first() + if existing: + for k in goal.keys(): + setattr(existing, k, goal[k]) + existing.save() + else: + goal["user"] = user + models.AnnualGoal.objects.create(**goal) + + +@app.task(queue=IMPORTS, base=SubTask) +def update_goals_task(job_id): + """wrapper task for user's goals import""" + parent_job = BookwyrmImportJob.objects.get(id=job_id) + + return update_goals(parent_job.user, parent_job.import_data.get("goals")) + + +def upsert_saved_lists(user, values): + """Take a list of remote ids and add as saved lists""" + + for remote_id in values: + 
book_list = activitypub.resolve_remote_id(remote_id, models.List)
+        if book_list:
+            user.saved_lists.add(book_list)
+
+
+@app.task(queue=IMPORTS, base=SubTask)
+def upsert_saved_lists_task(job_id):
+    """wrapper task for user's saved lists import"""
+    parent_job = BookwyrmImportJob.objects.get(id=job_id)
+
+    return upsert_saved_lists(
+        parent_job.user, parent_job.import_data.get("saved_lists")
+    )
+
+
+def upsert_follows(user, values):
+    """Take a list of remote ids and add as follows"""
+
+    for remote_id in values:
+        followee = activitypub.resolve_remote_id(remote_id, models.User)
+        if followee:
+            (follow_request, created,) = models.UserFollowRequest.objects.get_or_create(
+                user_subject=user,
+                user_object=followee,
+            )
+
+            if not created:
+                # this request probably failed to connect with the remote
+                # and should save to trigger a re-broadcast
+                follow_request.save()
+
+
+@app.task(queue=IMPORTS, base=SubTask)
+def upsert_follows_task(job_id):
+    """wrapper task for user's follows import"""
+    parent_job = BookwyrmImportJob.objects.get(id=job_id)
+
+    return upsert_follows(parent_job.user, parent_job.import_data.get("follows"))
+
+
+def upsert_user_blocks(user, user_ids):
+    """block users"""
+
+    for user_id in user_ids:
+        user_object = activitypub.resolve_remote_id(user_id, models.User)
+        if user_object:
+            exists = models.UserBlocks.objects.filter(
+                user_subject=user, user_object=user_object
+            ).exists()
+            if not exists:
+                models.UserBlocks.objects.create(
+                    user_subject=user, user_object=user_object
+                )
+            # remove the blocked user's lists from the groups
+            models.List.remove_from_group(user, user_object)
+            # remove the blocked user from all blocker's owned groups
+            models.GroupMember.remove(user, user_object)
+
+
+@app.task(queue=IMPORTS, base=SubTask)
+def upsert_user_blocks_task(job_id):
+    """wrapper task for user's blocks import"""
+    parent_job = BookwyrmImportJob.objects.get(id=job_id)
+
+    return upsert_user_blocks(
+        parent_job.user, parent_job.import_data.get("blocked_users")
+    )
+
+
+def update_followers_address(user, field):
+    """statuses to or cc followers need to have the followers
+    address updated to the new local user"""
+
+    for i, audience in enumerate(field):
+        if audience.rsplit("/")[-1] == "followers":
+            field[i] = user.followers_url
+
+    return field
+
+
+def is_alias(user, remote_id):
+    """check that the user is listed as movedTo or also_known_as
+    in the remote user's profile"""
+
+    remote_user = activitypub.resolve_remote_id(
+        remote_id=remote_id, model=models.User, save=False
+    )
+
+    if remote_user:
+
+        if remote_user.moved_to:
+            return user.remote_id == remote_user.moved_to
+
+        if remote_user.also_known_as:
+            return user in remote_user.also_known_as.all()
+
+    return False
+
+
+def status_already_exists(user, status):
+    """check whether this status has already been published
+    by this user. 
We can't rely on to_model() because it + only matches on remote_id, which we have to change + *after* saving because it needs the primary key (id)""" + + return models.Status.objects.filter( + user=user, content=status.content, published_date=status.published + ).exists() diff --git a/bookwyrm/models/connector.py b/bookwyrm/models/connector.py index 99e73ab37..f4b5be04c 100644 --- a/bookwyrm/models/connector.py +++ b/bookwyrm/models/connector.py @@ -11,7 +11,7 @@ ConnectorFiles = models.TextChoices("ConnectorFiles", CONNECTORS) class Connector(BookWyrmModel): """book data source connectors""" - identifier = models.CharField(max_length=255, unique=True) + identifier = models.CharField(max_length=255, unique=True) # domain priority = models.IntegerField(default=2) name = models.CharField(max_length=255, null=True, blank=True) connector_file = models.CharField(max_length=255, choices=ConnectorFiles.choices) diff --git a/bookwyrm/models/federated_server.py b/bookwyrm/models/federated_server.py index e1081ed45..5e08fc11d 100644 --- a/bookwyrm/models/federated_server.py +++ b/bookwyrm/models/federated_server.py @@ -16,7 +16,7 @@ FederationStatus = [ class FederatedServer(BookWyrmModel): """store which servers we federate with""" - server_name = models.CharField(max_length=255, unique=True) + server_name = models.CharField(max_length=255, unique=True) # domain status = models.CharField( max_length=255, default="federated", choices=FederationStatus ) @@ -64,5 +64,4 @@ class FederatedServer(BookWyrmModel): def is_blocked(cls, url: str) -> bool: """look up if a domain is blocked""" url = urlparse(url) - domain = url.netloc - return cls.objects.filter(server_name=domain, status="blocked").exists() + return cls.objects.filter(server_name=url.hostname, status="blocked").exists() diff --git a/bookwyrm/models/fields.py b/bookwyrm/models/fields.py index 4bd580705..6643bdc19 100644 --- a/bookwyrm/models/fields.py +++ b/bookwyrm/models/fields.py @@ -260,12 +260,12 @@ class PrivacyField(ActivitypubFieldMixin, models.CharField): if to == [self.public]: setattr(instance, self.name, "public") + elif self.public in cc: + setattr(instance, self.name, "unlisted") elif to == [user.followers_url]: setattr(instance, self.name, "followers") elif cc == []: setattr(instance, self.name, "direct") - elif self.public in cc: - setattr(instance, self.name, "unlisted") else: setattr(instance, self.name, "followers") return original == getattr(instance, self.name) @@ -482,7 +482,7 @@ class ImageField(ActivitypubFieldMixin, models.ImageField): if not url: return None - return activitypub.Document(url=url, name=alt) + return activitypub.Image(url=url, name=alt) def field_from_activity(self, value, allow_external_connections=True): image_slug = value diff --git a/bookwyrm/models/group.py b/bookwyrm/models/group.py index d02b56ab1..40a32b5dc 100644 --- a/bookwyrm/models/group.py +++ b/bookwyrm/models/group.py @@ -1,7 +1,7 @@ """ do book related things with other users """ from django.db import models, IntegrityError, transaction from django.db.models import Q -from bookwyrm.settings import DOMAIN +from bookwyrm.settings import BASE_URL from .base_model import BookWyrmModel from . 
import fields from .relationship import UserBlocks @@ -17,7 +17,7 @@ class Group(BookWyrmModel): def get_remote_id(self): """don't want the user to be in there in this case""" - return f"https://{DOMAIN}/group/{self.id}" + return f"{BASE_URL}/group/{self.id}" @classmethod def followers_filter(cls, queryset, viewer): diff --git a/bookwyrm/models/hashtag.py b/bookwyrm/models/hashtag.py index 7894a3528..5126f012d 100644 --- a/bookwyrm/models/hashtag.py +++ b/bookwyrm/models/hashtag.py @@ -2,18 +2,19 @@ from bookwyrm import activitypub from .activitypub_mixin import ActivitypubMixin from .base_model import BookWyrmModel -from .fields import CICharField +from .fields import CharField class Hashtag(ActivitypubMixin, BookWyrmModel): "a hashtag which can be used in statuses" - name = CICharField( + name = CharField( max_length=256, blank=False, null=False, activitypub_field="name", deduplication_field=True, + db_collation="case_insensitive", ) name_field = "name" diff --git a/bookwyrm/models/job.py b/bookwyrm/models/job.py new file mode 100644 index 000000000..5a2653571 --- /dev/null +++ b/bookwyrm/models/job.py @@ -0,0 +1,307 @@ +"""Everything needed for Celery to multi-thread complex tasks.""" + +from django.db import models +from django.db import transaction +from django.utils.translation import gettext_lazy as _ +from django.utils import timezone +from bookwyrm.models.user import User + +from bookwyrm.tasks import app + + +class Job(models.Model): + """Abstract model to store the state of a Task.""" + + class Status(models.TextChoices): + """Possible job states.""" + + PENDING = "pending", _("Pending") + ACTIVE = "active", _("Active") + COMPLETE = "complete", _("Complete") + STOPPED = "stopped", _("Stopped") + FAILED = "failed", _("Failed") + + task_id = models.UUIDField(unique=True, null=True, blank=True) + + created_date = models.DateTimeField(default=timezone.now) + updated_date = models.DateTimeField(default=timezone.now) + complete = models.BooleanField(default=False) + status = models.CharField( + max_length=50, choices=Status.choices, default=Status.PENDING, null=True + ) + + class Meta: + """Make it abstract""" + + abstract = True + + def complete_job(self): + """Report that the job has completed""" + if self.complete: + return + + self.status = self.Status.COMPLETE + self.complete = True + self.updated_date = timezone.now() + + self.save(update_fields=["status", "complete", "updated_date"]) + + def stop_job(self, reason=None): + """Stop the job""" + if self.complete: + return + + self.__terminate_job() + + if reason and reason == "failed": + self.status = self.Status.FAILED + else: + self.status = self.Status.STOPPED + self.complete = True + self.updated_date = timezone.now() + + self.save(update_fields=["status", "complete", "updated_date"]) + + def set_status(self, status): + """Set job status""" + if self.complete: + return + + if self.status == status: + return + + if status == self.Status.COMPLETE: + self.complete_job() + return + + if status == self.Status.STOPPED: + self.stop_job() + return + + if status == self.Status.FAILED: + self.stop_job(reason="failed") + return + + self.updated_date = timezone.now() + self.status = status + + self.save(update_fields=["status", "updated_date"]) + + def __terminate_job(self): + """Tell workers to ignore and not execute this task.""" + app.control.revoke(self.task_id, terminate=True) + + +class ParentJob(Job): + """Store the state of a Task which can spawn many :model:`ChildJob`s to spread + resource load. 
+ + Intended to be sub-classed if necessary via proxy or + multi-table inheritance. + Extends :model:`Job`. + """ + + user = models.ForeignKey(User, on_delete=models.CASCADE) + + def complete_job(self): + """Report that the job has completed and stop pending + children. Extend. + """ + super().complete_job() + self.__terminate_pending_child_jobs() + + def notify_child_job_complete(self): + """let the job know when the items get work done""" + if self.complete: + return + + self.updated_date = timezone.now() + self.save(update_fields=["updated_date"]) + + if not self.complete and self.has_completed: + self.complete_job() + + def __terminate_job(self): # pylint: disable=unused-private-member + """Tell workers to ignore and not execute this task + & pending child tasks. Extend. + """ + super().__terminate_job() + self.__terminate_pending_child_jobs() + + def __terminate_pending_child_jobs(self): + """Tell workers to ignore and not execute any pending child tasks.""" + tasks = self.pending_child_jobs.filter(task_id__isnull=False).values_list( + "task_id", flat=True + ) + app.control.revoke(list(tasks)) + + self.pending_child_jobs.update(status=self.Status.STOPPED) + + @property + def has_completed(self): + """has this job finished""" + return not self.pending_child_jobs.exists() + + @property + def pending_child_jobs(self): + """items that haven't been processed yet""" + return self.child_jobs.filter(complete=False) + + +class ChildJob(Job): + """Stores the state of a Task for the related :model:`ParentJob`. + + Intended to be sub-classed if necessary via proxy or + multi-table inheritance. + Extends :model:`Job`. + """ + + parent_job = models.ForeignKey( + ParentJob, on_delete=models.CASCADE, related_name="child_jobs" + ) + + def set_status(self, status): + """Set job and parent_job status. Extend.""" + super().set_status(status) + + if ( + status == self.Status.ACTIVE + and self.parent_job.status == self.Status.PENDING + ): + self.parent_job.set_status(self.Status.ACTIVE) + + def complete_job(self): + """Report to parent_job that the job has completed. Extend.""" + super().complete_job() + self.parent_job.notify_child_job_complete() + + +class ParentTask(app.Task): + """Used with ParentJob, Abstract Tasks execute code at specific points in + a Task's lifecycle, applying to all Tasks with the same 'base'. + + All status & ParentJob.task_id assignment is managed here for you. + Usage e.g. @app.task(base=ParentTask) + """ + + def before_start( + self, task_id, args, kwargs + ): # pylint: disable=no-self-use, unused-argument + """Handler called before the task starts. Override. + + Prepare ParentJob before the task starts. + + Arguments: + task_id (str): Unique id of the task to execute. + args (Tuple): Original arguments for the task to execute. + kwargs (Dict): Original keyword arguments for the task to execute. + + Keyword Arguments: + job_id (int): Unique 'id' of the ParentJob. + no_children (bool): If 'True' this is the only Task expected to run + for the given ParentJob. + + Returns: + None: The return value of this handler is ignored. + """ + job = ParentJob.objects.get(id=kwargs["job_id"]) + job.task_id = task_id + job.save(update_fields=["task_id"]) + + if kwargs["no_children"]: + job.set_status(ChildJob.Status.ACTIVE) + + def on_success( + self, retval, task_id, args, kwargs + ): # pylint: disable=no-self-use, unused-argument + """Run by the worker if the task executes successfully. Override. + + Update ParentJob on Task complete. 
+ + Arguments: + retval (Any): The return value of the task. + task_id (str): Unique id of the executed task. + args (Tuple): Original arguments for the executed task. + kwargs (Dict): Original keyword arguments for the executed task. + + Keyword Arguments: + job_id (int): Unique 'id' of the ParentJob. + no_children (bool): If 'True' this is the only Task expected to run + for the given ParentJob. + + Returns: + None: The return value of this handler is ignored. + """ + + if kwargs["no_children"]: + job = ParentJob.objects.get(id=kwargs["job_id"]) + job.complete_job() + + +class SubTask(app.Task): + """Used with ChildJob, Abstract Tasks execute code at specific points in + a Task's lifecycle, applying to all Tasks with the same 'base'. + + All status & ChildJob.task_id assignment is managed here for you. + Usage e.g. @app.task(base=SubTask) + """ + + def before_start( + self, task_id, *args, **kwargs + ): # pylint: disable=no-self-use, unused-argument + """Handler called before the task starts. Override. + + Prepare ChildJob before the task starts. + + Arguments: + task_id (str): Unique id of the task to execute. + args (Tuple): Original arguments for the task to execute. + kwargs (Dict): Original keyword arguments for the task to execute. + + Keyword Arguments: + job_id (int): Unique 'id' of the ParentJob. + child_id (int): Unique 'id' of the ChildJob. + + Returns: + None: The return value of this handler is ignored. + """ + child_job = ChildJob.objects.get(id=kwargs["child_id"]) + child_job.task_id = task_id + child_job.save(update_fields=["task_id"]) + child_job.set_status(ChildJob.Status.ACTIVE) + + def on_success( + self, retval, task_id, *args, **kwargs + ): # pylint: disable=no-self-use, unused-argument + """Run by the worker if the task executes successfully. Override. + + Notify ChildJob of task completion. + + Arguments: + retval (Any): The return value of the task. + task_id (str): Unique id of the executed task. + args (Tuple): Original arguments for the executed task. + kwargs (Dict): Original keyword arguments for the executed task. + + Keyword Arguments: + job_id (int): Unique 'id' of the ParentJob. + child_id (int): Unique 'id' of the ChildJob. + + Returns: + None: The return value of this handler is ignored. + """ + subtask = ChildJob.objects.get(id=kwargs["child_id"]) + subtask.complete_job() + + +@transaction.atomic +def create_child_job(parent_job, task_callback): + """Utility method for creating a ChildJob + and running a task to avoid DB race conditions + """ + child_job = ChildJob.objects.create(parent_job=parent_job) + transaction.on_commit( + lambda: task_callback.delay(job_id=parent_job.id, child_id=child_job.id) + ) + + return child_job diff --git a/bookwyrm/models/link.py b/bookwyrm/models/link.py index d334a9d29..4519f0c81 100644 --- a/bookwyrm/models/link.py +++ b/bookwyrm/models/link.py @@ -1,4 +1,5 @@ """ outlink data """ +from typing import Optional, Iterable from urllib.parse import urlparse from django.core.exceptions import PermissionDenied @@ -6,6 +7,7 @@ from django.db import models from django.utils.translation import gettext_lazy as _ from bookwyrm import activitypub +from bookwyrm.utils.db import add_update_fields from .activitypub_mixin import ActivitypubMixin from .base_model import BookWyrmModel from . 
import fields @@ -34,17 +36,19 @@ class Link(ActivitypubMixin, BookWyrmModel): """link name via the associated domain""" return self.domain.name - def save(self, *args, **kwargs): + def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs): """create a link""" # get or create the associated domain if not self.domain: - domain = urlparse(self.url).netloc + domain = urlparse(self.url).hostname self.domain, _ = LinkDomain.objects.get_or_create(domain=domain) + update_fields = add_update_fields(update_fields, "domain") # this is never broadcast, the owning model broadcasts an update if "broadcast" in kwargs: del kwargs["broadcast"] - return super().save(*args, **kwargs) + + super().save(*args, update_fields=update_fields, **kwargs) AvailabilityChoices = [ @@ -88,8 +92,10 @@ class LinkDomain(BookWyrmModel): return raise PermissionDenied() - def save(self, *args, **kwargs): + def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs): """set a default name""" if not self.name: self.name = self.domain - super().save(*args, **kwargs) + update_fields = add_update_fields(update_fields, "name") + + super().save(*args, update_fields=update_fields, **kwargs) diff --git a/bookwyrm/models/list.py b/bookwyrm/models/list.py index 63dd5b23f..df7e8162c 100644 --- a/bookwyrm/models/list.py +++ b/bookwyrm/models/list.py @@ -1,4 +1,5 @@ """ make a list of books!! """ +from typing import Optional, Iterable import uuid from django.core.exceptions import PermissionDenied @@ -7,7 +8,8 @@ from django.db.models import Q from django.utils import timezone from bookwyrm import activitypub -from bookwyrm.settings import DOMAIN +from bookwyrm.settings import BASE_URL +from bookwyrm.utils.db import add_update_fields from .activitypub_mixin import CollectionItemMixin, OrderedCollectionMixin from .base_model import BookWyrmModel @@ -50,7 +52,7 @@ class List(OrderedCollectionMixin, BookWyrmModel): def get_remote_id(self): """don't want the user to be in there in this case""" - return f"https://{DOMAIN}/list/{self.id}" + return f"{BASE_URL}/list/{self.id}" @property def collection_queryset(self): @@ -124,11 +126,13 @@ class List(OrderedCollectionMixin, BookWyrmModel): group=None, curation="closed" ) - def save(self, *args, **kwargs): + def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs): """on save, update embed_key and avoid clash with existing code""" if not self.embed_key: self.embed_key = uuid.uuid4() - super().save(*args, **kwargs) + update_fields = add_update_fields(update_fields, "embed_key") + + super().save(*args, update_fields=update_fields, **kwargs) class ListItem(CollectionItemMixin, BookWyrmModel): diff --git a/bookwyrm/models/move.py b/bookwyrm/models/move.py index d6d8ef78f..5038058b7 100644 --- a/bookwyrm/models/move.py +++ b/bookwyrm/models/move.py @@ -10,7 +10,7 @@ from .notification import Notification, NotificationType class Move(ActivityMixin, BookWyrmModel): - """migrating an activitypub user account""" + """migrating an activitypub object""" user = fields.ForeignKey( "User", on_delete=models.PROTECT, activitypub_field="actor" @@ -48,24 +48,21 @@ class MoveUser(Move): """update user info and broadcast it""" # only allow if the source is listed in the target's alsoKnownAs - if self.user in self.target.also_known_as.all(): - self.user.also_known_as.add(self.target.id) - self.user.update_active_date() - self.user.moved_to = self.target.remote_id - self.user.save(update_fields=["moved_to"]) - - if self.user.local: - kwargs[ - "broadcast" - ] 
= True # Only broadcast if we are initiating the Move - - super().save(*args, **kwargs) - - for follower in self.user.followers.all(): - if follower.local: - Notification.notify( - follower, self.user, notification_type=NotificationType.MOVE - ) - - else: + if self.user not in self.target.also_known_as.all(): raise PermissionDenied() + + self.user.also_known_as.add(self.target.id) + self.user.update_active_date() + self.user.moved_to = self.target.remote_id + self.user.save(update_fields=["moved_to"]) + + if self.user.local: + kwargs["broadcast"] = True # Only broadcast if we are initiating the Move + + super().save(*args, **kwargs) + + for follower in self.user.followers.all(): + if follower.local: + Notification.notify( + follower, self.user, notification_type=NotificationType.MOVE + ) diff --git a/bookwyrm/models/notification.py b/bookwyrm/models/notification.py index d056c05b3..ca1e2aeb0 100644 --- a/bookwyrm/models/notification.py +++ b/bookwyrm/models/notification.py @@ -1,8 +1,16 @@ """ alert a user to activity """ from django.db import models, transaction from django.dispatch import receiver +from bookwyrm.models.bookwyrm_export_job import BookwyrmExportJob from .base_model import BookWyrmModel -from . import Boost, Favorite, GroupMemberInvitation, ImportJob, LinkDomain +from . import ( + Boost, + Favorite, + GroupMemberInvitation, + ImportJob, + BookwyrmImportJob, + LinkDomain, +) from . import ListItem, Report, Status, User, UserFollowRequest from .site import InviteRequest @@ -23,6 +31,8 @@ class NotificationType(models.TextChoices): # Imports IMPORT = "IMPORT" + USER_IMPORT = "USER_IMPORT" + USER_EXPORT = "USER_EXPORT" # List activity ADD = "ADD" @@ -63,6 +73,9 @@ class Notification(BookWyrmModel): ) related_status = models.ForeignKey("Status", on_delete=models.CASCADE, null=True) related_import = models.ForeignKey("ImportJob", on_delete=models.CASCADE, null=True) + related_user_export = models.ForeignKey( + "BookwyrmExportJob", on_delete=models.CASCADE, null=True + ) related_list_items = models.ManyToManyField( "ListItem", symmetrical=False, related_name="notifications" ) @@ -226,6 +239,36 @@ def notify_user_on_import_complete( ) +@receiver(models.signals.post_save, sender=BookwyrmImportJob) +# pylint: disable=unused-argument +def notify_user_on_user_import_complete( + sender, instance, *args, update_fields=None, **kwargs +): + """we imported your user details! aren't you proud of us""" + update_fields = update_fields or [] + if not instance.complete or "complete" not in update_fields: + return + Notification.objects.create( + user=instance.user, notification_type=NotificationType.USER_IMPORT + ) + + +@receiver(models.signals.post_save, sender=BookwyrmExportJob) +# pylint: disable=unused-argument +def notify_user_on_user_export_complete( + sender, instance, *args, update_fields=None, **kwargs +): + """we exported your user details! 
aren't you proud of us""" + update_fields = update_fields or [] + if not instance.complete or "complete" not in update_fields: + return + Notification.objects.create( + user=instance.user, + notification_type=NotificationType.USER_EXPORT, + related_user_export=instance, + ) + + @receiver(models.signals.post_save, sender=Report) @transaction.atomic # pylint: disable=unused-argument diff --git a/bookwyrm/models/readthrough.py b/bookwyrm/models/readthrough.py index 4911c715b..7700b4a87 100644 --- a/bookwyrm/models/readthrough.py +++ b/bookwyrm/models/readthrough.py @@ -1,9 +1,13 @@ """ progress in a book """ +from typing import Optional, Iterable + from django.core import validators from django.core.cache import cache from django.db import models from django.db.models import F, Q +from bookwyrm.utils.db import add_update_fields + from .base_model import BookWyrmModel @@ -30,14 +34,17 @@ class ReadThrough(BookWyrmModel): stopped_date = models.DateTimeField(blank=True, null=True) is_active = models.BooleanField(default=True) - def save(self, *args, **kwargs): + def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs): """update user active time""" - cache.delete(f"latest_read_through-{self.user_id}-{self.book_id}") - self.user.update_active_date() # an active readthrough must have an unset finish date if self.finish_date or self.stopped_date: self.is_active = False - super().save(*args, **kwargs) + update_fields = add_update_fields(update_fields, "is_active") + + super().save(*args, update_fields=update_fields, **kwargs) + + cache.delete(f"latest_read_through-{self.user_id}-{self.book_id}") + self.user.update_active_date() def create_update(self): """add update to the readthrough""" diff --git a/bookwyrm/models/relationship.py b/bookwyrm/models/relationship.py index 7af6ad5ab..745ff78b6 100644 --- a/bookwyrm/models/relationship.py +++ b/bookwyrm/models/relationship.py @@ -38,14 +38,16 @@ class UserRelationship(BookWyrmModel): def save(self, *args, **kwargs): """clear the template cache""" - clear_cache(self.user_subject, self.user_object) super().save(*args, **kwargs) + clear_cache(self.user_subject, self.user_object) + def delete(self, *args, **kwargs): """clear the template cache""" - clear_cache(self.user_subject, self.user_object) super().delete(*args, **kwargs) + clear_cache(self.user_subject, self.user_object) + class Meta: """relationships should be unique""" @@ -65,6 +67,13 @@ class UserRelationship(BookWyrmModel): base_path = self.user_subject.remote_id return f"{base_path}#follows/{self.id}" + def get_accept_reject_id(self, status): + """get id for sending an accept or reject of a local user""" + + base_path = self.user_object.remote_id + status_id = self.id or 0 + return f"{base_path}#{status}/{status_id}" + class UserFollows(ActivityMixin, UserRelationship): """Following a user""" @@ -105,6 +114,20 @@ class UserFollows(ActivityMixin, UserRelationship): ) return obj + def reject(self): + """generate a Reject for this follow. 
This would normally happen + when a user deletes a follow they previously accepted""" + + if self.user_object.local: + activity = activitypub.Reject( + id=self.get_accept_reject_id(status="rejects"), + actor=self.user_object.remote_id, + object=self.to_activity(), + ).serialize() + self.broadcast(activity, self.user_object) + + self.delete() + class UserFollowRequest(ActivitypubMixin, UserRelationship): """following a user requires manual or automatic confirmation""" @@ -148,13 +171,6 @@ class UserFollowRequest(ActivitypubMixin, UserRelationship): if not manually_approves: self.accept() - def get_accept_reject_id(self, status): - """get id for sending an accept or reject of a local user""" - - base_path = self.user_object.remote_id - status_id = self.id or 0 - return f"{base_path}#{status}/{status_id}" - def accept(self, broadcast_only=False): """turn this request into the real deal""" user = self.user_object diff --git a/bookwyrm/models/report.py b/bookwyrm/models/report.py index 74a9bbe41..64ade3a40 100644 --- a/bookwyrm/models/report.py +++ b/bookwyrm/models/report.py @@ -3,7 +3,7 @@ from django.core.exceptions import PermissionDenied from django.db import models from django.utils.translation import gettext_lazy as _ -from bookwyrm.settings import DOMAIN +from bookwyrm.settings import BASE_URL from .base_model import BookWyrmModel @@ -46,7 +46,7 @@ class Report(BookWyrmModel): raise PermissionDenied() def get_remote_id(self): - return f"https://{DOMAIN}/settings/reports/{self.id}" + return f"{BASE_URL}/settings/reports/{self.id}" def comment(self, user, note): """comment on a report""" diff --git a/bookwyrm/models/shelf.py b/bookwyrm/models/shelf.py index 3d92f8d43..0b9ef2b09 100644 --- a/bookwyrm/models/shelf.py +++ b/bookwyrm/models/shelf.py @@ -1,13 +1,15 @@ """ puttin' books on shelves """ import re +from typing import Optional, Iterable from django.core.cache import cache from django.core.exceptions import PermissionDenied from django.db import models from django.utils import timezone from bookwyrm import activitypub -from bookwyrm.settings import DOMAIN +from bookwyrm.settings import BASE_URL from bookwyrm.tasks import BROADCAST +from bookwyrm.utils.db import add_update_fields from .activitypub_mixin import CollectionItemMixin, OrderedCollectionMixin from .base_model import BookWyrmModel from . 
import fields @@ -44,8 +46,9 @@ class Shelf(OrderedCollectionMixin, BookWyrmModel): """set the identifier""" super().save(*args, priority=priority, **kwargs) if not self.identifier: + # this needs the auto increment ID from the save() above self.identifier = self.get_identifier() - super().save(*args, **kwargs, broadcast=False) + super().save(*args, **kwargs, broadcast=False, update_fields={"identifier"}) def get_identifier(self): """custom-shelf-123 for the url""" @@ -71,7 +74,7 @@ class Shelf(OrderedCollectionMixin, BookWyrmModel): @property def local_path(self): """No slugs""" - return self.get_remote_id().replace(f"https://{DOMAIN}", "") + return self.get_remote_id().replace(BASE_URL, "") def raise_not_deletable(self, viewer): """don't let anyone delete a default shelf""" @@ -100,10 +103,21 @@ class ShelfBook(CollectionItemMixin, BookWyrmModel): activity_serializer = activitypub.ShelfItem collection_field = "shelf" - def save(self, *args, priority=BROADCAST, **kwargs): + def save( + self, + *args, + priority=BROADCAST, + update_fields: Optional[Iterable[str]] = None, + **kwargs, + ): if not self.user: self.user = self.shelf.user - if self.id and self.user.local: + update_fields = add_update_fields(update_fields, "user") + + is_update = self.id is not None + super().save(*args, priority=priority, update_fields=update_fields, **kwargs) + + if is_update and self.user.local: # remove all caches related to all editions of this book cache.delete_many( [ @@ -111,7 +125,6 @@ class ShelfBook(CollectionItemMixin, BookWyrmModel): for book in self.book.parent_work.editions.all() ] ) - super().save(*args, priority=priority, **kwargs) def delete(self, *args, **kwargs): if self.id and self.user.local: diff --git a/bookwyrm/models/site.py b/bookwyrm/models/site.py index b962d597b..6c2a73422 100644 --- a/bookwyrm/models/site.py +++ b/bookwyrm/models/site.py @@ -1,5 +1,6 @@ """ the particulars for this instance of BookWyrm """ import datetime +from typing import Optional, Iterable from urllib.parse import urljoin import uuid @@ -10,8 +11,12 @@ from django.dispatch import receiver from django.utils import timezone from model_utils import FieldTracker +from bookwyrm.connectors.abstract_connector import get_data from bookwyrm.preview_images import generate_site_preview_image_task -from bookwyrm.settings import DOMAIN, ENABLE_PREVIEW_IMAGES, STATIC_FULL_URL +from bookwyrm.settings import BASE_URL, ENABLE_PREVIEW_IMAGES, STATIC_FULL_URL +from bookwyrm.settings import RELEASE_API +from bookwyrm.tasks import app, MISC +from bookwyrm.utils.db import add_update_fields from .base_model import BookWyrmModel, new_access_code from .user import User from .fields import get_absolute_url @@ -45,7 +50,7 @@ class SiteSettings(SiteModel): default_theme = models.ForeignKey( "Theme", null=True, blank=True, on_delete=models.SET_NULL ) - version = models.CharField(null=True, blank=True, max_length=10) + available_version = models.CharField(null=True, blank=True, max_length=10) # admin setup options install_mode = models.BooleanField(default=False) @@ -96,6 +101,8 @@ class SiteSettings(SiteModel): imports_enabled = models.BooleanField(default=True) import_size_limit = models.IntegerField(default=0) import_limit_reset = models.IntegerField(default=0) + user_exports_enabled = models.BooleanField(default=False) + user_import_time_limit = models.IntegerField(default=48) field_tracker = FieldTracker(fields=["name", "instance_tagline", "logo"]) @@ -131,16 +138,19 @@ class SiteSettings(SiteModel): return get_absolute_url(uploaded) 
return urljoin(STATIC_FULL_URL, default_path) - def save(self, *args, **kwargs): + def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs): """if require_confirm_email is disabled, make sure no users are pending, if enabled, make sure invite_question_text is not empty""" + if not self.invite_question_text: + self.invite_question_text = "What is your favourite book?" + update_fields = add_update_fields(update_fields, "invite_question_text") + + super().save(*args, update_fields=update_fields, **kwargs) + if not self.require_confirm_email: User.objects.filter(is_active=False, deactivation_reason="pending").update( is_active=True, deactivation_reason=None ) - if not self.invite_question_text: - self.invite_question_text = "What is your favourite book?" - super().save(*args, **kwargs) class Theme(SiteModel): @@ -183,7 +193,7 @@ class SiteInvite(models.Model): @property def link(self): """formats the invite link""" - return f"https://{DOMAIN}/invite/{self.code}" + return f"{BASE_URL}/invite/{self.code}" class InviteRequest(BookWyrmModel): @@ -230,7 +240,7 @@ class PasswordReset(models.Model): @property def link(self): """formats the invite link""" - return f"https://{DOMAIN}/password-reset/{self.code}" + return f"{BASE_URL}/password-reset/{self.code}" # pylint: disable=unused-argument @@ -243,3 +253,14 @@ def preview_image(instance, *args, **kwargs): if len(changed_fields) > 0: generate_site_preview_image_task.delay() + + +@app.task(queue=MISC) +def check_for_updates_task(): + """See if git remote knows about a new version""" + site = SiteSettings.objects.get() + release = get_data(RELEASE_API, timeout=3) + available_version = release.get("tag_name", None) + if available_version: + site.available_version = available_version + site.save(update_fields=["available_version"]) diff --git a/bookwyrm/models/status.py b/bookwyrm/models/status.py index cc44fe2bf..9dc60e647 100644 --- a/bookwyrm/models/status.py +++ b/bookwyrm/models/status.py @@ -1,6 +1,6 @@ """ models for storing different kinds of Activities """ from dataclasses import MISSING -from typing import Optional +from typing import Optional, Iterable import re from django.apps import apps @@ -12,12 +12,15 @@ from django.db.models import Q from django.dispatch import receiver from django.template.loader import get_template from django.utils import timezone +from django.utils.translation import gettext_lazy as _ +from django.utils.translation import ngettext_lazy from model_utils import FieldTracker from model_utils.managers import InheritanceManager from bookwyrm import activitypub from bookwyrm.preview_images import generate_edition_preview_image_task from bookwyrm.settings import ENABLE_PREVIEW_IMAGES +from bookwyrm.utils.db import add_update_fields from .activitypub_mixin import ActivitypubMixin, ActivityMixin from .activitypub_mixin import OrderedCollectionPageMixin from .base_model import BookWyrmModel @@ -78,13 +81,18 @@ class Status(OrderedCollectionPageMixin, BookWyrmModel): """default sorting""" ordering = ("-published_date",) + indexes = [ + models.Index(fields=["remote_id"]), + models.Index(fields=["thread_id"]), + ] - def save(self, *args, **kwargs): + def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs): """save and notify""" - if self.reply_parent: + if self.thread_id is None and self.reply_parent: self.thread_id = self.reply_parent.thread_id or self.reply_parent_id + update_fields = add_update_fields(update_fields, "thread_id") - super().save(*args, **kwargs) + 
super().save(*args, update_fields=update_fields, **kwargs) if not self.reply_parent: self.thread_id = self.id @@ -107,14 +115,14 @@ class Status(OrderedCollectionPageMixin, BookWyrmModel): @property def recipients(self): """tagged users who definitely need to get this status in broadcast""" - mentions = [u for u in self.mention_users.all() if not u.local] + mentions = {u for u in self.mention_users.all() if not u.local} if ( hasattr(self, "reply_parent") and self.reply_parent and not self.reply_parent.user.local ): - mentions.append(self.reply_parent.user) - return list(set(mentions)) + mentions.add(self.reply_parent.user) + return list(mentions) @classmethod def ignore_activity( @@ -178,6 +186,24 @@ class Status(OrderedCollectionPageMixin, BookWyrmModel): """you can't boost dms""" return self.privacy in ["unlisted", "public"] + @property + def page_title(self): + """title of the page when only this status is shown""" + return _("%(display_name)s's status") % {"display_name": self.user.display_name} + + @property + def page_description(self): + """description of the page in meta tags when only this status is shown""" + return None + + @property + def page_image(self): + """image to use as preview in meta tags when only this status is shown""" + if self.mention_books.exists(): + book = self.mention_books.first() + return book.preview_image or book.cover + return self.user.preview_image + def to_replies(self, **kwargs): """helper function for loading AP serialized replies to a status""" return self.to_ordered_collection( @@ -301,6 +327,10 @@ class BookStatus(Status): abstract = True + @property + def page_image(self): + return self.book.preview_image or self.book.cover or super().page_image + class Comment(BookStatus): """like a review but without a rating and transient""" @@ -332,17 +362,26 @@ class Comment(BookStatus): activity_serializer = activitypub.Comment + @property + def page_title(self): + return _("%(display_name)s's comment on %(book_title)s") % { + "display_name": self.user.display_name, + "book_title": self.book.title, + } + class Quotation(BookStatus): """like a review but without a rating and transient""" quote = fields.HtmlField() raw_quote = models.TextField(blank=True, null=True) - position = models.IntegerField( - validators=[MinValueValidator(0)], null=True, blank=True + position = models.TextField( + null=True, + blank=True, ) - endposition = models.IntegerField( - validators=[MinValueValidator(0)], null=True, blank=True + endposition = models.TextField( + null=True, + blank=True, ) position_mode = models.CharField( max_length=3, @@ -355,10 +394,10 @@ class Quotation(BookStatus): def _format_position(self) -> Optional[str]: """serialize page position""" beg = self.position - end = self.endposition or 0 + end = self.endposition if self.position_mode != "PG" or not beg: return None - return f"pp. {beg}-{end}" if end > beg else f"p. {beg}" + return f"pp. {beg}-{end}" if end else f"p. 
{beg}" @property def pure_content(self): @@ -374,6 +413,13 @@ class Quotation(BookStatus): activity_serializer = activitypub.Quotation + @property + def page_title(self): + return _("%(display_name)s's quote from %(book_title)s") % { + "display_name": self.user.display_name, + "book_title": self.book.title, + } + class Review(BookStatus): """a book review""" @@ -403,14 +449,22 @@ class Review(BookStatus): """indicate the book in question for mastodon (or w/e) users""" return self.content + @property + def page_title(self): + return _("%(display_name)s's review of %(book_title)s") % { + "display_name": self.user.display_name, + "book_title": self.book.title, + } + activity_serializer = activitypub.Review pure_type = "Article" def save(self, *args, **kwargs): """clear rating caches""" + super().save(*args, **kwargs) + if self.book.parent_work: cache.delete(f"book-rating-{self.book.parent_work.id}") - super().save(*args, **kwargs) class ReviewRating(Review): @@ -426,6 +480,18 @@ class ReviewRating(Review): template = get_template("snippets/generated_status/rating.html") return template.render({"book": self.book, "rating": self.rating}).strip() + @property + def page_description(self): + return ngettext_lazy( + "%(display_name)s rated %(book_title)s: %(display_rating).1f star", + "%(display_name)s rated %(book_title)s: %(display_rating).1f stars", + "display_rating", + ) % { + "display_name": self.user.display_name, + "book_title": self.book.title, + "display_rating": self.rating, + } + activity_serializer = activitypub.Rating pure_type = "Note" diff --git a/bookwyrm/models/user.py b/bookwyrm/models/user.py index 75ca1d527..ed5c59e9c 100644 --- a/bookwyrm/models/user.py +++ b/bookwyrm/models/user.py @@ -1,28 +1,31 @@ """ database schema for user data """ +import datetime import re +import zoneinfo +from typing import Optional, Iterable from urllib.parse import urlparse from uuid import uuid4 from django.apps import apps from django.contrib.auth.models import AbstractUser -from django.contrib.postgres.fields import ArrayField, CICharField +from django.contrib.postgres.fields import ArrayField as DjangoArrayField from django.core.exceptions import PermissionDenied, ObjectDoesNotExist from django.dispatch import receiver from django.db import models, transaction, IntegrityError from django.utils import timezone from django.utils.translation import gettext_lazy as _ from model_utils import FieldTracker -import pytz from bookwyrm import activitypub from bookwyrm.connectors import get_data, ConnectorException from bookwyrm.models.shelf import Shelf from bookwyrm.models.status import Status from bookwyrm.preview_images import generate_user_preview_image_task -from bookwyrm.settings import DOMAIN, ENABLE_PREVIEW_IMAGES, USE_HTTPS, LANGUAGES +from bookwyrm.settings import BASE_URL, ENABLE_PREVIEW_IMAGES, LANGUAGES from bookwyrm.signatures import create_key_pair from bookwyrm.tasks import app, MISC from bookwyrm.utils import regex +from bookwyrm.utils.db import add_update_fields from .activitypub_mixin import OrderedCollectionPageMixin, ActivitypubMixin from .base_model import BookWyrmModel, DeactivationReason, new_access_code from .federated_server import FederatedServer @@ -42,12 +45,6 @@ def get_feed_filter_choices(): return [f[0] for f in FeedFilterChoices] -def site_link(): - """helper for generating links to the site""" - protocol = "https" if USE_HTTPS else "http" - return f"{protocol}://{DOMAIN}" - - # pylint: disable=too-many-public-methods class User(OrderedCollectionPageMixin, AbstractUser): 
"""a user who wants to read books""" @@ -81,11 +78,12 @@ class User(OrderedCollectionPageMixin, AbstractUser): summary = fields.HtmlField(null=True, blank=True) local = models.BooleanField(default=False) bookwyrm_user = fields.BooleanField(default=True) - localname = CICharField( + localname = models.CharField( max_length=255, null=True, unique=True, validators=[fields.validate_localname], + db_collation="case_insensitive", ) # name is your display name, which you can change at will name = fields.CharField(max_length=100, null=True, blank=True) @@ -162,7 +160,7 @@ class User(OrderedCollectionPageMixin, AbstractUser): show_guided_tour = models.BooleanField(default=True) # feed options - feed_status_types = ArrayField( + feed_status_types = DjangoArrayField( models.CharField(max_length=10, blank=False, choices=FeedFilterChoices), size=8, default=get_feed_filter_choices, @@ -171,8 +169,8 @@ class User(OrderedCollectionPageMixin, AbstractUser): summary_keys = models.JSONField(null=True) preferred_timezone = models.CharField( - choices=[(str(tz), str(tz)) for tz in pytz.all_timezones], - default=str(pytz.utc), + choices=[(str(tz), str(tz)) for tz in sorted(zoneinfo.available_timezones())], + default=str(datetime.timezone.utc), max_length=255, ) preferred_language = models.CharField( @@ -198,6 +196,14 @@ class User(OrderedCollectionPageMixin, AbstractUser): hotp_secret = models.CharField(max_length=32, default=None, blank=True, null=True) hotp_count = models.IntegerField(default=0, blank=True, null=True) + class Meta(AbstractUser.Meta): + """indexes""" + + indexes = [ + models.Index(fields=["username"]), + models.Index(fields=["is_active", "local"]), + ] + @property def active_follower_requests(self): """Follow requests from active users""" @@ -206,8 +212,7 @@ class User(OrderedCollectionPageMixin, AbstractUser): @property def confirmation_link(self): """helper for generating confirmation links""" - link = site_link() - return f"{link}/confirm-email/{self.confirmation_code}" + return f"{BASE_URL}/confirm-email/{self.confirmation_code}" @property def following_link(self): @@ -326,6 +331,7 @@ class User(OrderedCollectionPageMixin, AbstractUser): "https://w3id.org/security/v1", { "manuallyApprovesFollowers": "as:manuallyApprovesFollowers", + "Hashtag": "as:Hashtag", "schema": "http://schema.org#", "PropertyValue": "schema:PropertyValue", "value": "schema:value", @@ -335,13 +341,14 @@ class User(OrderedCollectionPageMixin, AbstractUser): ] return activity_object - def save(self, *args, **kwargs): + def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs): """populate fields for new local users""" created = not bool(self.id) if not self.local and not re.match(regex.FULL_USERNAME, self.username): # generate a username that uses the domain (webfinger format) actor_parts = urlparse(self.remote_id) - self.username = f"{self.username}@{actor_parts.netloc}" + self.username = f"{self.username}@{actor_parts.hostname}" + update_fields = add_update_fields(update_fields, "username") # this user already exists, no need to populate fields if not created: @@ -350,26 +357,34 @@ class User(OrderedCollectionPageMixin, AbstractUser): elif not self.deactivation_date: self.deactivation_date = timezone.now() - super().save(*args, **kwargs) + super().save(*args, update_fields=update_fields, **kwargs) return # this is a new remote user, we need to set their remote server field if not self.local: - super().save(*args, **kwargs) + super().save(*args, update_fields=update_fields, **kwargs) 
transaction.on_commit(lambda: set_remote_server(self.id)) return with transaction.atomic(): # populate fields for local users - link = site_link() - self.remote_id = f"{link}/user/{self.localname}" + self.remote_id = f"{BASE_URL}/user/{self.localname}" self.followers_url = f"{self.remote_id}/followers" self.inbox = f"{self.remote_id}/inbox" - self.shared_inbox = f"{link}/inbox" + self.shared_inbox = f"{BASE_URL}/inbox" self.outbox = f"{self.remote_id}/outbox" + update_fields = add_update_fields( + update_fields, + "remote_id", + "followers_url", + "inbox", + "shared_inbox", + "outbox", + ) + # an id needs to be set before we can proceed with related models - super().save(*args, **kwargs) + super().save(*args, update_fields=update_fields, **kwargs) # make users editors by default try: @@ -509,18 +524,44 @@ class KeyPair(ActivitypubMixin, BookWyrmModel): activity_serializer = activitypub.PublicKey serialize_reverse_fields = [("owner", "owner", "id")] + class Meta: + """indexes""" + + indexes = [ + models.Index(fields=["remote_id"]), + ] + def get_remote_id(self): # self.owner is set by the OneToOneField on User return f"{self.owner.remote_id}/#main-key" - def save(self, *args, **kwargs): + def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs): """create a key pair""" # no broadcasting happening here if "broadcast" in kwargs: del kwargs["broadcast"] + if not self.public_key: self.private_key, self.public_key = create_key_pair() - return super().save(*args, **kwargs) + update_fields = add_update_fields( + update_fields, "private_key", "public_key" + ) + + super().save(*args, update_fields=update_fields, **kwargs) + + +@app.task(queue=MISC) +def erase_user_data(user_id): + """Erase any custom data about this user asynchronously + This is for deleted historical user data that pre-dates data + being cleared automatically""" + user = User.objects.get(id=user_id) + user.erase_user_data() + user.save( + broadcast=False, + update_fields=["email", "avatar", "preview_image", "summary", "name"], + ) + user.erase_user_statuses(broadcast=False) @app.task(queue=MISC) @@ -529,7 +570,7 @@ def set_remote_server(user_id, allow_external_connections=False): user = User.objects.get(id=user_id) actor_parts = urlparse(user.remote_id) federated_server = get_or_create_remote_server( - actor_parts.netloc, allow_external_connections=allow_external_connections + actor_parts.hostname, allow_external_connections=allow_external_connections ) # if we were unable to find the server, we need to create a new entry for it if not federated_server: diff --git a/bookwyrm/preview_images.py b/bookwyrm/preview_images.py index aba372abc..a213490ab 100644 --- a/bookwyrm/preview_images.py +++ b/bookwyrm/preview_images.py @@ -1,4 +1,5 @@ """ Generate social media preview images for twitter/mastodon/etc """ + import math import os import textwrap @@ -42,8 +43,8 @@ def get_imagefont(name, size): return ImageFont.truetype(path, size) except KeyError: logger.error("Font %s not found in config", name) - except OSError: - logger.error("Could not load font %s from file", name) + except OSError as err: + logger.error("Could not load font %s from file: %s", name, err) return ImageFont.load_default() @@ -59,7 +60,7 @@ def get_font(weight, size=28): font.set_variation_by_name("Bold") if weight == "regular": font.set_variation_by_name("Regular") - except AttributeError: + except OSError: pass return font @@ -174,11 +175,13 @@ def generate_instance_layer(content_width): site = models.SiteSettings.objects.get() if 
site.logo_small: - logo_img = Image.open(site.logo_small) + with Image.open(site.logo_small) as logo_img: + logo_img.load() else: try: static_path = os.path.join(settings.STATIC_ROOT, "images/logo-small.png") - logo_img = Image.open(static_path) + with Image.open(static_path) as logo_img: + logo_img.load() except FileNotFoundError: logo_img = None @@ -210,18 +213,9 @@ def generate_instance_layer(content_width): def generate_rating_layer(rating, content_width): """Places components for rating preview""" - try: - icon_star_full = Image.open( - os.path.join(settings.STATIC_ROOT, "images/icons/star-full.png") - ) - icon_star_empty = Image.open( - os.path.join(settings.STATIC_ROOT, "images/icons/star-empty.png") - ) - icon_star_half = Image.open( - os.path.join(settings.STATIC_ROOT, "images/icons/star-half.png") - ) - except FileNotFoundError: - return None + path_star_full = os.path.join(settings.STATIC_ROOT, "images/icons/star-full.png") + path_star_empty = os.path.join(settings.STATIC_ROOT, "images/icons/star-empty.png") + path_star_half = os.path.join(settings.STATIC_ROOT, "images/icons/star-half.png") icon_size = 64 icon_margin = 10 @@ -236,17 +230,23 @@ def generate_rating_layer(rating, content_width): position_x = 0 - for _ in range(math.floor(rating)): - rating_layer_mask.alpha_composite(icon_star_full, (position_x, 0)) - position_x = position_x + icon_size + icon_margin + try: + with Image.open(path_star_full) as icon_star_full: + for _ in range(math.floor(rating)): + rating_layer_mask.alpha_composite(icon_star_full, (position_x, 0)) + position_x = position_x + icon_size + icon_margin - if math.floor(rating) != math.ceil(rating): - rating_layer_mask.alpha_composite(icon_star_half, (position_x, 0)) - position_x = position_x + icon_size + icon_margin + if math.floor(rating) != math.ceil(rating): + with Image.open(path_star_half) as icon_star_half: + rating_layer_mask.alpha_composite(icon_star_half, (position_x, 0)) + position_x = position_x + icon_size + icon_margin - for _ in range(5 - math.ceil(rating)): - rating_layer_mask.alpha_composite(icon_star_empty, (position_x, 0)) - position_x = position_x + icon_size + icon_margin + with Image.open(path_star_empty) as icon_star_empty: + for _ in range(5 - math.ceil(rating)): + rating_layer_mask.alpha_composite(icon_star_empty, (position_x, 0)) + position_x = position_x + icon_size + icon_margin + except FileNotFoundError: + return None rating_layer_mask = rating_layer_mask.getchannel("A") rating_layer_mask = ImageOps.invert(rating_layer_mask) @@ -289,7 +289,8 @@ def generate_preview_image( texts = texts or {} # Cover try: - inner_img_layer = Image.open(picture) + with Image.open(picture) as inner_img_layer: + inner_img_layer.load() inner_img_layer.thumbnail( (inner_img_width, inner_img_height), Image.Resampling.LANCZOS ) diff --git a/bookwyrm/settings.py b/bookwyrm/settings.py index 4cecc4df6..c075c9c87 100644 --- a/bookwyrm/settings.py +++ b/bookwyrm/settings.py @@ -19,7 +19,6 @@ DOMAIN = env("DOMAIN") with open("VERSION", encoding="utf-8") as f: version = f.read() version = version.replace("\n", "") -f.close() VERSION = version @@ -30,8 +29,11 @@ RELEASE_API = env( PAGE_LENGTH = env.int("PAGE_LENGTH", 15) DEFAULT_LANGUAGE = env("DEFAULT_LANGUAGE", "English") +# TODO: extend maximum age to 1 year once termination of active sessions +# is implemented (see bookwyrm-social#2278, bookwyrm-social#3082). 
+SESSION_COOKIE_AGE = env.int("SESSION_COOKIE_AGE", 3600 * 24 * 30) # 1 month -JS_CACHE = "ac315a3b" +JS_CACHE = "8a89cad7" # email EMAIL_BACKEND = env("EMAIL_BACKEND", "django.core.mail.backends.smtp.EmailBackend") @@ -99,11 +101,14 @@ INSTALLED_APPS = [ "django.contrib.messages", "django.contrib.staticfiles", "django.contrib.humanize", + "oauth2_provider", + "file_resubmit", "sass_processor", "bookwyrm", "celery", "django_celery_beat", "imagekit", + "pgtrigger", "storages", ] @@ -119,6 +124,7 @@ MIDDLEWARE = [ "bookwyrm.middleware.IPBlocklistMiddleware", "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", + "bookwyrm.middleware.FileTooBig", ] ROOT_URLCONF = "bookwyrm.urls" @@ -242,17 +248,22 @@ if env.bool("USE_DUMMY_CACHE", False): CACHES = { "default": { "BACKEND": "django.core.cache.backends.dummy.DummyCache", - } + }, + "file_resubmit": { + "BACKEND": "django.core.cache.backends.dummy.DummyCache", + "LOCATION": "/tmp/file_resubmit_tests/", + }, } else: CACHES = { "default": { - "BACKEND": "django_redis.cache.RedisCache", + "BACKEND": "django.core.cache.backends.redis.RedisCache", "LOCATION": REDIS_ACTIVITY_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - }, - } + }, + "file_resubmit": { + "BACKEND": "django.core.cache.backends.filebased.FileBasedCache", + "LOCATION": "/tmp/file_resubmit/", + }, } SESSION_ENGINE = "django.contrib.sessions.backends.cache" @@ -308,6 +319,7 @@ LANGUAGES = [ ("eu-es", _("Euskara (Basque)")), ("gl-es", _("Galego (Galician)")), ("it-it", _("Italiano (Italian)")), + ("ko-kr", _("한국어 (Korean)")), ("fi-fi", _("Suomi (Finnish)")), ("fr-fr", _("Français (French)")), ("lt-lt", _("Lietuvių (Lithuanian)")), @@ -318,6 +330,7 @@ LANGUAGES = [ ("pt-pt", _("Português Europeu (European Portuguese)")), ("ro-ro", _("Română (Romanian)")), ("sv-se", _("Svenska (Swedish)")), + ("uk-ua", _("Українська (Ukrainian)")), ("zh-hans", _("简体中文 (Simplified Chinese)")), ("zh-hant", _("繁體中文 (Traditional Chinese)")), ] @@ -331,35 +344,37 @@ TIME_ZONE = "UTC" USE_I18N = True -USE_L10N = True - USE_TZ = True - -agent = requests.utils.default_user_agent() -USER_AGENT = f"{agent} (BookWyrm/{VERSION}; +https://{DOMAIN}/)" - # Imagekit generated thumbnails ENABLE_THUMBNAIL_GENERATION = env.bool("ENABLE_THUMBNAIL_GENERATION", False) IMAGEKIT_CACHEFILE_DIR = "thumbnails" IMAGEKIT_DEFAULT_CACHEFILE_STRATEGY = "bookwyrm.thumbnail_generation.Strategy" -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/3.2/howto/static-files/ - PROJECT_DIR = os.path.dirname(os.path.abspath(__file__)) CSP_ADDITIONAL_HOSTS = env.list("CSP_ADDITIONAL_HOSTS", []) -# Storage - PROTOCOL = "http" if USE_HTTPS: PROTOCOL = "https" SESSION_COOKIE_SECURE = True CSRF_COOKIE_SECURE = True +PORT = env.int("PORT", 443 if USE_HTTPS else 80) +if (USE_HTTPS and PORT == 443) or (not USE_HTTPS and PORT == 80): + NETLOC = DOMAIN +else: + NETLOC = f"{DOMAIN}:{PORT}" +BASE_URL = f"{PROTOCOL}://{NETLOC}" +CSRF_TRUSTED_ORIGINS = [BASE_URL] + +USER_AGENT = f"BookWyrm (BookWyrm/{VERSION}; +{BASE_URL})" + +# Storage + USE_S3 = env.bool("USE_S3", False) USE_AZURE = env.bool("USE_AZURE", False) +S3_SIGNED_URL_EXPIRY = env.int("S3_SIGNED_URL_EXPIRY", 900) if USE_S3: # AWS settings @@ -371,44 +386,117 @@ if USE_S3: AWS_S3_ENDPOINT_URL = env("AWS_S3_ENDPOINT_URL", None) AWS_DEFAULT_ACL = "public-read" AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"} + AWS_S3_URL_PROTOCOL = env("AWS_S3_URL_PROTOCOL", f"{PROTOCOL}:") + # 
Storages + STORAGES = { + "default": { + "BACKEND": "storages.backends.s3.S3Storage", + "OPTIONS": { + "location": "images", + "default_acl": "public-read", + "file_overwrite": False, + }, + }, + "staticfiles": { + "BACKEND": "storages.backends.s3.S3Storage", + "OPTIONS": { + "location": "static", + "default_acl": "public-read", + }, + }, + "exports": { + "BACKEND": "storages.backends.s3.S3Storage", + "OPTIONS": { + "location": "images", + "default_acl": None, + "file_overwrite": False, + }, + }, + } # S3 Static settings STATIC_LOCATION = "static" - STATIC_URL = f"{PROTOCOL}://{AWS_S3_CUSTOM_DOMAIN}/{STATIC_LOCATION}/" - STATICFILES_STORAGE = "bookwyrm.storage_backends.StaticStorage" + STATIC_URL = f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}/{STATIC_LOCATION}/" + STATIC_FULL_URL = STATIC_URL # S3 Media settings MEDIA_LOCATION = "images" - MEDIA_URL = f"{PROTOCOL}://{AWS_S3_CUSTOM_DOMAIN}/{MEDIA_LOCATION}/" + MEDIA_URL = f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}/{MEDIA_LOCATION}/" MEDIA_FULL_URL = MEDIA_URL - STATIC_FULL_URL = STATIC_URL - DEFAULT_FILE_STORAGE = "bookwyrm.storage_backends.ImagesStorage" - CSP_DEFAULT_SRC = ["'self'", AWS_S3_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS - CSP_SCRIPT_SRC = ["'self'", AWS_S3_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS + # Content Security Policy + CSP_DEFAULT_SRC = [ + "'self'", + f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}" + if AWS_S3_CUSTOM_DOMAIN + else None, + ] + CSP_ADDITIONAL_HOSTS + CSP_SCRIPT_SRC = [ + "'self'", + f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}" + if AWS_S3_CUSTOM_DOMAIN + else None, + ] + CSP_ADDITIONAL_HOSTS elif USE_AZURE: + # Azure settings AZURE_ACCOUNT_NAME = env("AZURE_ACCOUNT_NAME") AZURE_ACCOUNT_KEY = env("AZURE_ACCOUNT_KEY") AZURE_CONTAINER = env("AZURE_CONTAINER") AZURE_CUSTOM_DOMAIN = env("AZURE_CUSTOM_DOMAIN") + # Storages + STORAGES = { + "default": { + "BACKEND": "storages.backends.azure_storage.AzureStorage", + "OPTIONS": { + "location": "images", + "overwrite_files": False, + }, + }, + "staticfiles": { + "BACKEND": "storages.backends.azure_storage.AzureStorage", + "OPTIONS": { + "location": "static", + }, + }, + "exports": { + "BACKEND": None, # not implemented yet + }, + } # Azure Static settings STATIC_LOCATION = "static" STATIC_URL = ( f"{PROTOCOL}://{AZURE_CUSTOM_DOMAIN}/{AZURE_CONTAINER}/{STATIC_LOCATION}/" ) - STATICFILES_STORAGE = "bookwyrm.storage_backends.AzureStaticStorage" + STATIC_FULL_URL = STATIC_URL # Azure Media settings MEDIA_LOCATION = "images" MEDIA_URL = ( f"{PROTOCOL}://{AZURE_CUSTOM_DOMAIN}/{AZURE_CONTAINER}/{MEDIA_LOCATION}/" ) MEDIA_FULL_URL = MEDIA_URL - STATIC_FULL_URL = STATIC_URL - DEFAULT_FILE_STORAGE = "bookwyrm.storage_backends.AzureImagesStorage" + # Content Security Policy CSP_DEFAULT_SRC = ["'self'", AZURE_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS CSP_SCRIPT_SRC = ["'self'", AZURE_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS else: + # Storages + STORAGES = { + "default": { + "BACKEND": "django.core.files.storage.FileSystemStorage", + }, + "staticfiles": { + "BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage", + }, + "exports": { + "BACKEND": "django.core.files.storage.FileSystemStorage", + "OPTIONS": { + "location": "exports", + }, + }, + } + # Static settings STATIC_URL = "/static/" + STATIC_FULL_URL = BASE_URL + STATIC_URL + # Media settings MEDIA_URL = "/images/" - MEDIA_FULL_URL = f"{PROTOCOL}://{DOMAIN}{MEDIA_URL}" - STATIC_FULL_URL = f"{PROTOCOL}://{DOMAIN}{STATIC_URL}" + MEDIA_FULL_URL = BASE_URL + MEDIA_URL + # Content Security Policy CSP_DEFAULT_SRC = 
["'self'"] + CSP_ADDITIONAL_HOSTS CSP_SCRIPT_SRC = ["'self'"] + CSP_ADDITIONAL_HOSTS @@ -431,3 +519,7 @@ if HTTP_X_FORWARDED_PROTO: # Do not change this setting unless you already have an existing # user with the same username - in which case you should change it! INSTANCE_ACTOR_USERNAME = "bookwyrm.instance.actor" + +# We only allow specifying DATA_UPLOAD_MAX_MEMORY_SIZE in MiB from .env +# (note the difference in variable names). +DATA_UPLOAD_MAX_MEMORY_SIZE = env.int("DATA_UPLOAD_MAX_MEMORY_MiB", 100) << 20 diff --git a/bookwyrm/static/js/autocomplete.js b/bookwyrm/static/js/autocomplete.js index a98cd9634..6836d356d 100644 --- a/bookwyrm/static/js/autocomplete.js +++ b/bookwyrm/static/js/autocomplete.js @@ -111,6 +111,10 @@ const tries = { }, }, f: { + b: { + 2: "FB2", + 3: "FB3", + }, l: { a: { c: "FLAC", diff --git a/bookwyrm/storage_backends.py b/bookwyrm/storage_backends.py deleted file mode 100644 index 6dd9f522c..000000000 --- a/bookwyrm/storage_backends.py +++ /dev/null @@ -1,63 +0,0 @@ -"""Handles backends for storages""" -import os -from tempfile import SpooledTemporaryFile -from storages.backends.s3boto3 import S3Boto3Storage -from storages.backends.azure_storage import AzureStorage - - -class StaticStorage(S3Boto3Storage): # pylint: disable=abstract-method - """Storage class for Static contents""" - - location = "static" - default_acl = "public-read" - - -class ImagesStorage(S3Boto3Storage): # pylint: disable=abstract-method - """Storage class for Image files""" - - location = "images" - default_acl = "public-read" - file_overwrite = False - - """ - This is our custom version of S3Boto3Storage that fixes a bug in - boto3 where the passed in file is closed upon upload. - From: - https://github.com/matthewwithanm/django-imagekit/issues/391#issuecomment-275367006 - https://github.com/boto/boto3/issues/929 - https://github.com/matthewwithanm/django-imagekit/issues/391 - """ - - def _save(self, name, content): - """ - We create a clone of the content file as when this is passed to - boto3 it wrongly closes the file upon upload where as the storage - backend expects it to still be open - """ - # Seek our content back to the start - content.seek(0, os.SEEK_SET) - - # Create a temporary file that will write to disk after a specified - # size. This file will be automatically deleted when closed by - # boto3 or after exiting the `with` statement if the boto3 is fixed - with SpooledTemporaryFile() as content_autoclose: - - # Write our original content into our copy that will be closed by boto3 - content_autoclose.write(content.read()) - - # Upload the object which will auto close the - # content_autoclose instance - return super()._save(name, content_autoclose) - - -class AzureStaticStorage(AzureStorage): # pylint: disable=abstract-method - """Storage class for Static contents""" - - location = "static" - - -class AzureImagesStorage(AzureStorage): # pylint: disable=abstract-method - """Storage class for Image files""" - - location = "images" - overwrite_files = False diff --git a/bookwyrm/templates/413.html b/bookwyrm/templates/413.html new file mode 100644 index 000000000..a849a764f --- /dev/null +++ b/bookwyrm/templates/413.html @@ -0,0 +1,16 @@ +{% extends 'layout.html' %} +{% load i18n %} + +{% block title %}{% trans "File too large" %}{% endblock %} + +{% block content %} +
+

{% trans "File too large" %}

+

{% trans "The file you are uploading is too large." %}

+

+ {% blocktrans trimmed %} + You can try using a smaller file, or ask your BookWyrm server administrator to increase the DATA_UPLOAD_MAX_MEMORY_SIZE setting. + {% endblocktrans %}

+
+{% endblock %} diff --git a/bookwyrm/templates/about/about.html b/bookwyrm/templates/about/about.html index 6705793d5..ef3f34037 100644 --- a/bookwyrm/templates/about/about.html +++ b/bookwyrm/templates/about/about.html @@ -31,10 +31,10 @@

-
+
{% if superlatives.top_rated %} {% with book=superlatives.top_rated.default_edition rating=superlatives.top_rated.rating %} -
+
@@ -53,7 +53,7 @@ {% if superlatives.wanted %} {% with book=superlatives.wanted.default_edition %} -
+
@@ -72,7 +72,7 @@ {% if superlatives.controversial %} {% with book=superlatives.controversial.default_edition %} -
+
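(Aside, not part of the patch: the new 413 template above pairs with the DATA_UPLOAD_MAX_MEMORY_MiB setting added to settings.py earlier in this diff. A minimal worked sketch of the arithmetic behind two of the new defaults, using the values shown in those hunks:)

```python
# Sketch only: values taken from the settings.py hunks above.
# DATA_UPLOAD_MAX_MEMORY_MiB is read in mebibytes and shifted into bytes;
# a left shift by 20 multiplies by 2**20 (one MiB).
upload_limit_mib = 100
assert upload_limit_mib << 20 == 104_857_600  # bytes handed to Django

# SESSION_COOKIE_AGE defaults to 3600 * 24 * 30 seconds, i.e. 30 days,
# matching the commented .env example of 2592000.
assert 3600 * 24 * 30 == 2_592_000
```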
diff --git a/bookwyrm/templates/author/edit_author.html b/bookwyrm/templates/author/edit_author.html index 12ddc4d28..f3e908c9b 100644 --- a/bookwyrm/templates/author/edit_author.html +++ b/bookwyrm/templates/author/edit_author.html @@ -55,6 +55,8 @@

{{ form.wikipedia_link }}

+

{{ form.wikidata }}

+ {% include 'snippets/form_errors.html' with errors_list=form.wikipedia_link.errors id="desc_wikipedia_link" %}

{{ form.website }}

diff --git a/bookwyrm/templates/book/book.html b/bookwyrm/templates/book/book.html index 8e76fb014..83500a54b 100644 --- a/bookwyrm/templates/book/book.html +++ b/bookwyrm/templates/book/book.html @@ -9,7 +9,8 @@ {% block title %}{{ book|book_title }}{% endblock %} {% block opengraph %} - {% include 'snippets/opengraph.html' with title=book.title description=book|book_description image=book.preview_image %} + {% firstof book.preview_image book.cover as book_image %} + {% include 'snippets/opengraph.html' with title=book.title description=book|book_description image=book_image %} {% endblock %} {% block content %} @@ -44,18 +45,22 @@ {% endif %} {% if book.series %} - + {% spaceless %} {% if book.authors.exists %}
{% endif %} + {% if book.series_number %} + , # + {{ book.series_number }} + {% endif %} + {% endspaceless %} {% endif %}

{% endif %} diff --git a/bookwyrm/templates/confirm_email/confirm_email.html b/bookwyrm/templates/confirm_email/confirm_email.html index abdd3a734..49a1ebd2d 100644 --- a/bookwyrm/templates/confirm_email/confirm_email.html +++ b/bookwyrm/templates/confirm_email/confirm_email.html @@ -6,8 +6,8 @@ {% block content %}

{% trans "Confirm your email address" %}

-
-
+
+

{% trans "A confirmation code has been sent to the email address you used to register your account." %}

diff --git a/bookwyrm/templates/email/html_layout.html b/bookwyrm/templates/email/html_layout.html index b9f88732f..467d6d6e5 100644 --- a/bookwyrm/templates/email/html_layout.html +++ b/bookwyrm/templates/email/html_layout.html @@ -2,10 +2,10 @@
-

{% blocktrans %}BookWyrm hosted on {{ site_name }}{% endblocktrans %}

+

{% blocktrans %}BookWyrm hosted on {{ site_name }}{% endblocktrans %}

{% if user %} -

{% trans "Email preference" %}

+

{% trans "Email preference" %}

{% endif %}
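(Aside, not part of the patch: the model hunks earlier in this diff repeatedly call add_update_fields(update_fields, "some_field", ...) before super().save(). The helper itself lives in bookwyrm/utils/db.py and is not shown here; a hypothetical minimal version consistent with how it is called might look like this, where None still means "save every field":)

```python
from typing import Iterable, Optional


def add_update_fields(
    update_fields: Optional[Iterable[str]], *fields: str
) -> Optional[frozenset]:
    """Sketch: if a save() call restricts update_fields, make sure the fields
    this model just changed are persisted too; None is passed through
    unchanged so a full save stays a full save."""
    if update_fields is None:
        return None
    return frozenset(update_fields) | frozenset(fields)


# e.g. ReadThrough.save() marks "is_active" as modified:
assert add_update_fields(None, "is_active") is None
assert add_update_fields(["finish_date"], "is_active") == {"finish_date", "is_active"}
```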
diff --git a/bookwyrm/templates/email/invite/html_content.html b/bookwyrm/templates/email/invite/html_content.html index adc993b7b..9d2cda364 100644 --- a/bookwyrm/templates/email/invite/html_content.html +++ b/bookwyrm/templates/email/invite/html_content.html @@ -12,6 +12,6 @@

{% url 'code-of-conduct' as coc_path %} {% url 'about' as about_path %} - {% blocktrans %}Learn more about {{ site_name }}.{% endblocktrans %} + {% blocktrans %}Learn more about {{ site_name }}.{% endblocktrans %}

{% endblock %} diff --git a/bookwyrm/templates/email/invite/text_content.html b/bookwyrm/templates/email/invite/text_content.html index 26dcd1720..05fe91456 100644 --- a/bookwyrm/templates/email/invite/text_content.html +++ b/bookwyrm/templates/email/invite/text_content.html @@ -5,6 +5,6 @@ {{ invite_link }} -{% blocktrans %}Learn more about {{ site_name }}:{% endblocktrans %} https://{{ domain }}{% url 'about' %} +{% blocktrans %}Learn more about {{ site_name }}:{% endblocktrans %} {{ base_url }}{% url 'about' %} {% endblock %} diff --git a/bookwyrm/templates/feed/feed.html b/bookwyrm/templates/feed/feed.html index 7ecf10b70..1b6cf29ff 100644 --- a/bookwyrm/templates/feed/feed.html +++ b/bookwyrm/templates/feed/feed.html @@ -41,7 +41,7 @@ {% endif %} - {% if annual_summary_year and tab.key == 'home' %} + {% if annual_summary_year and tab.key == 'home' and has_summary_read_throughs %}
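(Aside, not part of the patch: the invite email templates above now build links from {{ base_url }} instead of hard-coding https://{{ domain }}. The value comes from the BASE_URL construction in the settings.py hunk earlier in this diff; a rough sketch with assumed example values, where DOMAIN and PORT are placeholders:)

```python
# Sketch of the BASE_URL logic from settings.py, with assumed example values.
USE_HTTPS = False
DOMAIN = "bookwyrm.example"   # placeholder domain
PORT = 1333                   # matches the commented .env example

PROTOCOL = "https" if USE_HTTPS else "http"

# the port is omitted when it is the default for the protocol (80 or 443)
if (USE_HTTPS and PORT == 443) or (not USE_HTTPS and PORT == 80):
    NETLOC = DOMAIN
else:
    NETLOC = f"{DOMAIN}:{PORT}"

BASE_URL = f"{PROTOCOL}://{NETLOC}"
assert BASE_URL == "http://bookwyrm.example:1333"
```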