Mirror of https://github.com/bookwyrm-social/bookwyrm.git, synced 2024-11-22 09:31:08 +00:00

Commit 839ab2fafd: Merge branch 'main' into move-fix
315 changed files with 19,376 additions and 4,583 deletions
18  .env.example

@@ -71,14 +71,20 @@ ENABLE_THUMBNAIL_GENERATION=true
USE_S3=false
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
# seconds for signed S3 urls to expire
# this is currently only used for user export files
S3_SIGNED_URL_EXPIRY=900

# Commented are example values if you use a non-AWS, S3-compatible service
# AWS S3 should work with only AWS_STORAGE_BUCKET_NAME and AWS_S3_REGION_NAME
# non-AWS S3-compatible services will need AWS_STORAGE_BUCKET_NAME,
# along with both AWS_S3_CUSTOM_DOMAIN and AWS_S3_ENDPOINT_URL
# along with both AWS_S3_CUSTOM_DOMAIN and AWS_S3_ENDPOINT_URL.
# AWS_S3_URL_PROTOCOL must end in ":" and defaults to the same protocol as
# the BookWyrm instance ("http:" or "https:", based on USE_SSL).

# AWS_STORAGE_BUCKET_NAME=  # "example-bucket-name"
# AWS_S3_CUSTOM_DOMAIN=None  # "example-bucket-name.s3.fr-par.scw.cloud"
# AWS_S3_URL_PROTOCOL=None  # "http:"
# AWS_S3_REGION_NAME=None  # "fr-par"
# AWS_S3_ENDPOINT_URL=None  # "https://s3.fr-par.scw.cloud"

@@ -137,6 +143,10 @@ TWO_FACTOR_LOGIN_MAX_SECONDS=60
# and AWS_S3_CUSTOM_DOMAIN (if used) are added by default.
# Value should be a comma-separated list of host names.
CSP_ADDITIONAL_HOSTS=
# The last number here means "megabytes"
# Increase if users are having trouble uploading BookWyrm export files.
DATA_UPLOAD_MAX_MEMORY_SIZE = (1024**2 * 100)

# Time before being logged out (in seconds)
# SESSION_COOKIE_AGE=2592000  # current default: 30 days

# Maximum allowed memory for file uploads (increase if users are having trouble
# uploading BookWyrm export files).
# DATA_UPLOAD_MAX_MEMORY_MiB=100
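The hunk above replaces the byte-valued DATA_UPLOAD_MAX_MEMORY_SIZE line with a DATA_UPLOAD_MAX_MEMORY_MiB value expressed in mebibytes. A minimal sketch of how a settings module could read that variable and convert it to the bytes Django expects; the env_int helper is illustrative only, not BookWyrm's actual settings code:

```python
import os

# Hypothetical helper: read an integer environment variable with a default.
def env_int(name: str, default: int) -> int:
    value = os.environ.get(name, "").strip()
    return int(value) if value else default

# Django's DATA_UPLOAD_MAX_MEMORY_SIZE is in bytes, so convert from MiB.
DATA_UPLOAD_MAX_MEMORY_SIZE = env_int("DATA_UPLOAD_MAX_MEMORY_MiB", 100) * 1024**2
```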
17  .github/workflows/black.yml  (vendored, deleted)

@@ -1,17 +0,0 @@
name: Python Formatting (run ./bw-dev black to fix)

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
      - uses: psf/black@22.12.0
        with:
          version: 22.12.0
8  .github/workflows/codeql-analysis.yml  (vendored)

@@ -36,11 +36,11 @@ jobs:
    steps:
    - name: Checkout repository
      uses: actions/checkout@v3
      uses: actions/checkout@v4

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v2
      uses: github/codeql-action/init@v3
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.

@@ -51,7 +51,7 @@ jobs:
    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v2
      uses: github/codeql-action/autobuild@v3

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 https://git.io/JvXDl

@@ -65,4 +65,4 @@ jobs:
    # make release

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v2
      uses: github/codeql-action/analyze@v3
2  .github/workflows/curlylint.yaml  (vendored)

@@ -10,7 +10,7 @@ jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4

      - name: Install curlylint
        run: pip install curlylint
70  .github/workflows/django-tests.yml  (vendored, deleted)

@@ -1,70 +0,0 @@
name: Run Python Tests
on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:

    runs-on: ubuntu-20.04
    services:
      postgres:
        image: postgres:13
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: hunter2
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
      - name: Install Dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Check migrations up-to-date
        run: |
          python ./manage.py makemigrations --check
        env:
          SECRET_KEY: beepbeep
          DOMAIN: your.domain.here
          EMAIL_HOST: ""
          EMAIL_HOST_USER: ""
          EMAIL_HOST_PASSWORD: ""
      - name: Run Tests
        env:
          SECRET_KEY: beepbeep
          DEBUG: false
          USE_HTTPS: true
          DOMAIN: your.domain.here
          BOOKWYRM_DATABASE_BACKEND: postgres
          MEDIA_ROOT: images/
          POSTGRES_PASSWORD: hunter2
          POSTGRES_USER: postgres
          POSTGRES_DB: github_actions
          POSTGRES_HOST: 127.0.0.1
          CELERY_BROKER: ""
          REDIS_BROKER_PORT: 6379
          REDIS_BROKER_PASSWORD: beep
          USE_DUMMY_CACHE: true
          FLOWER_PORT: 8888
          EMAIL_HOST: "smtp.mailgun.org"
          EMAIL_PORT: 587
          EMAIL_HOST_USER: ""
          EMAIL_HOST_PASSWORD: ""
          EMAIL_USE_TLS: true
          ENABLE_PREVIEW_IMAGES: false
          ENABLE_THUMBNAIL_GENERATION: true
          HTTP_X_FORWARDED_PROTO: false
        run: |
          pytest -n 3
5  .github/workflows/lint-frontend.yaml  (vendored)

@@ -19,10 +19,11 @@ jobs:
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4

      - name: Install modules
        run: npm install stylelint stylelint-config-recommended stylelint-config-standard stylelint-order eslint
        # run: npm install stylelint stylelint-config-recommended stylelint-config-standard stylelint-order eslint
        run: npm install eslint@^8.9.0

      # See .stylelintignore for files that are not linted.
      # - name: Run stylelint
50  .github/workflows/mypy.yml  (vendored, deleted)

@@ -1,50 +0,0 @@
name: Mypy

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.9
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
      - name: Install Dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Analysing the code with mypy
        env:
          SECRET_KEY: beepbeep
          DEBUG: false
          USE_HTTPS: true
          DOMAIN: your.domain.here
          BOOKWYRM_DATABASE_BACKEND: postgres
          MEDIA_ROOT: images/
          POSTGRES_PASSWORD: hunter2
          POSTGRES_USER: postgres
          POSTGRES_DB: github_actions
          POSTGRES_HOST: 127.0.0.1
          CELERY_BROKER: ""
          REDIS_BROKER_PORT: 6379
          REDIS_BROKER_PASSWORD: beep
          USE_DUMMY_CACHE: true
          FLOWER_PORT: 8888
          EMAIL_HOST: "smtp.mailgun.org"
          EMAIL_PORT: 587
          EMAIL_HOST_USER: ""
          EMAIL_HOST_PASSWORD: ""
          EMAIL_USE_TLS: true
          ENABLE_PREVIEW_IMAGES: false
          ENABLE_THUMBNAIL_GENERATION: true
          HTTP_X_FORWARDED_PROTO: false
        run: |
          mypy bookwyrm celerywyrm
2  .github/workflows/prettier.yaml  (vendored)

@@ -14,7 +14,7 @@ jobs:
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4

      - name: Install modules
        run: npm install prettier@2.5.1
27  .github/workflows/pylint.yml  (vendored, deleted)

@@ -1,27 +0,0 @@
name: Pylint

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.9
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
      - name: Install Dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Analysing the code with pylint
        run: |
          pylint bookwyrm/
99  .github/workflows/python.yml  (vendored, new file)

@@ -0,0 +1,99 @@
name: Python
on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

# overrides for .env.example
env:
  POSTGRES_HOST: 127.0.0.1
  PGPORT: 5432
  POSTGRES_USER: postgres
  POSTGRES_PASSWORD: hunter2
  POSTGRES_DB: github_actions
  SECRET_KEY: beepbeep
  EMAIL_HOST_USER: ""
  EMAIL_HOST_PASSWORD: ""

jobs:
  pytest:
    name: Tests (pytest)
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:13
        env: # does not inherit from jobs.build.env
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: hunter2
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.11
        uses: actions/setup-python@v5
        with:
          python-version: 3.11
          cache: pip
      - name: Install Dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install pytest-github-actions-annotate-failures
      - name: Set up .env
        run: cp .env.example .env
      - name: Check migrations up-to-date
        run: python ./manage.py makemigrations --check
      - name: Run Tests
        run: pytest -n 3

  pylint:
    name: Linting (pylint)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.11
        uses: actions/setup-python@v5
        with:
          python-version: 3.11
          cache: pip
      - name: Install Dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Analyse code with pylint
        run: pylint bookwyrm/

  mypy:
    name: Typing (mypy)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.11
        uses: actions/setup-python@v5
        with:
          python-version: 3.11
          cache: pip
      - name: Install Dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Set up .env
        run: cp .env.example .env
      - name: Analyse code with mypy
        run: mypy bookwyrm celerywyrm

  black:
    name: Formatting (black; run ./bw-dev black to fix)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
      - uses: psf/black@stable
        with:
          version: "22.*"
5  .gitignore  (vendored)

@@ -16,6 +16,8 @@
# BookWyrm
.env
/images/
/exports/
/static/
bookwyrm/static/css/bookwyrm.css
bookwyrm/static/css/themes/
!bookwyrm/static/css/themes/bookwyrm-*.scss

@@ -36,3 +38,6 @@ nginx/default.conf

#macOS
**/.DS_Store

# Docker
docker-compose.override.yml
@@ -1,4 +1,4 @@
FROM python:3.9
FROM python:3.11

ENV PYTHONUNBUFFERED 1

@@ -10,7 +10,6 @@ BookWyrm is a social network for tracking your reading, talking about books, wri
## Links

[![Mastodon Follow](https://img.shields.io/mastodon/follow/000146121?domain=https%3A%2F%2Ftech.lgbt&style=social)](https://tech.lgbt/@bookwyrm)
[![Twitter Follow](https://img.shields.io/twitter/follow/BookWyrmSocial?style=social)](https://twitter.com/BookWyrmSocial)

- [Project homepage](https://joinbookwyrm.com/)
- [Support](https://patreon.com/bookwyrm)

2  VERSION

@@ -1 +1 @@
0.7.1
0.7.3
@@ -20,6 +20,7 @@ from bookwyrm.tasks import app, MISC

logger = logging.getLogger(__name__)

# pylint: disable=invalid-name
TBookWyrmModel = TypeVar("TBookWyrmModel", bound=base_model.BookWyrmModel)


@@ -423,6 +424,7 @@ def get_activitypub_data(url):
                "Date": now,
                "Signature": make_signature("get", sender, url, now),
            },
            timeout=15,
        )
    except requests.RequestException:
        raise ConnectorException()
@@ -1,5 +1,5 @@
""" actor serializer """
from dataclasses import dataclass, field
from dataclasses import dataclass
from typing import Dict

from .base_activity import ActivityObject

@@ -35,7 +35,7 @@ class Person(ActivityObject):
    endpoints: Dict = None
    name: str = None
    summary: str = None
    icon: Image = field(default_factory=lambda: {})
    icon: Image = None
    bookwyrmUser: bool = False
    manuallyApprovesFollowers: str = False
    discoverable: str = False
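The hunk above swaps the icon field's field(default_factory=lambda: {}) default for a plain None. A small self-contained illustration of the difference between a default_factory for mutable defaults and a None sentinel, using a made-up Card dataclass rather than BookWyrm's Person:

```python
from dataclasses import dataclass, field
from typing import Optional

@dataclass
class Card:
    # A mutable default must go through default_factory; a bare `tags: list = []`
    # would raise ValueError at class definition time.
    tags: list = field(default_factory=list)
    # Using None as a sentinel (as the diff does for `icon`) avoids allocating an
    # empty object per instance and makes "not provided" explicit.
    icon: Optional[dict] = None

a, b = Card(), Card()
a.tags.append("x")
assert b.tags == []    # each instance got its own list
assert a.icon is None  # an unset icon is simply None
```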
@@ -139,14 +139,14 @@ class ActivityStream(RedisStore):
                    | (
                        Q(following=status.user) & Q(following=status.reply_parent.user)
                    )  # if the user is following both authors
                ).distinct()
                )
            )

        # only visible to the poster's followers and tagged users
        elif status.privacy == "followers":
            audience = audience.filter(
                Q(following=status.user)  # if the user is following the author
            )
        return audience.distinct()
        return audience.distinct("id")

    @tracer.start_as_current_span("ActivityStream.get_audience")
    def get_audience(self, status):

@@ -156,7 +156,7 @@ class ActivityStream(RedisStore):
        status_author = models.User.objects.filter(
            is_active=True, local=True, id=status.user.id
        ).values_list("id", flat=True)
        return list(set(list(audience) + list(status_author)))
        return list(set(audience) | set(status_author))

    def get_stores_for_users(self, user_ids):
        """convert a list of user ids into redis store ids"""

@@ -183,15 +183,13 @@ class HomeStream(ActivityStream):
    def get_audience(self, status):
        trace.get_current_span().set_attribute("stream_id", self.key)
        audience = super()._get_audience(status)
        if not audience:
            return []
        # if the user is following the author
        audience = audience.filter(following=status.user).values_list("id", flat=True)
        # if the user is the post's author
        status_author = models.User.objects.filter(
            is_active=True, local=True, id=status.user.id
        ).values_list("id", flat=True)
        return list(set(list(audience) + list(status_author)))
        return list(set(audience) | set(status_author))

    def get_statuses_for_user(self, user):
        return models.Status.privacy_filter(

@@ -239,9 +237,7 @@ class BooksStream(ActivityStream):
        )

        audience = super()._get_audience(status)
        if not audience:
            return models.User.objects.none()
        return audience.filter(shelfbook__book__parent_work=work).distinct()
        return audience.filter(shelfbook__book__parent_work=work)

    def get_audience(self, status):
        # only show public statuses on the books feed,
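The audience change above replaces "concatenate the two id lists, then deduplicate" with a set union over the same two values_list results. A minimal sketch, with made-up id lists standing in for the querysets, showing that the two spellings produce the same ids:

```python
# Two id sequences, standing in for values_list("id", flat=True) querysets.
audience = [1, 2, 3, 3]
status_author = [3, 4]

old_way = list(set(list(audience) + list(status_author)))  # concatenate, then dedupe
new_way = list(set(audience) | set(status_author))         # dedupe each side, then union

assert sorted(old_way) == sorted(new_way) == [1, 2, 3, 4]
```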
@@ -1,4 +1,5 @@
"""Do further startup configuration and initialization"""

import os
import urllib
import logging

@@ -14,16 +15,16 @@ def download_file(url, destination):
    """Downloads a file to the given path"""
    try:
        # Ensure our destination directory exists
        os.makedirs(os.path.dirname(destination))
        os.makedirs(os.path.dirname(destination), exist_ok=True)
        with urllib.request.urlopen(url) as stream:
            with open(destination, "b+w") as outfile:
                outfile.write(stream.read())
    except (urllib.error.HTTPError, urllib.error.URLError):
        logger.info("Failed to download file %s", url)
    except OSError:
        logger.info("Couldn't open font file %s for writing", destination)
    except:  # pylint: disable=bare-except
        logger.info("Unknown error in file download")
    except (urllib.error.HTTPError, urllib.error.URLError) as err:
        logger.error("Failed to download file %s: %s", url, err)
    except OSError as err:
        logger.error("Couldn't open font file %s for writing: %s", destination, err)
    except Exception as err:  # pylint:disable=broad-except
        logger.error("Unknown error in file download: %s", err)


class BookwyrmConfig(AppConfig):
@@ -3,7 +3,9 @@ from __future__ import annotations
from abc import ABC, abstractmethod
from typing import Optional, TypedDict, Any, Callable, Union, Iterator
from urllib.parse import quote_plus
import imghdr

# pylint: disable-next=deprecated-module
import imghdr  # Deprecated in 3.11 for removal in 3.13; no good alternative yet
import logging
import re
import asyncio
@@ -15,6 +15,7 @@ class AuthorForm(CustomForm):
            "aliases",
            "bio",
            "wikipedia_link",
            "wikidata",
            "website",
            "born",
            "died",

@@ -32,6 +33,7 @@ class AuthorForm(CustomForm):
            "wikipedia_link": forms.TextInput(
                attrs={"aria-describedby": "desc_wikipedia_link"}
            ),
            "wikidata": forms.TextInput(attrs={"aria-describedby": "desc_wikidata"}),
            "website": forms.TextInput(attrs={"aria-describedby": "desc_website"}),
            "born": forms.SelectDateWidget(attrs={"aria-describedby": "desc_born"}),
            "died": forms.SelectDateWidget(attrs={"aria-describedby": "desc_died"}),
@@ -1,4 +1,5 @@
""" using django model forms """

from urllib.parse import urlparse

from django.utils.translation import gettext_lazy as _

@@ -37,10 +38,9 @@ class FileLinkForm(CustomForm):
            ),
        )
        if (
            not self.instance
            and models.FileLink.objects.filter(
                url=url, book=book, filetype=filetype
            ).exists()
            models.FileLink.objects.filter(url=url, book=book, filetype=filetype)
            .exclude(pk=self.instance)
            .exists()
        ):
            # pylint: disable=line-too-long
            self.add_error(
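The new duplicate check excludes the link currently being edited, so re-saving an existing FileLink with unchanged values no longer trips the "already exists" error, while brand-new links are still checked against every row. A hedged sketch of the general queryset pattern; the is_duplicate helper is illustrative and not part of BookWyrm:

```python
def is_duplicate(queryset, url, book, current_pk=None) -> bool:
    """True if a row other than the one being edited already has this url/book.

    `queryset` is any Django queryset over a model with url/book fields
    (models.FileLink.objects in the form above). With current_pk=None the
    exclude() removes nothing, so creating a new link still checks all rows.
    """
    return queryset.filter(url=url, book=book).exclude(pk=current_pk).exists()
```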
@@ -26,7 +26,7 @@ class IsbnHyphenator:

    def update_range_message(self) -> None:
        """Download the range message xml file and save it locally"""
        response = requests.get(self.__range_message_url)
        response = requests.get(self.__range_message_url, timeout=15)
        with open(self.__range_file_path, "w", encoding="utf-8") as file:
            file.write(response.text)
        self.__element_tree = None
@@ -1,13 +1,14 @@
""" PROCEED WITH CAUTION: uses deduplication fields to permanently
merge book data objects """

from django.core.management.base import BaseCommand
from django.db.models import Count
from bookwyrm import models
from bookwyrm.management.merge import merge_objects


def dedupe_model(model):
def dedupe_model(model, dry_run=False):
    """combine duplicate editions and update related models"""
    print(f"deduplicating {model.__name__}:")
    fields = model._meta.get_fields()
    dedupe_fields = [
        f for f in fields if hasattr(f, "deduplication_field") and f.deduplication_field

@@ -16,30 +17,42 @@ def dedupe_model(model):
        dupes = (
            model.objects.values(field.name)
            .annotate(Count(field.name))
            .filter(**{"%s__count__gt" % field.name: 1})
            .filter(**{f"{field.name}__count__gt": 1})
            .exclude(**{field.name: ""})
            .exclude(**{f"{field.name}__isnull": True})
        )

        for dupe in dupes:
            value = dupe[field.name]
            if not value or value == "":
                continue
            print("----------")
            print(dupe)
            objs = model.objects.filter(**{field.name: value}).order_by("id")
            canonical = objs.first()
            print("keeping", canonical.remote_id)
            action = "would merge" if dry_run else "merging"
            print(
                f"{action} into {model.__name__} {canonical.remote_id} based on {field.name} {value}:"
            )
            for obj in objs[1:]:
                print(obj.remote_id)
                merge_objects(canonical, obj)
                print(f"- {obj.remote_id}")
                absorbed_fields = obj.merge_into(canonical, dry_run=dry_run)
                print(f"  absorbed fields: {absorbed_fields}")


class Command(BaseCommand):
    """deduplicate allllll the book data models"""

    help = "merges duplicate book data"

    def add_arguments(self, parser):
        """add the arguments for this command"""
        parser.add_argument(
            "--dry_run",
            action="store_true",
            help="don't actually merge, only print what would happen",
        )

    # pylint: disable=no-self-use,unused-argument
    def handle(self, *args, **options):
        """run deduplications"""
        dedupe_model(models.Edition)
        dedupe_model(models.Work)
        dedupe_model(models.Author)
        dedupe_model(models.Edition, dry_run=options["dry_run"])
        dedupe_model(models.Work, dry_run=options["dry_run"])
        dedupe_model(models.Author, dry_run=options["dry_run"])
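With the new --dry_run flag the command only reports what it would merge. A hedged sketch of invoking it from Python via django.core.management.call_command, which maps the flag to a keyword argument; this assumes the command file is named deduplicate_book_data.py, which the diff excerpt does not show, and that Django is already configured:

```python
from django.core.management import call_command

# Preview the merges without touching the database ...
call_command("deduplicate_book_data", dry_run=True)

# ... then run it for real once the output looks right.
call_command("deduplicate_book_data")
```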
@@ -1,54 +0,0 @@
""" Get your admin code to allow install """
from django.core.management.base import BaseCommand

from bookwyrm import models
from bookwyrm.settings import VERSION


# pylint: disable=no-self-use
class Command(BaseCommand):
    """command-line options"""

    help = "What version is this?"

    def add_arguments(self, parser):
        """specify which function to run"""
        parser.add_argument(
            "--current",
            action="store_true",
            help="Version stored in database",
        )
        parser.add_argument(
            "--target",
            action="store_true",
            help="Version stored in settings",
        )
        parser.add_argument(
            "--update",
            action="store_true",
            help="Update database version",
        )

    # pylint: disable=unused-argument
    def handle(self, *args, **options):
        """execute init"""
        site = models.SiteSettings.objects.get()
        current = site.version or "0.0.1"
        target = VERSION
        if options.get("current"):
            print(current)
            return

        if options.get("target"):
            print(target)
            return

        if options.get("update"):
            site.version = target
            site.save()
            return

        if current != target:
            print(f"{current}/{target}")
        else:
            print(current)
@@ -1,50 +0,0 @@
from django.db.models import ManyToManyField


def update_related(canonical, obj):
    """update all the models with fk to the object being removed"""
    # move related models to canonical
    related_models = [
        (r.remote_field.name, r.related_model) for r in canonical._meta.related_objects
    ]
    for (related_field, related_model) in related_models:
        # Skip the ManyToMany fields that aren’t auto-created. These
        # should have a corresponding OneToMany field in the model for
        # the linking table anyway. If we update it through that model
        # instead then we won’t lose the extra fields in the linking
        # table.
        related_field_obj = related_model._meta.get_field(related_field)
        if isinstance(related_field_obj, ManyToManyField):
            through = related_field_obj.remote_field.through
            if not through._meta.auto_created:
                continue
        related_objs = related_model.objects.filter(**{related_field: obj})
        for related_obj in related_objs:
            print("replacing in", related_model.__name__, related_field, related_obj.id)
            try:
                setattr(related_obj, related_field, canonical)
                related_obj.save()
            except TypeError:
                getattr(related_obj, related_field).add(canonical)
                getattr(related_obj, related_field).remove(obj)


def copy_data(canonical, obj):
    """try to get the most data possible"""
    for data_field in obj._meta.get_fields():
        if not hasattr(data_field, "activitypub_field"):
            continue
        data_value = getattr(obj, data_field.name)
        if not data_value:
            continue
        if not getattr(canonical, data_field.name):
            print("setting data field", data_field.name, data_value)
            setattr(canonical, data_field.name, data_value)
    canonical.save()


def merge_objects(canonical, obj):
    copy_data(canonical, obj)
    update_related(canonical, obj)
    # remove the outdated entry
    obj.delete()
@@ -1,4 +1,3 @@
from bookwyrm.management.merge import merge_objects
from django.core.management.base import BaseCommand


@@ -9,6 +8,11 @@ class MergeCommand(BaseCommand):
        """add the arguments for this command"""
        parser.add_argument("--canonical", type=int, required=True)
        parser.add_argument("--other", type=int, required=True)
        parser.add_argument(
            "--dry_run",
            action="store_true",
            help="don't actually merge, only print what would happen",
        )

    # pylint: disable=no-self-use,unused-argument
    def handle(self, *args, **options):

@@ -26,4 +30,8 @@ class MergeCommand(BaseCommand):
            print("other book doesn’t exist!")
            return

        merge_objects(canonical, other)
        absorbed_fields = other.merge_into(canonical, dry_run=options["dry_run"])

        action = "would be" if options["dry_run"] else "has been"
        print(f"{other.remote_id} {action} merged into {canonical.remote_id}")
        print(f"absorbed fields: {absorbed_fields}")
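The merge commands now delegate to the models' merge_into method (defined on BookDataModel further down in this diff) instead of the deleted merge_objects helper. A hedged sketch of calling it directly from a Django shell; the ids are placeholders, not real data:

```python
from bookwyrm import models

canonical = models.Edition.objects.get(id=1)  # example ids only
other = models.Edition.objects.get(id=2)

# Report which fields would be copied over, without saving or deleting anything.
absorbed = other.merge_into(canonical, dry_run=True)
print(absorbed)  # mapping of field name -> value that would be absorbed

# Perform the merge for real: canonical absorbs missing data, related rows are
# repointed, `other` is deleted, and a MergedBook tombstone records the old id.
other.merge_into(canonical)
```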
16  bookwyrm/migrations/0190_book_search_updates.py  (new file)

@@ -0,0 +1,16 @@
# Generated by Django 3.2.20 on 2023-11-24 17:11

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("bookwyrm", "0188_theme_loads"),
    ]

    operations = [
        migrations.RemoveIndex(
            model_name="author",
            name="bookwyrm_au_search__b050a8_gin",
        ),
    ]
@@ -0,0 +1,76 @@
# Generated by Django 3.2.20 on 2023-11-25 00:47

from importlib import import_module
import re

from django.db import migrations
import pgtrigger.compiler
import pgtrigger.migrations

trigger_migration = import_module("bookwyrm.migrations.0077_auto_20210623_2155")

# it's _very_ convenient for development that this migration be reversible
search_vector_trigger = trigger_migration.Migration.operations[4]
author_search_vector_trigger = trigger_migration.Migration.operations[5]


assert re.search(r"\bCREATE TRIGGER search_vector_trigger\b", search_vector_trigger.sql)
assert re.search(
    r"\bCREATE TRIGGER author_search_vector_trigger\b",
    author_search_vector_trigger.sql,
)


class Migration(migrations.Migration):
    dependencies = [
        ("bookwyrm", "0190_book_search_updates"),
    ]

    operations = [
        pgtrigger.migrations.AddTrigger(
            model_name="book",
            trigger=pgtrigger.compiler.Trigger(
                name="update_search_vector_on_book_edit",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func="new.search_vector := setweight(coalesce(nullif(to_tsvector('english', new.title), ''), to_tsvector('simple', new.title)), 'A') || setweight(to_tsvector('english', coalesce(new.subtitle, '')), 'B') || (SELECT setweight(to_tsvector('simple', coalesce(array_to_string(array_agg(bookwyrm_author.name), ' '), '')), 'C') FROM bookwyrm_author LEFT JOIN bookwyrm_book_authors ON bookwyrm_author.id = bookwyrm_book_authors.author_id WHERE bookwyrm_book_authors.book_id = new.id ) || setweight(to_tsvector('english', coalesce(new.series, '')), 'D');RETURN NEW;",
                    hash="77d6399497c0a89b0bf09d296e33c396da63705c",
                    operation='INSERT OR UPDATE OF "title", "subtitle", "series", "search_vector"',
                    pgid="pgtrigger_update_search_vector_on_book_edit_bec58",
                    table="bookwyrm_book",
                    when="BEFORE",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="author",
            trigger=pgtrigger.compiler.Trigger(
                name="reset_search_vector_on_author_edit",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func="WITH updated_books AS (SELECT book_id FROM bookwyrm_book_authors WHERE author_id = new.id ) UPDATE bookwyrm_book SET search_vector = '' FROM updated_books WHERE id = updated_books.book_id;RETURN NEW;",
                    hash="e7bbf08711ff3724c58f4d92fb7a082ffb3d7826",
                    operation='UPDATE OF "name"',
                    pgid="pgtrigger_reset_search_vector_on_author_edit_a447c",
                    table="bookwyrm_author",
                    when="AFTER",
                ),
            ),
        ),
        migrations.RunSQL(
            sql="""DROP TRIGGER IF EXISTS search_vector_trigger ON bookwyrm_book;
            DROP FUNCTION IF EXISTS book_trigger;
            """,
            reverse_sql=search_vector_trigger.sql,
        ),
        migrations.RunSQL(
            sql="""DROP TRIGGER IF EXISTS author_search_vector_trigger ON bookwyrm_author;
            DROP FUNCTION IF EXISTS author_trigger;
            """,
            reverse_sql=author_search_vector_trigger.sql,
        ),
        migrations.RunSQL(
            # Recalculate book search vector for any missed author name changes
            # due to bug in JOIN in the old trigger.
            sql="UPDATE bookwyrm_book SET search_vector = NULL;",
            reverse_sql=migrations.RunSQL.noop,
        ),
    ]
23  bookwyrm/migrations/0192_make_page_positions_text.py  (new file)

@@ -0,0 +1,23 @@
# Generated by Django 3.2.23 on 2024-01-04 23:56

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0191_merge_20240102_0326"),
    ]

    operations = [
        migrations.AlterField(
            model_name="quotation",
            name="endposition",
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name="quotation",
            name="position",
            field=models.TextField(blank=True, null=True),
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.2.23 on 2024-01-02 19:36

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0191_merge_20240102_0326"),
    ]

    operations = [
        migrations.RenameField(
            model_name="sitesettings",
            old_name="version",
            new_name="available_version",
        ),
    ]
92  bookwyrm/migrations/0193_auto_20240128_0249.py  (new file)

@@ -0,0 +1,92 @@
# Generated by Django 3.2.23 on 2024-01-28 02:49

import bookwyrm.storage_backends
import django.core.serializers.json
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0192_sitesettings_user_exports_enabled"),
    ]

    operations = [
        migrations.AddField(
            model_name="bookwyrmexportjob",
            name="export_json",
            field=models.JSONField(
                encoder=django.core.serializers.json.DjangoJSONEncoder, null=True
            ),
        ),
        migrations.AddField(
            model_name="bookwyrmexportjob",
            name="json_completed",
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name="bookwyrmexportjob",
            name="export_data",
            field=models.FileField(
                null=True,
                storage=bookwyrm.storage_backends.ExportsFileStorage,
                upload_to="",
            ),
        ),
        migrations.CreateModel(
            name="AddFileToTar",
            fields=[
                (
                    "childjob_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="bookwyrm.childjob",
                    ),
                ),
                (
                    "parent_export_job",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="child_edition_export_jobs",
                        to="bookwyrm.bookwyrmexportjob",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
            bases=("bookwyrm.childjob",),
        ),
        migrations.CreateModel(
            name="AddBookToUserExportJob",
            fields=[
                (
                    "childjob_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="bookwyrm.childjob",
                    ),
                ),
                (
                    "edition",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="bookwyrm.edition",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
            bases=("bookwyrm.childjob",),
        ),
    ]
13  bookwyrm/migrations/0193_merge_20240203_1539.py  (new file)

@@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-02-03 15:39

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0192_make_page_positions_text"),
        ("bookwyrm", "0192_sitesettings_user_exports_enabled"),
    ]

    operations = []
13  bookwyrm/migrations/0194_merge_20240203_1619.py  (new file)

@@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-02-03 16:19

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0192_rename_version_sitesettings_available_version"),
        ("bookwyrm", "0193_merge_20240203_1539"),
    ]

    operations = []
46  bookwyrm/migrations/0195_alter_user_preferred_language.py  (new file)

@@ -0,0 +1,46 @@
# Generated by Django 3.2.23 on 2024-02-21 00:45

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0194_merge_20240203_1619"),
    ]

    operations = [
        migrations.AlterField(
            model_name="user",
            name="preferred_language",
            field=models.CharField(
                blank=True,
                choices=[
                    ("en-us", "English"),
                    ("ca-es", "Català (Catalan)"),
                    ("de-de", "Deutsch (German)"),
                    ("eo-uy", "Esperanto (Esperanto)"),
                    ("es-es", "Español (Spanish)"),
                    ("eu-es", "Euskara (Basque)"),
                    ("gl-es", "Galego (Galician)"),
                    ("it-it", "Italiano (Italian)"),
                    ("ko-kr", "한국어 (Korean)"),
                    ("fi-fi", "Suomi (Finnish)"),
                    ("fr-fr", "Français (French)"),
                    ("lt-lt", "Lietuvių (Lithuanian)"),
                    ("nl-nl", "Nederlands (Dutch)"),
                    ("no-no", "Norsk (Norwegian)"),
                    ("pl-pl", "Polski (Polish)"),
                    ("pt-br", "Português do Brasil (Brazilian Portuguese)"),
                    ("pt-pt", "Português Europeu (European Portuguese)"),
                    ("ro-ro", "Română (Romanian)"),
                    ("sv-se", "Svenska (Swedish)"),
                    ("uk-ua", "Українська (Ukrainian)"),
                    ("zh-hans", "简体中文 (Simplified Chinese)"),
                    ("zh-hant", "繁體中文 (Traditional Chinese)"),
                ],
                max_length=255,
                null=True,
            ),
        ),
    ]
13  bookwyrm/migrations/0196_merge_20240318_1737.py  (new file)

@@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-03-18 17:37

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0193_auto_20240128_0249"),
        ("bookwyrm", "0195_alter_user_preferred_language"),
    ]

    operations = []
13  bookwyrm/migrations/0196_merge_pr3134_into_main.py  (new file)

@@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-03-18 00:48

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0191_migrate_search_vec_triggers_to_pgtriggers"),
        ("bookwyrm", "0195_alter_user_preferred_language"),
    ]

    operations = []
41  bookwyrm/migrations/0197_author_search_vector.py  (new file)

@@ -0,0 +1,41 @@
# Generated by Django 3.2.25 on 2024-03-20 15:15

import django.contrib.postgres.indexes
from django.db import migrations
import pgtrigger.compiler
import pgtrigger.migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0196_merge_pr3134_into_main"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="author",
            index=django.contrib.postgres.indexes.GinIndex(
                fields=["search_vector"], name="bookwyrm_au_search__b050a8_gin"
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="author",
            trigger=pgtrigger.compiler.Trigger(
                name="update_search_vector_on_author_edit",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func="new.search_vector := setweight(to_tsvector('simple', new.name), 'A') || setweight(to_tsvector('simple', coalesce(array_to_string(new.aliases, ' '), '')), 'B');RETURN NEW;",
                    hash="b97919016236d74d0ade51a0769a173ea269da64",
                    operation='INSERT OR UPDATE OF "name", "aliases", "search_vector"',
                    pgid="pgtrigger_update_search_vector_on_author_edit_c61cb",
                    table="bookwyrm_author",
                    when="BEFORE",
                ),
            ),
        ),
        migrations.RunSQL(
            # Calculate search vector for all Authors.
            sql="UPDATE bookwyrm_author SET search_vector = NULL;",
            reverse_sql="UPDATE bookwyrm_author SET search_vector = NULL;",
        ),
    ]
13  bookwyrm/migrations/0197_merge_20240324_0235.py  (new file)

@@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-03-24 02:35

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0196_merge_20240318_1737"),
        ("bookwyrm", "0196_merge_pr3134_into_main"),
    ]

    operations = []
48  bookwyrm/migrations/0197_mergedauthor_mergedbook.py  (new file)

@@ -0,0 +1,48 @@
# Generated by Django 3.2.24 on 2024-02-28 21:30

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0196_merge_pr3134_into_main"),
    ]

    operations = [
        migrations.CreateModel(
            name="MergedBook",
            fields=[
                ("deleted_id", models.IntegerField(primary_key=True, serialize=False)),
                (
                    "merged_into",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        related_name="absorbed",
                        to="bookwyrm.book",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
        migrations.CreateModel(
            name="MergedAuthor",
            fields=[
                ("deleted_id", models.IntegerField(primary_key=True, serialize=False)),
                (
                    "merged_into",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        related_name="absorbed",
                        to="bookwyrm.author",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
    ]
@@ -0,0 +1,23 @@
# Generated by Django 3.2.25 on 2024-03-26 11:37

import bookwyrm.models.bookwyrm_export_job
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0197_merge_20240324_0235"),
    ]

    operations = [
        migrations.AlterField(
            model_name="bookwyrmexportjob",
            name="export_data",
            field=models.FileField(
                null=True,
                storage=bookwyrm.models.bookwyrm_export_job.select_exports_storage,
                upload_to="",
            ),
        ),
    ]
@@ -0,0 +1,57 @@
# Generated by Django 3.2.25 on 2024-03-20 15:52

from django.db import migrations
import pgtrigger.compiler
import pgtrigger.migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0197_author_search_vector"),
    ]

    operations = [
        pgtrigger.migrations.RemoveTrigger(
            model_name="author",
            name="reset_search_vector_on_author_edit",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="book",
            name="update_search_vector_on_book_edit",
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="author",
            trigger=pgtrigger.compiler.Trigger(
                name="reset_book_search_vector_on_author_edit",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func="WITH updated_books AS (SELECT book_id FROM bookwyrm_book_authors WHERE author_id = new.id ) UPDATE bookwyrm_book SET search_vector = '' FROM updated_books WHERE id = updated_books.book_id;RETURN NEW;",
                    hash="68422c0f29879c5802b82159dde45297eff53e73",
                    operation='UPDATE OF "name", "aliases"',
                    pgid="pgtrigger_reset_book_search_vector_on_author_edit_a50c7",
                    table="bookwyrm_author",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="book",
            trigger=pgtrigger.compiler.Trigger(
                name="update_search_vector_on_book_edit",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func="WITH author_names AS (SELECT array_to_string(bookwyrm_author.name || bookwyrm_author.aliases, ' ') AS name_and_aliases FROM bookwyrm_author LEFT JOIN bookwyrm_book_authors ON bookwyrm_author.id = bookwyrm_book_authors.author_id WHERE bookwyrm_book_authors.book_id = new.id ) SELECT setweight(coalesce(nullif(to_tsvector('english', new.title), ''), to_tsvector('simple', new.title)), 'A') || setweight(to_tsvector('english', coalesce(new.subtitle, '')), 'B') || (SELECT setweight(to_tsvector('simple', coalesce(array_to_string(array_agg(name_and_aliases), ' '), '')), 'C') FROM author_names) || setweight(to_tsvector('english', coalesce(new.series, '')), 'D') INTO new.search_vector;RETURN NEW;",
                    hash="9324f5ca76a6f5e63931881d62d11da11f595b2c",
                    operation='INSERT OR UPDATE OF "title", "subtitle", "series", "search_vector"',
                    pgid="pgtrigger_update_search_vector_on_book_edit_bec58",
                    table="bookwyrm_book",
                    when="BEFORE",
                ),
            ),
        ),
        migrations.RunSQL(
            # Recalculate search vector for all Books because it now includes
            # Author aliases.
            sql="UPDATE bookwyrm_book SET search_vector = NULL;",
            reverse_sql="UPDATE bookwyrm_book SET search_vector = NULL;",
        ),
    ]
13  bookwyrm/migrations/0199_merge_20240326_1217.py  (new file)

@@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-03-26 12:17

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0198_alter_bookwyrmexportjob_export_data"),
        ("bookwyrm", "0198_book_search_vector_author_aliases"),
    ]

    operations = []
@@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-02 19:53

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0198_book_search_vector_author_aliases"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="status",
            index=models.Index(
                fields=["remote_id"], name="bookwyrm_st_remote__06aeba_idx"
            ),
        ),
    ]
27  bookwyrm/migrations/0200_auto_20240327_1914.py  (new file)

@@ -0,0 +1,27 @@
# Generated by Django 3.2.25 on 2024-03-27 19:14

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0199_merge_20240326_1217"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="addfiletotar",
            name="childjob_ptr",
        ),
        migrations.RemoveField(
            model_name="addfiletotar",
            name="parent_export_job",
        ),
        migrations.DeleteModel(
            name="AddBookToUserExportJob",
        ),
        migrations.DeleteModel(
            name="AddFileToTar",
        ),
    ]
@@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-03 19:05

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0199_status_bookwyrm_st_remote__06aeba_idx"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="status",
            index=models.Index(
                fields=["thread_id"], name="bookwyrm_st_thread__cf064f_idx"
            ),
        ),
    ]
@@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-03 19:10

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0200_status_bookwyrm_st_thread__cf064f_idx"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="keypair",
            index=models.Index(
                fields=["remote_id"], name="bookwyrm_ke_remote__472927_idx"
            ),
        ),
    ]
@@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-03 19:14

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0201_keypair_bookwyrm_ke_remote__472927_idx"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="user",
            index=models.Index(
                fields=["username"], name="bookwyrm_us_usernam_b2546d_idx"
            ),
        ),
    ]
@@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-03 19:22

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0202_user_bookwyrm_us_usernam_b2546d_idx"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="user",
            index=models.Index(
                fields=["is_active", "local"], name="bookwyrm_us_is_acti_972dc4_idx"
            ),
        ),
    ]
13  bookwyrm/migrations/0204_merge_20240409_1042.py  (new file)

@@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-04-09 10:42

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0197_mergedauthor_mergedbook"),
        ("bookwyrm", "0203_user_bookwyrm_us_is_acti_972dc4_idx"),
    ]

    operations = []
13  bookwyrm/migrations/0205_merge_20240413_0232.py  (new file)

@@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-04-13 02:32

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0200_auto_20240327_1914"),
        ("bookwyrm", "0204_merge_20240409_1042"),
    ]

    operations = []
@@ -152,8 +152,9 @@ class ActivitypubMixin:
        # find anyone who's tagged in a status, for example
        mentions = self.recipients if hasattr(self, "recipients") else []

        # we always send activities to explicitly mentioned users' inboxes
        recipients = [u.inbox for u in mentions or [] if not u.local]
        # we always send activities to explicitly mentioned users (using shared inboxes
        # where available to avoid duplicate submissions to a given instance)
        recipients = {u.shared_inbox or u.inbox for u in mentions if not u.local}

        # unless it's a dm, all the followers should receive the activity
        if privacy != "direct":

@@ -173,18 +174,18 @@ class ActivitypubMixin:
            if user:
                queryset = queryset.filter(following=user)

            # ideally, we will send to shared inboxes for efficiency
            shared_inboxes = (
                queryset.filter(shared_inbox__isnull=False)
                .values_list("shared_inbox", flat=True)
                .distinct()
            # as above, we prefer shared inboxes if available
            recipients.update(
                queryset.filter(shared_inbox__isnull=False).values_list(
                    "shared_inbox", flat=True
                )
            )
            # but not everyone has a shared inbox
            inboxes = queryset.filter(shared_inbox__isnull=True).values_list(
                "inbox", flat=True
            recipients.update(
                queryset.filter(shared_inbox__isnull=True).values_list(
                    "inbox", flat=True
                )
            )
            recipients += list(shared_inboxes) + list(inboxes)
        return list(set(recipients))
        return list(recipients)

    def to_activity_dataclass(self):
        """convert from a model to an activity"""
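The recipient list above becomes a set built from shared inboxes where available, so one instance's shared inbox appears only once even when several of its users are mentioned or following. A minimal sketch of that deduplication with hypothetical user objects standing in for BookWyrm's User model:

```python
from dataclasses import dataclass
from typing import Optional

@dataclass
class RemoteUser:
    # Hypothetical stand-in for a remote follower or mentioned user.
    inbox: str
    shared_inbox: Optional[str] = None

followers = [
    RemoteUser("https://example.social/u/a/inbox", "https://example.social/inbox"),
    RemoteUser("https://example.social/u/b/inbox", "https://example.social/inbox"),
    RemoteUser("https://solo.host/u/c/inbox"),  # no shared inbox
]

# Prefer the shared inbox; the set collapses duplicates per instance.
recipients = {u.shared_inbox or u.inbox for u in followers}
assert recipients == {"https://example.social/inbox", "https://solo.host/u/c/inbox"}
```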
@@ -1,20 +1,25 @@
""" database schema for info about authors """

import re
from typing import Tuple, Any

from django.contrib.postgres.indexes import GinIndex
from django.db import models
from django.contrib.postgres.indexes import GinIndex
import pgtrigger

from bookwyrm import activitypub
from bookwyrm.settings import DOMAIN
from bookwyrm.utils.db import format_trigger

from .book import BookDataModel
from .book import BookDataModel, MergedAuthor
from . import fields


class Author(BookDataModel):
    """basic biographic info"""

    merged_model = MergedAuthor

    wikipedia_link = fields.CharField(
        max_length=255, blank=True, null=True, deduplication_field=True
    )

@@ -67,9 +72,46 @@ class Author(BookDataModel):
        """editions and works both use "book" instead of model_name"""
        return f"https://{DOMAIN}/author/{self.id}"

    activity_serializer = activitypub.Author

    class Meta:
        """sets up postgres GIN index field"""
        """sets up indexes and triggers"""

        # pylint: disable=line-too-long

        indexes = (GinIndex(fields=["search_vector"]),)
        triggers = [
            pgtrigger.Trigger(
                name="update_search_vector_on_author_edit",
                when=pgtrigger.Before,
                operation=pgtrigger.Insert
                | pgtrigger.UpdateOf("name", "aliases", "search_vector"),
                func=format_trigger(
                    """new.search_vector :=
                    -- author name, with priority A
                    setweight(to_tsvector('simple', new.name), 'A') ||
                    -- author aliases, with priority B
                    setweight(to_tsvector('simple', coalesce(array_to_string(new.aliases, ' '), '')), 'B');
                    RETURN new;
                    """
                ),
            ),
            pgtrigger.Trigger(
                name="reset_book_search_vector_on_author_edit",
                when=pgtrigger.After,
                operation=pgtrigger.UpdateOf("name", "aliases"),
                func=format_trigger(
                    """WITH updated_books AS (
                        SELECT book_id
                        FROM bookwyrm_book_authors
                        WHERE author_id = new.id
                    )
                    UPDATE bookwyrm_book
                    SET search_vector = ''
                    FROM updated_books
                    WHERE id = updated_books.book_id;
                    RETURN new;
                    """
                ),
            ),
        ]

    activity_serializer = activitypub.Author
|
@ -1,18 +1,21 @@
|
|||
""" database schema for books and shelves """
|
||||
|
||||
from itertools import chain
|
||||
import re
|
||||
from typing import Any
|
||||
from typing import Any, Dict
|
||||
from typing_extensions import Self
|
||||
|
||||
from django.contrib.postgres.search import SearchVectorField
|
||||
from django.contrib.postgres.indexes import GinIndex
|
||||
from django.core.cache import cache
|
||||
from django.db import models, transaction
|
||||
from django.db.models import Prefetch
|
||||
from django.db.models import Prefetch, ManyToManyField
|
||||
from django.dispatch import receiver
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from model_utils import FieldTracker
|
||||
from model_utils.managers import InheritanceManager
|
||||
from imagekit.models import ImageSpecField
|
||||
import pgtrigger
|
||||
|
||||
from bookwyrm import activitypub
|
||||
from bookwyrm.isbn.isbn import hyphenator_singleton as hyphenator
|
||||
|
@ -24,6 +27,7 @@ from bookwyrm.settings import (
|
|||
ENABLE_PREVIEW_IMAGES,
|
||||
ENABLE_THUMBNAIL_GENERATION,
|
||||
)
|
||||
from bookwyrm.utils.db import format_trigger
|
||||
|
||||
from .activitypub_mixin import OrderedCollectionPageMixin, ObjectMixin
|
||||
from .base_model import BookWyrmModel
|
||||
|
@ -106,10 +110,115 @@ class BookDataModel(ObjectMixin, BookWyrmModel):
|
|||
"""only send book data updates to other bookwyrm instances"""
|
||||
super().broadcast(activity, sender, software=software, **kwargs)
|
||||
|
||||
def merge_into(self, canonical: Self, dry_run=False) -> Dict[str, Any]:
|
||||
"""merge this entity into another entity"""
|
||||
if canonical.id == self.id:
|
||||
raise ValueError(f"Cannot merge {self} into itself")
|
||||
|
||||
absorbed_fields = canonical.absorb_data_from(self, dry_run=dry_run)
|
||||
|
||||
if dry_run:
|
||||
return absorbed_fields
|
||||
|
||||
canonical.save()
|
||||
|
||||
self.merged_model.objects.create(deleted_id=self.id, merged_into=canonical)
|
||||
|
||||
# move related models to canonical
|
||||
related_models = [
|
||||
(r.remote_field.name, r.related_model) for r in self._meta.related_objects
|
||||
]
|
||||
# pylint: disable=protected-access
|
||||
for related_field, related_model in related_models:
|
||||
# Skip the ManyToMany fields that aren’t auto-created. These
|
||||
# should have a corresponding OneToMany field in the model for
|
||||
# the linking table anyway. If we update it through that model
|
||||
# instead then we won’t lose the extra fields in the linking
|
||||
# table.
|
||||
# pylint: disable=protected-access
|
||||
related_field_obj = related_model._meta.get_field(related_field)
|
||||
if isinstance(related_field_obj, ManyToManyField):
|
||||
through = related_field_obj.remote_field.through
|
||||
if not through._meta.auto_created:
|
||||
continue
|
||||
related_objs = related_model.objects.filter(**{related_field: self})
|
||||
for related_obj in related_objs:
|
||||
try:
|
||||
setattr(related_obj, related_field, canonical)
|
||||
related_obj.save()
|
||||
except TypeError:
|
||||
getattr(related_obj, related_field).add(canonical)
|
||||
getattr(related_obj, related_field).remove(self)
|
||||
|
||||
self.delete()
|
||||
return absorbed_fields
|
||||
|
||||
def absorb_data_from(self, other: Self, dry_run=False) -> Dict[str, Any]:
|
||||
"""fill empty fields with values from another entity"""
|
||||
absorbed_fields = {}
|
||||
for data_field in self._meta.get_fields():
|
||||
if not hasattr(data_field, "activitypub_field"):
|
||||
continue
|
||||
canonical_value = getattr(self, data_field.name)
|
||||
other_value = getattr(other, data_field.name)
|
||||
if not other_value:
|
||||
continue
|
||||
if isinstance(data_field, fields.ArrayField):
|
||||
if new_values := list(set(other_value) - set(canonical_value)):
|
||||
# append at the end (in no particular order)
|
||||
if not dry_run:
|
||||
setattr(self, data_field.name, canonical_value + new_values)
|
||||
absorbed_fields[data_field.name] = new_values
|
||||
elif isinstance(data_field, fields.PartialDateField):
|
||||
if (
|
||||
(not canonical_value)
|
||||
or (other_value.has_day and not canonical_value.has_day)
|
||||
or (other_value.has_month and not canonical_value.has_month)
|
||||
):
|
||||
if not dry_run:
|
||||
setattr(self, data_field.name, other_value)
|
||||
absorbed_fields[data_field.name] = other_value
|
||||
else:
|
||||
if not canonical_value:
|
||||
if not dry_run:
|
||||
setattr(self, data_field.name, other_value)
|
||||
absorbed_fields[data_field.name] = other_value
|
||||
return absorbed_fields
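merge_into and absorb_data_from support a preview mode: with dry_run=True the method reports which fields would be copied from the duplicate without saving anything, re-pointing related objects, or deleting the duplicate. A hedged usage sketch (the two author records are hypothetical):

    # Hedged sketch of the merge API defined above; lookups are illustrative.
    from bookwyrm.models import Author

    duplicate = Author.objects.get(id=123)
    canonical = Author.objects.get(id=456)

    preview = duplicate.merge_into(canonical, dry_run=True)  # nothing is written
    # e.g. {"aliases": ["..."], "born": ...} -- fields the canonical would absorb

    absorbed = duplicate.merge_into(canonical)  # performs the merge
    # the duplicate is deleted and a Merged* row records the redirect,
    # so its old URL can still resolve to the canonical record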
|
||||
|
||||
|
||||
class MergedBookDataModel(models.Model):
|
||||
"""a BookDataModel instance that has been merged into another instance. kept
|
||||
to be able to redirect old URLs"""
|
||||
|
||||
deleted_id = models.IntegerField(primary_key=True)
|
||||
|
||||
class Meta:
|
||||
"""abstract just like BookDataModel"""
|
||||
|
||||
abstract = True
|
||||
|
||||
|
||||
class MergedBook(MergedBookDataModel):
|
||||
"""an Book that has been merged into another one"""
|
||||
|
||||
merged_into = models.ForeignKey(
|
||||
"Book", on_delete=models.PROTECT, related_name="absorbed"
|
||||
)
|
||||
|
||||
|
||||
class MergedAuthor(MergedBookDataModel):
|
||||
"""an Author that has been merged into another one"""
|
||||
|
||||
merged_into = models.ForeignKey(
|
||||
"Author", on_delete=models.PROTECT, related_name="absorbed"
|
||||
)
|
||||
|
||||
|
||||
class Book(BookDataModel):
|
||||
"""a generic book, which can mean either an edition or a work"""
|
||||
|
||||
merged_model = MergedBook
|
||||
|
||||
connector = models.ForeignKey("Connector", on_delete=models.PROTECT, null=True)
|
||||
|
||||
# book/work metadata
|
||||
|
@ -190,9 +299,13 @@ class Book(BookDataModel):
|
|||
"""properties of this edition, as a string"""
|
||||
items = [
|
||||
self.physical_format if hasattr(self, "physical_format") else None,
|
||||
f"{self.languages[0]} language"
|
||||
if self.languages and self.languages[0] and self.languages[0] != "English"
|
||||
else None,
|
||||
(
|
||||
f"{self.languages[0]} language"
|
||||
if self.languages
|
||||
and self.languages[0]
|
||||
and self.languages[0] != "English"
|
||||
else None
|
||||
),
|
||||
str(self.published_date.year) if self.published_date else None,
|
||||
", ".join(self.publishers) if hasattr(self, "publishers") else None,
|
||||
]
|
||||
|
@ -232,9 +345,49 @@ class Book(BookDataModel):
|
|||
)
|
||||
|
||||
class Meta:
|
||||
"""sets up postgres GIN index field"""
|
||||
"""set up indexes and triggers"""
|
||||
|
||||
# pylint: disable=line-too-long
|
||||
|
||||
indexes = (GinIndex(fields=["search_vector"]),)
|
||||
triggers = [
|
||||
pgtrigger.Trigger(
|
||||
name="update_search_vector_on_book_edit",
|
||||
when=pgtrigger.Before,
|
||||
operation=pgtrigger.Insert
|
||||
| pgtrigger.UpdateOf("title", "subtitle", "series", "search_vector"),
|
||||
func=format_trigger(
|
||||
"""
|
||||
WITH author_names AS (
|
||||
SELECT array_to_string(bookwyrm_author.name || bookwyrm_author.aliases, ' ') AS name_and_aliases
|
||||
FROM bookwyrm_author
|
||||
LEFT JOIN bookwyrm_book_authors
|
||||
ON bookwyrm_author.id = bookwyrm_book_authors.author_id
|
||||
WHERE bookwyrm_book_authors.book_id = new.id
|
||||
)
|
||||
SELECT
|
||||
-- title, with priority A (parse in English, default to simple if empty)
|
||||
setweight(COALESCE(nullif(
|
||||
to_tsvector('english', new.title), ''),
|
||||
to_tsvector('simple', new.title)), 'A') ||
|
||||
|
||||
-- subtitle, with priority B (always in English?)
|
||||
setweight(to_tsvector('english', COALESCE(new.subtitle, '')), 'B') ||
|
||||
|
||||
-- list of authors names and aliases (with priority C)
|
||||
(SELECT setweight(to_tsvector('simple', COALESCE(array_to_string(ARRAY_AGG(name_and_aliases), ' '), '')), 'C')
|
||||
FROM author_names
|
||||
) ||
|
||||
|
||||
--- last: series name, with lowest priority
|
||||
setweight(to_tsvector('english', COALESCE(new.series, '')), 'D')
|
||||
|
||||
INTO new.search_vector;
|
||||
RETURN new;
|
||||
"""
|
||||
),
|
||||
)
|
||||
]
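The weights assigned in the trigger (title A, subtitle B, author names C, series D) only matter at query time, when a rank is computed against the stored vector. A hedged ORM sketch of such a query (not necessarily how BookWyrm's own search view is written; the search term and config are illustrative):

    # Hedged sketch: ranking books against the trigger-maintained search_vector.
    from django.contrib.postgres.search import SearchQuery, SearchRank
    from django.db.models import F
    from bookwyrm.models import Book

    query = SearchQuery("earthsea", config="simple")  # example term and config
    results = (
        Book.objects.annotate(rank=SearchRank(F("search_vector"), query))
        .filter(search_vector=query)
        .order_by("-rank")
    )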
|
||||
|
||||
|
||||
class Work(OrderedCollectionPageMixin, Book):
|
||||
|
|
|
@ -1,216 +1,318 @@
|
|||
"""Export user account to tar.gz file for import into another Bookwyrm instance"""
|
||||
|
||||
import dataclasses
|
||||
import logging
|
||||
from uuid import uuid4
|
||||
import os
|
||||
|
||||
from django.db.models import FileField
|
||||
from boto3.session import Session as BotoSession
|
||||
from s3_tar import S3Tar
|
||||
|
||||
from django.db.models import BooleanField, FileField, JSONField
|
||||
from django.db.models import Q
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.core.files.base import ContentFile
|
||||
from django.utils.module_loading import import_string
|
||||
|
||||
from bookwyrm.models import AnnualGoal, ReadThrough, ShelfBook, List, ListItem
|
||||
from bookwyrm import settings, storage_backends
|
||||
|
||||
from bookwyrm.models import AnnualGoal, ReadThrough, ShelfBook, ListItem
|
||||
from bookwyrm.models import Review, Comment, Quotation
|
||||
from bookwyrm.models import Edition
|
||||
from bookwyrm.models import UserFollows, User, UserBlocks
|
||||
from bookwyrm.models.job import ParentJob, ParentTask
|
||||
from bookwyrm.models.job import ParentJob
|
||||
from bookwyrm.tasks import app, IMPORTS
|
||||
from bookwyrm.utils.tar import BookwyrmTarFile
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BookwyrmAwsSession(BotoSession):
|
||||
"""a boto session that always uses settings.AWS_S3_ENDPOINT_URL"""
|
||||
|
||||
def client(self, *args, **kwargs): # pylint: disable=arguments-differ
|
||||
kwargs["endpoint_url"] = settings.AWS_S3_ENDPOINT_URL
|
||||
return super().client("s3", *args, **kwargs)
|
||||
|
||||
|
||||
def select_exports_storage():
|
||||
"""callable to allow for dependency on runtime configuration"""
|
||||
cls = import_string(settings.EXPORTS_STORAGE)
|
||||
return cls()
|
||||
|
||||
|
||||
class BookwyrmExportJob(ParentJob):
|
||||
"""entry for a specific request to export a bookwyrm user"""
|
||||
|
||||
export_data = FileField(null=True)
|
||||
export_data = FileField(null=True, storage=select_exports_storage)
|
||||
export_json = JSONField(null=True, encoder=DjangoJSONEncoder)
|
||||
json_completed = BooleanField(default=False)
|
||||
|
||||
def start_job(self):
|
||||
"""Start the job"""
|
||||
start_export_task.delay(job_id=self.id, no_children=True)
|
||||
"""schedule the first task"""
|
||||
|
||||
return self
|
||||
task = create_export_json_task.delay(job_id=self.id)
|
||||
self.task_id = task.id
|
||||
self.save(update_fields=["task_id"])
|
||||
|
||||
|
||||
@app.task(queue=IMPORTS, base=ParentTask)
|
||||
def start_export_task(**kwargs):
|
||||
"""trigger the child tasks for each row"""
|
||||
job = BookwyrmExportJob.objects.get(id=kwargs["job_id"])
|
||||
@app.task(queue=IMPORTS)
|
||||
def create_export_json_task(job_id):
|
||||
"""create the JSON data for the export"""
|
||||
|
||||
job = BookwyrmExportJob.objects.get(id=job_id)
|
||||
|
||||
# don't start the job if it was stopped from the UI
|
||||
if job.complete:
|
||||
return
|
||||
|
||||
try:
|
||||
# This is where ChildJobs get made
|
||||
job.export_data = ContentFile(b"", str(uuid4()))
|
||||
json_data = json_export(job.user)
|
||||
tar_export(json_data, job.user, job.export_data)
|
||||
job.save(update_fields=["export_data"])
|
||||
job.set_status("active")
|
||||
|
||||
# generate JSON structure
|
||||
job.export_json = export_json(job.user)
|
||||
job.save(update_fields=["export_json"])
|
||||
|
||||
# create archive in separate task
|
||||
create_archive_task.delay(job_id=job.id)
|
||||
except Exception as err: # pylint: disable=broad-except
|
||||
logger.exception("User Export Job %s Failed with error: %s", job.id, err)
|
||||
logger.exception(
|
||||
"create_export_json_task for %s failed with error: %s", job, err
|
||||
)
|
||||
job.set_status("failed")
|
||||
|
||||
job.set_status("complete")
|
||||
|
||||
def archive_file_location(file, directory="") -> str:
|
||||
"""get the relative location of a file inside the archive"""
|
||||
return os.path.join(directory, file.name)
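archive_file_location keeps the file's storage-relative name and optionally nests it under a directory inside the archive; two hedged examples (the FieldFile stand-in is hypothetical):

    # Hedged examples of the helper above; the file name is illustrative.
    class _FakeFieldFile:
        name = "covers/abc123.jpg"

    archive_file_location(_FakeFieldFile())                      # "covers/abc123.jpg"
    archive_file_location(_FakeFieldFile(), directory="images")  # "images/covers/abc123.jpg"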
|
||||
|
||||
|
||||
def tar_export(json_data: str, user, file):
|
||||
"""wrap the export information in a tar file"""
|
||||
file.open("wb")
|
||||
with BookwyrmTarFile.open(mode="w:gz", fileobj=file) as tar:
|
||||
tar.write_bytes(json_data.encode("utf-8"))
|
||||
def add_file_to_s3_tar(s3_tar: S3Tar, storage, file, directory=""):
|
||||
"""
|
||||
add file to S3Tar inside directory, keeping any directories under its
|
||||
storage location
|
||||
"""
|
||||
s3_tar.add_file(
|
||||
os.path.join(storage.location, file.name),
|
||||
folder=os.path.dirname(archive_file_location(file, directory=directory)),
|
||||
)
|
||||
|
||||
# Add avatar image if present
|
||||
if getattr(user, "avatar", False):
|
||||
tar.add_image(user.avatar, filename="avatar")
|
||||
|
||||
@app.task(queue=IMPORTS)
|
||||
def create_archive_task(job_id):
|
||||
"""create the archive containing the JSON file and additional files"""
|
||||
|
||||
job = BookwyrmExportJob.objects.get(id=job_id)
|
||||
|
||||
# don't start the job if it was stopped from the UI
|
||||
if job.complete:
|
||||
return
|
||||
|
||||
try:
|
||||
export_task_id = str(job.task_id)
|
||||
archive_filename = f"{export_task_id}.tar.gz"
|
||||
export_json_bytes = DjangoJSONEncoder().encode(job.export_json).encode("utf-8")
|
||||
|
||||
user = job.user
|
||||
editions = get_books_for_user(user)
|
||||
for book in editions:
|
||||
if getattr(book, "cover", False):
|
||||
tar.add_image(book.cover)
|
||||
|
||||
file.close()
|
||||
if settings.USE_S3:
|
||||
# Storage for writing temporary files
|
||||
exports_storage = storage_backends.ExportsS3Storage()
|
||||
|
||||
# Handle for creating the final archive
|
||||
s3_tar = S3Tar(
|
||||
exports_storage.bucket_name,
|
||||
os.path.join(exports_storage.location, archive_filename),
|
||||
session=BookwyrmAwsSession(),
|
||||
)
|
||||
|
||||
# Save JSON file to a temporary location
|
||||
export_json_tmp_file = os.path.join(export_task_id, "archive.json")
|
||||
exports_storage.save(
|
||||
export_json_tmp_file,
|
||||
ContentFile(export_json_bytes),
|
||||
)
|
||||
s3_tar.add_file(
|
||||
os.path.join(exports_storage.location, export_json_tmp_file)
|
||||
)
|
||||
|
||||
# Add images to TAR
|
||||
images_storage = storage_backends.ImagesStorage()
|
||||
|
||||
if user.avatar:
|
||||
add_file_to_s3_tar(s3_tar, images_storage, user.avatar)
|
||||
|
||||
for edition in editions:
|
||||
if edition.cover:
|
||||
add_file_to_s3_tar(
|
||||
s3_tar, images_storage, edition.cover, directory="images"
|
||||
)
|
||||
|
||||
# Create archive and store file name
|
||||
s3_tar.tar()
|
||||
job.export_data = archive_filename
|
||||
job.save(update_fields=["export_data"])
|
||||
|
||||
# Delete temporary files
|
||||
exports_storage.delete(export_json_tmp_file)
|
||||
|
||||
else:
|
||||
job.export_data = archive_filename
|
||||
with job.export_data.open("wb") as tar_file:
|
||||
with BookwyrmTarFile.open(mode="w:gz", fileobj=tar_file) as tar:
|
||||
# save json file
|
||||
tar.write_bytes(export_json_bytes)
|
||||
|
||||
# Add avatar image if present
|
||||
if user.avatar:
|
||||
tar.add_image(user.avatar)
|
||||
|
||||
for edition in editions:
|
||||
if edition.cover:
|
||||
tar.add_image(edition.cover, directory="images")
|
||||
job.save(update_fields=["export_data"])
|
||||
|
||||
job.set_status("completed")
|
||||
|
||||
except Exception as err: # pylint: disable=broad-except
|
||||
logger.exception("create_archive_task for %s failed with error: %s", job, err)
|
||||
job.set_status("failed")
|
||||
|
||||
|
||||
def json_export(
|
||||
user,
|
||||
): # pylint: disable=too-many-locals, too-many-statements, too-many-branches
|
||||
"""Generate an export for a user"""
|
||||
def export_json(user: User):
|
||||
"""create export JSON"""
|
||||
data = export_user(user) # in the root of the JSON structure
|
||||
data["settings"] = export_settings(user)
|
||||
data["goals"] = export_goals(user)
|
||||
data["books"] = export_books(user)
|
||||
data["saved_lists"] = export_saved_lists(user)
|
||||
data["follows"] = export_follows(user)
|
||||
data["blocks"] = export_blocks(user)
|
||||
return data
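The resulting document is the user's ActivityPub representation with the non-AP data grafted on as extra keys; a hedged outline of its shape (keys taken from the function above, values elided):

    # Hedged outline of the export JSON assembled above (values omitted).
    {
        "id": "...", "type": "Person", "icon": {...},  # user as AP object
        "settings": {...},     # export_settings()
        "goals": [...],        # export_goals()
        "books": [...],        # export_books(), one entry per edition
        "saved_lists": [...],  # remote IDs only
        "follows": [...],      # remote IDs only
        "blocks": [...],       # remote IDs only
    }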
|
||||
|
||||
# User as AP object
|
||||
exported_user = user.to_activity()
|
||||
# I don't love this but it prevents a JSON encoding error
|
||||
# when there is no user image
|
||||
if isinstance(
|
||||
exported_user["icon"],
|
||||
dataclasses._MISSING_TYPE, # pylint: disable=protected-access
|
||||
):
|
||||
exported_user["icon"] = {}
|
||||
|
||||
def export_user(user: User):
|
||||
"""export user data"""
|
||||
data = user.to_activity()
|
||||
if user.avatar:
|
||||
data["icon"]["url"] = archive_file_location(user.avatar)
|
||||
else:
|
||||
# change the URL to be relative to the JSON file
|
||||
file_type = exported_user["icon"]["url"].rsplit(".", maxsplit=1)[-1]
|
||||
filename = f"avatar.{file_type}"
|
||||
exported_user["icon"]["url"] = filename
|
||||
data["icon"] = {}
|
||||
return data
|
||||
|
||||
# Additional settings - can't be serialized as AP
|
||||
|
||||
def export_settings(user: User):
|
||||
"""Additional settings - can't be serialized as AP"""
|
||||
vals = [
|
||||
"show_goal",
|
||||
"preferred_timezone",
|
||||
"default_post_privacy",
|
||||
"show_suggested_users",
|
||||
]
|
||||
exported_user["settings"] = {}
|
||||
for k in vals:
|
||||
exported_user["settings"][k] = getattr(user, k)
|
||||
return {k: getattr(user, k) for k in vals}
|
||||
|
||||
# Reading goals - can't be serialized as AP
|
||||
reading_goals = AnnualGoal.objects.filter(user=user).distinct()
|
||||
exported_user["goals"] = []
|
||||
for goal in reading_goals:
|
||||
exported_user["goals"].append(
|
||||
{"goal": goal.goal, "year": goal.year, "privacy": goal.privacy}
|
||||
)
|
||||
|
||||
# Reading history - can't be serialized as AP
|
||||
readthroughs = ReadThrough.objects.filter(user=user).distinct().values()
|
||||
readthroughs = list(readthroughs)
|
||||
def export_saved_lists(user: User):
|
||||
"""add user saved lists to export JSON"""
|
||||
return [l.remote_id for l in user.saved_lists.all()]
|
||||
|
||||
# Books
|
||||
editions = get_books_for_user(user)
|
||||
exported_user["books"] = []
|
||||
|
||||
for edition in editions:
|
||||
book = {}
|
||||
book["work"] = edition.parent_work.to_activity()
|
||||
book["edition"] = edition.to_activity()
|
||||
|
||||
if book["edition"].get("cover"):
|
||||
# change the URL to be relative to the JSON file
|
||||
filename = book["edition"]["cover"]["url"].rsplit("/", maxsplit=1)[-1]
|
||||
book["edition"]["cover"]["url"] = f"covers/{filename}"
|
||||
|
||||
# authors
|
||||
book["authors"] = []
|
||||
for author in edition.authors.all():
|
||||
book["authors"].append(author.to_activity())
|
||||
|
||||
# Shelves this book is on
|
||||
# Every ShelfBook is this book so we don't bother serializing it
|
||||
book["shelves"] = []
|
||||
shelf_books = (
|
||||
ShelfBook.objects.select_related("shelf")
|
||||
.filter(user=user, book=edition)
|
||||
.distinct()
|
||||
)
|
||||
|
||||
for shelfbook in shelf_books:
|
||||
book["shelves"].append(shelfbook.shelf.to_activity())
|
||||
|
||||
# Lists and ListItems
|
||||
# ListItems include "notes" and "approved" so we need them
|
||||
# even though we know it's this book
|
||||
book["lists"] = []
|
||||
list_items = ListItem.objects.filter(book=edition, user=user).distinct()
|
||||
|
||||
for item in list_items:
|
||||
list_info = item.book_list.to_activity()
|
||||
list_info[
|
||||
"privacy"
|
||||
] = item.book_list.privacy # this isn't serialized so we add it
|
||||
list_info["list_item"] = item.to_activity()
|
||||
book["lists"].append(list_info)
|
||||
|
||||
# Statuses
|
||||
# Can't use select_subclasses here because
|
||||
# we need to filter on the "book" value,
|
||||
# which is not available on an ordinary Status
|
||||
for status in ["comments", "quotations", "reviews"]:
|
||||
book[status] = []
|
||||
|
||||
comments = Comment.objects.filter(user=user, book=edition).all()
|
||||
for status in comments:
|
||||
obj = status.to_activity()
|
||||
obj["progress"] = status.progress
|
||||
obj["progress_mode"] = status.progress_mode
|
||||
book["comments"].append(obj)
|
||||
|
||||
quotes = Quotation.objects.filter(user=user, book=edition).all()
|
||||
for status in quotes:
|
||||
obj = status.to_activity()
|
||||
obj["position"] = status.position
|
||||
obj["endposition"] = status.endposition
|
||||
obj["position_mode"] = status.position_mode
|
||||
book["quotations"].append(obj)
|
||||
|
||||
reviews = Review.objects.filter(user=user, book=edition).all()
|
||||
for status in reviews:
|
||||
obj = status.to_activity()
|
||||
book["reviews"].append(obj)
|
||||
|
||||
# readthroughs can't be serialized to activity
|
||||
book_readthroughs = (
|
||||
ReadThrough.objects.filter(user=user, book=edition).distinct().values()
|
||||
)
|
||||
book["readthroughs"] = list(book_readthroughs)
|
||||
|
||||
# append everything
|
||||
exported_user["books"].append(book)
|
||||
|
||||
# saved book lists - just the remote id
|
||||
saved_lists = List.objects.filter(id__in=user.saved_lists.all()).distinct()
|
||||
exported_user["saved_lists"] = [l.remote_id for l in saved_lists]
|
||||
|
||||
# follows - just the remote id
|
||||
def export_follows(user: User):
|
||||
"""add user follows to export JSON"""
|
||||
follows = UserFollows.objects.filter(user_subject=user).distinct()
|
||||
following = User.objects.filter(userfollows_user_object__in=follows).distinct()
|
||||
exported_user["follows"] = [f.remote_id for f in following]
|
||||
return [f.remote_id for f in following]
|
||||
|
||||
# blocks - just the remote id
|
||||
|
||||
def export_blocks(user: User):
|
||||
"""add user blocks to export JSON"""
|
||||
blocks = UserBlocks.objects.filter(user_subject=user).distinct()
|
||||
blocking = User.objects.filter(userblocks_user_object__in=blocks).distinct()
|
||||
return [b.remote_id for b in blocking]
|
||||
|
||||
exported_user["blocks"] = [b.remote_id for b in blocking]
|
||||
|
||||
return DjangoJSONEncoder().encode(exported_user)
|
||||
def export_goals(user: User):
|
||||
"""add user reading goals to export JSON"""
|
||||
reading_goals = AnnualGoal.objects.filter(user=user).distinct()
|
||||
return [
|
||||
{"goal": goal.goal, "year": goal.year, "privacy": goal.privacy}
|
||||
for goal in reading_goals
|
||||
]
|
||||
|
||||
|
||||
def export_books(user: User):
|
||||
"""add books to export JSON"""
|
||||
editions = get_books_for_user(user)
|
||||
return [export_book(user, edition) for edition in editions]
|
||||
|
||||
|
||||
def export_book(user: User, edition: Edition):
|
||||
"""add book to export JSON"""
|
||||
data = {}
|
||||
data["work"] = edition.parent_work.to_activity()
|
||||
data["edition"] = edition.to_activity()
|
||||
|
||||
if edition.cover:
|
||||
data["edition"]["cover"]["url"] = archive_file_location(
|
||||
edition.cover, directory="images"
|
||||
)
|
||||
|
||||
# authors
|
||||
data["authors"] = [author.to_activity() for author in edition.authors.all()]
|
||||
|
||||
# Shelves this book is on
|
||||
# Every ShelfBook is this book so we don't bother serializing it
|
||||
shelf_books = (
|
||||
ShelfBook.objects.select_related("shelf")
|
||||
.filter(user=user, book=edition)
|
||||
.distinct()
|
||||
)
|
||||
data["shelves"] = [shelfbook.shelf.to_activity() for shelfbook in shelf_books]
|
||||
|
||||
# Lists and ListItems
|
||||
# ListItems include "notes" and "approved" so we need them
|
||||
# even though we know it's this book
|
||||
list_items = ListItem.objects.filter(book=edition, user=user).distinct()
|
||||
|
||||
data["lists"] = []
|
||||
for item in list_items:
|
||||
list_info = item.book_list.to_activity()
|
||||
list_info[
|
||||
"privacy"
|
||||
] = item.book_list.privacy # this isn't serialized so we add it
|
||||
list_info["list_item"] = item.to_activity()
|
||||
data["lists"].append(list_info)
|
||||
|
||||
# Statuses
|
||||
# Can't use select_subclasses here because
|
||||
# we need to filter on the "book" value,
|
||||
# which is not available on an ordinary Status
|
||||
for status in ["comments", "quotations", "reviews"]:
|
||||
data[status] = []
|
||||
|
||||
comments = Comment.objects.filter(user=user, book=edition).all()
|
||||
for status in comments:
|
||||
obj = status.to_activity()
|
||||
obj["progress"] = status.progress
|
||||
obj["progress_mode"] = status.progress_mode
|
||||
data["comments"].append(obj)
|
||||
|
||||
quotes = Quotation.objects.filter(user=user, book=edition).all()
|
||||
for status in quotes:
|
||||
obj = status.to_activity()
|
||||
obj["position"] = status.position
|
||||
obj["endposition"] = status.endposition
|
||||
obj["position_mode"] = status.position_mode
|
||||
data["quotations"].append(obj)
|
||||
|
||||
reviews = Review.objects.filter(user=user, book=edition).all()
|
||||
data["reviews"] = [status.to_activity() for status in reviews]
|
||||
|
||||
# readthroughs can't be serialized to activity
|
||||
book_readthroughs = (
|
||||
ReadThrough.objects.filter(user=user, book=edition).distinct().values()
|
||||
)
|
||||
data["readthroughs"] = list(book_readthroughs)
|
||||
return data
|
||||
|
||||
|
||||
def get_books_for_user(user):
|
||||
|
|
|
@ -42,20 +42,23 @@ def start_import_task(**kwargs):
|
|||
try:
|
||||
archive_file.open("rb")
|
||||
with BookwyrmTarFile.open(mode="r:gz", fileobj=archive_file) as tar:
|
||||
job.import_data = json.loads(tar.read("archive.json").decode("utf-8"))
|
||||
json_filename = next(
|
||||
filter(lambda n: n.startswith("archive"), tar.getnames())
|
||||
)
|
||||
job.import_data = json.loads(tar.read(json_filename).decode("utf-8"))
|
||||
|
||||
if "include_user_profile" in job.required:
|
||||
update_user_profile(job.user, tar, job.import_data)
|
||||
if "include_user_settings" in job.required:
|
||||
update_user_settings(job.user, job.import_data)
|
||||
if "include_goals" in job.required:
|
||||
update_goals(job.user, job.import_data.get("goals"))
|
||||
update_goals(job.user, job.import_data.get("goals", []))
|
||||
if "include_saved_lists" in job.required:
|
||||
upsert_saved_lists(job.user, job.import_data.get("saved_lists"))
|
||||
upsert_saved_lists(job.user, job.import_data.get("saved_lists", []))
|
||||
if "include_follows" in job.required:
|
||||
upsert_follows(job.user, job.import_data.get("follows"))
|
||||
upsert_follows(job.user, job.import_data.get("follows", []))
|
||||
if "include_blocks" in job.required:
|
||||
upsert_user_blocks(job.user, job.import_data.get("blocks"))
|
||||
upsert_user_blocks(job.user, job.import_data.get("blocks", []))
|
||||
|
||||
process_books(job, tar)
|
||||
|
||||
|
@ -212,7 +215,7 @@ def upsert_statuses(user, cls, data, book_remote_id):
|
|||
instance.save() # save and broadcast
|
||||
|
||||
else:
|
||||
logger.info("User does not have permission to import statuses")
|
||||
logger.warning("User does not have permission to import statuses")
|
||||
|
||||
|
||||
def upsert_lists(user, lists, book_id):
|
||||
|
|
|
@ -260,12 +260,12 @@ class PrivacyField(ActivitypubFieldMixin, models.CharField):
|
|||
|
||||
if to == [self.public]:
|
||||
setattr(instance, self.name, "public")
|
||||
elif self.public in cc:
|
||||
setattr(instance, self.name, "unlisted")
|
||||
elif to == [user.followers_url]:
|
||||
setattr(instance, self.name, "followers")
|
||||
elif cc == []:
|
||||
setattr(instance, self.name, "direct")
|
||||
elif self.public in cc:
|
||||
setattr(instance, self.name, "unlisted")
|
||||
else:
|
||||
setattr(instance, self.name, "followers")
|
||||
return original == getattr(instance, self.name)
|
||||
|
@ -482,7 +482,7 @@ class ImageField(ActivitypubFieldMixin, models.ImageField):
|
|||
if not url:
|
||||
return None
|
||||
|
||||
return activitypub.Document(url=url, name=alt)
|
||||
return activitypub.Image(url=url, name=alt)
|
||||
|
||||
def field_from_activity(self, value, allow_external_connections=True):
|
||||
image_slug = value
|
||||
|
|
|
@ -135,8 +135,7 @@ class ParentJob(Job):
|
|||
)
|
||||
app.control.revoke(list(tasks))
|
||||
|
||||
for task in self.pending_child_jobs:
|
||||
task.update(status=self.Status.STOPPED)
|
||||
self.pending_child_jobs.update(status=self.Status.STOPPED)
|
||||
|
||||
@property
|
||||
def has_completed(self):
|
||||
|
@ -248,7 +247,7 @@ class SubTask(app.Task):
|
|||
"""
|
||||
|
||||
def before_start(
|
||||
self, task_id, args, kwargs
|
||||
self, task_id, *args, **kwargs
|
||||
): # pylint: disable=no-self-use, unused-argument
|
||||
"""Handler called before the task starts. Override.
|
||||
|
||||
|
@ -272,7 +271,7 @@ class SubTask(app.Task):
|
|||
child_job.set_status(ChildJob.Status.ACTIVE)
|
||||
|
||||
def on_success(
|
||||
self, retval, task_id, args, kwargs
|
||||
self, retval, task_id, *args, **kwargs
|
||||
): # pylint: disable=no-self-use, unused-argument
|
||||
"""Run by the worker if the task executes successfully. Override.
|
||||
|
||||
|
|
|
@ -10,8 +10,11 @@ from django.dispatch import receiver
|
|||
from django.utils import timezone
|
||||
from model_utils import FieldTracker
|
||||
|
||||
from bookwyrm.connectors.abstract_connector import get_data
|
||||
from bookwyrm.preview_images import generate_site_preview_image_task
|
||||
from bookwyrm.settings import DOMAIN, ENABLE_PREVIEW_IMAGES, STATIC_FULL_URL
|
||||
from bookwyrm.settings import RELEASE_API
|
||||
from bookwyrm.tasks import app, MISC
|
||||
from .base_model import BookWyrmModel, new_access_code
|
||||
from .user import User
|
||||
from .fields import get_absolute_url
|
||||
|
@ -45,7 +48,7 @@ class SiteSettings(SiteModel):
|
|||
default_theme = models.ForeignKey(
|
||||
"Theme", null=True, blank=True, on_delete=models.SET_NULL
|
||||
)
|
||||
version = models.CharField(null=True, blank=True, max_length=10)
|
||||
available_version = models.CharField(null=True, blank=True, max_length=10)
|
||||
|
||||
# admin setup options
|
||||
install_mode = models.BooleanField(default=False)
|
||||
|
@ -245,3 +248,14 @@ def preview_image(instance, *args, **kwargs):
|
|||
|
||||
if len(changed_fields) > 0:
|
||||
generate_site_preview_image_task.delay()
|
||||
|
||||
|
||||
@app.task(queue=MISC)
|
||||
def check_for_updates_task():
|
||||
"""See if git remote knows about a new version"""
|
||||
site = SiteSettings.objects.get()
|
||||
release = get_data(RELEASE_API, timeout=3)
|
||||
available_version = release.get("tag_name", None)
|
||||
if available_version:
|
||||
site.available_version = available_version
|
||||
site.save(update_fields=["available_version"])
|
||||
|
|
|
@ -12,6 +12,8 @@ from django.db.models import Q
|
|||
from django.dispatch import receiver
|
||||
from django.template.loader import get_template
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.translation import ngettext_lazy
|
||||
from model_utils import FieldTracker
|
||||
from model_utils.managers import InheritanceManager
|
||||
|
||||
|
@ -78,6 +80,10 @@ class Status(OrderedCollectionPageMixin, BookWyrmModel):
|
|||
"""default sorting"""
|
||||
|
||||
ordering = ("-published_date",)
|
||||
indexes = [
|
||||
models.Index(fields=["remote_id"]),
|
||||
models.Index(fields=["thread_id"]),
|
||||
]
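A hedged note on the new indexes: remote_id lookups happen whenever a federated object is resolved, and thread_id groups a status with its replies, so these are presumably the queries the indexes are meant to serve:

    # Hedged examples of queries the new indexes should speed up;
    # the remote_id value is illustrative.
    Status.objects.filter(remote_id="https://example.net/user/mouse/status/1")
    Status.objects.filter(thread_id=42).order_by("published_date")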
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
"""save and notify"""
|
||||
|
@ -107,14 +113,14 @@ class Status(OrderedCollectionPageMixin, BookWyrmModel):
|
|||
@property
|
||||
def recipients(self):
|
||||
"""tagged users who definitely need to get this status in broadcast"""
|
||||
mentions = [u for u in self.mention_users.all() if not u.local]
|
||||
mentions = {u for u in self.mention_users.all() if not u.local}
|
||||
if (
|
||||
hasattr(self, "reply_parent")
|
||||
and self.reply_parent
|
||||
and not self.reply_parent.user.local
|
||||
):
|
||||
mentions.append(self.reply_parent.user)
|
||||
return list(set(mentions))
|
||||
mentions.add(self.reply_parent.user)
|
||||
return list(mentions)
|
||||
|
||||
@classmethod
|
||||
def ignore_activity(
|
||||
|
@ -178,6 +184,24 @@ class Status(OrderedCollectionPageMixin, BookWyrmModel):
|
|||
"""you can't boost dms"""
|
||||
return self.privacy in ["unlisted", "public"]
|
||||
|
||||
@property
|
||||
def page_title(self):
|
||||
"""title of the page when only this status is shown"""
|
||||
return _("%(display_name)s's status") % {"display_name": self.user.display_name}
|
||||
|
||||
@property
|
||||
def page_description(self):
|
||||
"""description of the page in meta tags when only this status is shown"""
|
||||
return None
|
||||
|
||||
@property
|
||||
def page_image(self):
|
||||
"""image to use as preview in meta tags when only this status is shown"""
|
||||
if self.mention_books.exists():
|
||||
book = self.mention_books.first()
|
||||
return book.preview_image or book.cover
|
||||
return self.user.preview_image
|
||||
|
||||
def to_replies(self, **kwargs):
|
||||
"""helper function for loading AP serialized replies to a status"""
|
||||
return self.to_ordered_collection(
|
||||
|
@ -301,6 +325,10 @@ class BookStatus(Status):
|
|||
|
||||
abstract = True
|
||||
|
||||
@property
|
||||
def page_image(self):
|
||||
return self.book.preview_image or self.book.cover or super().page_image
|
||||
|
||||
|
||||
class Comment(BookStatus):
|
||||
"""like a review but without a rating and transient"""
|
||||
|
@ -332,17 +360,26 @@ class Comment(BookStatus):
|
|||
|
||||
activity_serializer = activitypub.Comment
|
||||
|
||||
@property
|
||||
def page_title(self):
|
||||
return _("%(display_name)s's comment on %(book_title)s") % {
|
||||
"display_name": self.user.display_name,
|
||||
"book_title": self.book.title,
|
||||
}
|
||||
|
||||
|
||||
class Quotation(BookStatus):
|
||||
"""like a review but without a rating and transient"""
|
||||
|
||||
quote = fields.HtmlField()
|
||||
raw_quote = models.TextField(blank=True, null=True)
|
||||
position = models.IntegerField(
|
||||
validators=[MinValueValidator(0)], null=True, blank=True
|
||||
position = models.TextField(
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
endposition = models.IntegerField(
|
||||
validators=[MinValueValidator(0)], null=True, blank=True
|
||||
endposition = models.TextField(
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
position_mode = models.CharField(
|
||||
max_length=3,
|
||||
|
@ -355,10 +392,10 @@ class Quotation(BookStatus):
|
|||
def _format_position(self) -> Optional[str]:
|
||||
"""serialize page position"""
|
||||
beg = self.position
|
||||
end = self.endposition or 0
|
||||
end = self.endposition
|
||||
if self.position_mode != "PG" or not beg:
|
||||
return None
|
||||
return f"pp. {beg}-{end}" if end > beg else f"p. {beg}"
|
||||
return f"pp. {beg}-{end}" if end else f"p. {beg}"
|
||||
|
||||
@property
|
||||
def pure_content(self):
|
||||
|
@ -374,6 +411,13 @@ class Quotation(BookStatus):
|
|||
|
||||
activity_serializer = activitypub.Quotation
|
||||
|
||||
@property
|
||||
def page_title(self):
|
||||
return _("%(display_name)s's quote from %(book_title)s") % {
|
||||
"display_name": self.user.display_name,
|
||||
"book_title": self.book.title,
|
||||
}
|
||||
|
||||
|
||||
class Review(BookStatus):
|
||||
"""a book review"""
|
||||
|
@ -403,6 +447,13 @@ class Review(BookStatus):
|
|||
"""indicate the book in question for mastodon (or w/e) users"""
|
||||
return self.content
|
||||
|
||||
@property
|
||||
def page_title(self):
|
||||
return _("%(display_name)s's review of %(book_title)s") % {
|
||||
"display_name": self.user.display_name,
|
||||
"book_title": self.book.title,
|
||||
}
|
||||
|
||||
activity_serializer = activitypub.Review
|
||||
pure_type = "Article"
|
||||
|
||||
|
@ -426,6 +477,18 @@ class ReviewRating(Review):
|
|||
template = get_template("snippets/generated_status/rating.html")
|
||||
return template.render({"book": self.book, "rating": self.rating}).strip()
|
||||
|
||||
@property
|
||||
def page_description(self):
|
||||
return ngettext_lazy(
|
||||
"%(display_name)s rated %(book_title)s: %(display_rating).1f star",
|
||||
"%(display_name)s rated %(book_title)s: %(display_rating).1f stars",
|
||||
"display_rating",
|
||||
) % {
|
||||
"display_name": self.user.display_name,
|
||||
"book_title": self.book.title,
|
||||
"display_rating": self.rating,
|
||||
}
|
||||
|
||||
activity_serializer = activitypub.Rating
|
||||
pure_type = "Note"
|
||||
|
||||
|
|
|
@ -198,6 +198,14 @@ class User(OrderedCollectionPageMixin, AbstractUser):
|
|||
hotp_secret = models.CharField(max_length=32, default=None, blank=True, null=True)
|
||||
hotp_count = models.IntegerField(default=0, blank=True, null=True)
|
||||
|
||||
class Meta(AbstractUser.Meta):
|
||||
"""indexes"""
|
||||
|
||||
indexes = [
|
||||
models.Index(fields=["username"]),
|
||||
models.Index(fields=["is_active", "local"]),
|
||||
]
|
||||
|
||||
@property
|
||||
def active_follower_requests(self):
|
||||
"""Follow requests from active users"""
|
||||
|
@ -509,6 +517,13 @@ class KeyPair(ActivitypubMixin, BookWyrmModel):
|
|||
activity_serializer = activitypub.PublicKey
|
||||
serialize_reverse_fields = [("owner", "owner", "id")]
|
||||
|
||||
class Meta:
|
||||
"""indexes"""
|
||||
|
||||
indexes = [
|
||||
models.Index(fields=["remote_id"]),
|
||||
]
|
||||
|
||||
def get_remote_id(self):
|
||||
# self.owner is set by the OneToOneField on User
|
||||
return f"{self.owner.remote_id}/#main-key"
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
""" Generate social media preview images for twitter/mastodon/etc """
|
||||
|
||||
import math
|
||||
import os
|
||||
import textwrap
|
||||
|
@ -42,8 +43,8 @@ def get_imagefont(name, size):
|
|||
return ImageFont.truetype(path, size)
|
||||
except KeyError:
|
||||
logger.error("Font %s not found in config", name)
|
||||
except OSError:
|
||||
logger.error("Could not load font %s from file", name)
|
||||
except OSError as err:
|
||||
logger.error("Could not load font %s from file: %s", name, err)
|
||||
|
||||
return ImageFont.load_default()
|
||||
|
||||
|
@ -59,7 +60,7 @@ def get_font(weight, size=28):
|
|||
font.set_variation_by_name("Bold")
|
||||
if weight == "regular":
|
||||
font.set_variation_by_name("Regular")
|
||||
except AttributeError:
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
return font
|
||||
|
@ -174,11 +175,13 @@ def generate_instance_layer(content_width):
|
|||
site = models.SiteSettings.objects.get()
|
||||
|
||||
if site.logo_small:
|
||||
logo_img = Image.open(site.logo_small)
|
||||
with Image.open(site.logo_small) as logo_img:
|
||||
logo_img.load()
|
||||
else:
|
||||
try:
|
||||
static_path = os.path.join(settings.STATIC_ROOT, "images/logo-small.png")
|
||||
logo_img = Image.open(static_path)
|
||||
with Image.open(static_path) as logo_img:
|
||||
logo_img.load()
|
||||
except FileNotFoundError:
|
||||
logo_img = None
|
||||
|
||||
|
@ -210,18 +213,9 @@ def generate_instance_layer(content_width):
|
|||
|
||||
def generate_rating_layer(rating, content_width):
|
||||
"""Places components for rating preview"""
|
||||
try:
|
||||
icon_star_full = Image.open(
|
||||
os.path.join(settings.STATIC_ROOT, "images/icons/star-full.png")
|
||||
)
|
||||
icon_star_empty = Image.open(
|
||||
os.path.join(settings.STATIC_ROOT, "images/icons/star-empty.png")
|
||||
)
|
||||
icon_star_half = Image.open(
|
||||
os.path.join(settings.STATIC_ROOT, "images/icons/star-half.png")
|
||||
)
|
||||
except FileNotFoundError:
|
||||
return None
|
||||
path_star_full = os.path.join(settings.STATIC_ROOT, "images/icons/star-full.png")
|
||||
path_star_empty = os.path.join(settings.STATIC_ROOT, "images/icons/star-empty.png")
|
||||
path_star_half = os.path.join(settings.STATIC_ROOT, "images/icons/star-half.png")
|
||||
|
||||
icon_size = 64
|
||||
icon_margin = 10
|
||||
|
@ -236,17 +230,23 @@ def generate_rating_layer(rating, content_width):
|
|||
|
||||
position_x = 0
|
||||
|
||||
for _ in range(math.floor(rating)):
|
||||
rating_layer_mask.alpha_composite(icon_star_full, (position_x, 0))
|
||||
position_x = position_x + icon_size + icon_margin
|
||||
try:
|
||||
with Image.open(path_star_full) as icon_star_full:
|
||||
for _ in range(math.floor(rating)):
|
||||
rating_layer_mask.alpha_composite(icon_star_full, (position_x, 0))
|
||||
position_x = position_x + icon_size + icon_margin
|
||||
|
||||
if math.floor(rating) != math.ceil(rating):
|
||||
rating_layer_mask.alpha_composite(icon_star_half, (position_x, 0))
|
||||
position_x = position_x + icon_size + icon_margin
|
||||
if math.floor(rating) != math.ceil(rating):
|
||||
with Image.open(path_star_half) as icon_star_half:
|
||||
rating_layer_mask.alpha_composite(icon_star_half, (position_x, 0))
|
||||
position_x = position_x + icon_size + icon_margin
|
||||
|
||||
for _ in range(5 - math.ceil(rating)):
|
||||
rating_layer_mask.alpha_composite(icon_star_empty, (position_x, 0))
|
||||
position_x = position_x + icon_size + icon_margin
|
||||
with Image.open(path_star_empty) as icon_star_empty:
|
||||
for _ in range(5 - math.ceil(rating)):
|
||||
rating_layer_mask.alpha_composite(icon_star_empty, (position_x, 0))
|
||||
position_x = position_x + icon_size + icon_margin
|
||||
except FileNotFoundError:
|
||||
return None
|
||||
|
||||
rating_layer_mask = rating_layer_mask.getchannel("A")
|
||||
rating_layer_mask = ImageOps.invert(rating_layer_mask)
|
||||
|
@ -289,7 +289,8 @@ def generate_preview_image(
|
|||
texts = texts or {}
|
||||
# Cover
|
||||
try:
|
||||
inner_img_layer = Image.open(picture)
|
||||
with Image.open(picture) as inner_img_layer:
|
||||
inner_img_layer.load()
|
||||
inner_img_layer.thumbnail(
|
||||
(inner_img_width, inner_img_height), Image.Resampling.LANCZOS
|
||||
)
|
||||
|
|
|
@ -19,7 +19,6 @@ DOMAIN = env("DOMAIN")
|
|||
with open("VERSION", encoding="utf-8") as f:
|
||||
version = f.read()
|
||||
version = version.replace("\n", "")
|
||||
f.close()
|
||||
|
||||
VERSION = version
|
||||
|
||||
|
@ -30,6 +29,9 @@ RELEASE_API = env(
|
|||
|
||||
PAGE_LENGTH = env.int("PAGE_LENGTH", 15)
|
||||
DEFAULT_LANGUAGE = env("DEFAULT_LANGUAGE", "English")
|
||||
# TODO: extend maximum age to 1 year once termination of active sessions
|
||||
# is implemented (see bookwyrm-social#2278, bookwyrm-social#3082).
|
||||
SESSION_COOKIE_AGE = env.int("SESSION_COOKIE_AGE", 3600 * 24 * 30) # 1 month
|
||||
|
||||
JS_CACHE = "8a89cad7"
|
||||
|
||||
|
@ -105,6 +107,7 @@ INSTALLED_APPS = [
|
|||
"celery",
|
||||
"django_celery_beat",
|
||||
"imagekit",
|
||||
"pgtrigger",
|
||||
"storages",
|
||||
]
|
||||
|
||||
|
@ -318,6 +321,7 @@ LANGUAGES = [
|
|||
("eu-es", _("Euskara (Basque)")),
|
||||
("gl-es", _("Galego (Galician)")),
|
||||
("it-it", _("Italiano (Italian)")),
|
||||
("ko-kr", _("한국어 (Korean)")),
|
||||
("fi-fi", _("Suomi (Finnish)")),
|
||||
("fr-fr", _("Français (French)")),
|
||||
("lt-lt", _("Lietuvių (Lithuanian)")),
|
||||
|
@ -347,8 +351,7 @@ USE_L10N = True
|
|||
USE_TZ = True
|
||||
|
||||
|
||||
agent = requests.utils.default_user_agent()
|
||||
USER_AGENT = f"{agent} (BookWyrm/{VERSION}; +https://{DOMAIN}/)"
|
||||
USER_AGENT = f"BookWyrm (BookWyrm/{VERSION}; +https://{DOMAIN}/)"
|
||||
|
||||
# Imagekit generated thumbnails
|
||||
ENABLE_THUMBNAIL_GENERATION = env.bool("ENABLE_THUMBNAIL_GENERATION", False)
|
||||
|
@ -371,6 +374,7 @@ if USE_HTTPS:
|
|||
|
||||
USE_S3 = env.bool("USE_S3", False)
|
||||
USE_AZURE = env.bool("USE_AZURE", False)
|
||||
S3_SIGNED_URL_EXPIRY = env.int("S3_SIGNED_URL_EXPIRY", 900)
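S3_SIGNED_URL_EXPIRY bounds how long a presigned export download stays valid. A hedged sketch of how such an expiry is typically applied with boto3 (bucket and key are illustrative, and this may not mirror BookWyrm's actual call site):

    # Hedged sketch: presigning an export download with the configured expiry.
    import boto3
    from django.conf import settings

    s3 = boto3.client("s3", endpoint_url=settings.AWS_S3_ENDPOINT_URL)
    url = s3.generate_presigned_url(
        "get_object",
        Params={
            "Bucket": settings.AWS_STORAGE_BUCKET_NAME,
            "Key": "exports/example.tar.gz",  # hypothetical key
        },
        ExpiresIn=settings.S3_SIGNED_URL_EXPIRY,
    )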
|
||||
|
||||
if USE_S3:
|
||||
# AWS settings
|
||||
|
@ -382,19 +386,34 @@ if USE_S3:
|
|||
AWS_S3_ENDPOINT_URL = env("AWS_S3_ENDPOINT_URL", None)
|
||||
AWS_DEFAULT_ACL = "public-read"
|
||||
AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
|
||||
AWS_S3_URL_PROTOCOL = env("AWS_S3_URL_PROTOCOL", f"{PROTOCOL}:")
|
||||
# S3 Static settings
|
||||
STATIC_LOCATION = "static"
|
||||
STATIC_URL = f"{PROTOCOL}://{AWS_S3_CUSTOM_DOMAIN}/{STATIC_LOCATION}/"
|
||||
STATIC_URL = f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}/{STATIC_LOCATION}/"
|
||||
STATIC_FULL_URL = STATIC_URL
|
||||
STATICFILES_STORAGE = "bookwyrm.storage_backends.StaticStorage"
|
||||
# S3 Media settings
|
||||
MEDIA_LOCATION = "images"
|
||||
MEDIA_URL = f"{PROTOCOL}://{AWS_S3_CUSTOM_DOMAIN}/{MEDIA_LOCATION}/"
|
||||
MEDIA_URL = f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}/{MEDIA_LOCATION}/"
|
||||
MEDIA_FULL_URL = MEDIA_URL
|
||||
STATIC_FULL_URL = STATIC_URL
|
||||
DEFAULT_FILE_STORAGE = "bookwyrm.storage_backends.ImagesStorage"
|
||||
CSP_DEFAULT_SRC = ["'self'", AWS_S3_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS
|
||||
CSP_SCRIPT_SRC = ["'self'", AWS_S3_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS
|
||||
# S3 Exports settings
|
||||
EXPORTS_STORAGE = "bookwyrm.storage_backends.ExportsS3Storage"
|
||||
# Content Security Policy
|
||||
CSP_DEFAULT_SRC = [
|
||||
"'self'",
|
||||
f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}"
|
||||
if AWS_S3_CUSTOM_DOMAIN
|
||||
else None,
|
||||
] + CSP_ADDITIONAL_HOSTS
|
||||
CSP_SCRIPT_SRC = [
|
||||
"'self'",
|
||||
f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}"
|
||||
if AWS_S3_CUSTOM_DOMAIN
|
||||
else None,
|
||||
] + CSP_ADDITIONAL_HOSTS
|
||||
elif USE_AZURE:
|
||||
# Azure settings
|
||||
AZURE_ACCOUNT_NAME = env("AZURE_ACCOUNT_NAME")
|
||||
AZURE_ACCOUNT_KEY = env("AZURE_ACCOUNT_KEY")
|
||||
AZURE_CONTAINER = env("AZURE_CONTAINER")
|
||||
|
@ -404,6 +423,7 @@ elif USE_AZURE:
|
|||
STATIC_URL = (
|
||||
f"{PROTOCOL}://{AZURE_CUSTOM_DOMAIN}/{AZURE_CONTAINER}/{STATIC_LOCATION}/"
|
||||
)
|
||||
STATIC_FULL_URL = STATIC_URL
|
||||
STATICFILES_STORAGE = "bookwyrm.storage_backends.AzureStaticStorage"
|
||||
# Azure Media settings
|
||||
MEDIA_LOCATION = "images"
|
||||
|
@ -411,15 +431,24 @@ elif USE_AZURE:
|
|||
f"{PROTOCOL}://{AZURE_CUSTOM_DOMAIN}/{AZURE_CONTAINER}/{MEDIA_LOCATION}/"
|
||||
)
|
||||
MEDIA_FULL_URL = MEDIA_URL
|
||||
STATIC_FULL_URL = STATIC_URL
|
||||
DEFAULT_FILE_STORAGE = "bookwyrm.storage_backends.AzureImagesStorage"
|
||||
# Azure Exports settings
|
||||
EXPORTS_STORAGE = None # not implemented yet
|
||||
# Content Security Policy
|
||||
CSP_DEFAULT_SRC = ["'self'", AZURE_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS
|
||||
CSP_SCRIPT_SRC = ["'self'", AZURE_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS
|
||||
else:
|
||||
# Static settings
|
||||
STATIC_URL = "/static/"
|
||||
STATIC_FULL_URL = f"{PROTOCOL}://{DOMAIN}{STATIC_URL}"
|
||||
STATICFILES_STORAGE = "django.contrib.staticfiles.storage.StaticFilesStorage"
|
||||
# Media settings
|
||||
MEDIA_URL = "/images/"
|
||||
MEDIA_FULL_URL = f"{PROTOCOL}://{DOMAIN}{MEDIA_URL}"
|
||||
STATIC_FULL_URL = f"{PROTOCOL}://{DOMAIN}{STATIC_URL}"
|
||||
DEFAULT_FILE_STORAGE = "django.core.files.storage.FileSystemStorage"
|
||||
# Exports settings
|
||||
EXPORTS_STORAGE = "bookwyrm.storage_backends.ExportsFileStorage"
|
||||
# Content Security Policy
|
||||
CSP_DEFAULT_SRC = ["'self'"] + CSP_ADDITIONAL_HOSTS
|
||||
CSP_SCRIPT_SRC = ["'self'"] + CSP_ADDITIONAL_HOSTS
|
||||
|
||||
|
@ -443,4 +472,6 @@ if HTTP_X_FORWARDED_PROTO:
|
|||
# user with the same username - in which case you should change it!
|
||||
INSTANCE_ACTOR_USERNAME = "bookwyrm.instance.actor"
|
||||
|
||||
DATA_UPLOAD_MAX_MEMORY_SIZE = env.int("DATA_UPLOAD_MAX_MEMORY_SIZE", (1024**2 * 100))
|
||||
# We only allow specifying DATA_UPLOAD_MAX_MEMORY_SIZE in MiB from .env
|
||||
# (note the difference in variable names).
|
||||
DATA_UPLOAD_MAX_MEMORY_SIZE = env.int("DATA_UPLOAD_MAX_MEMORY_MiB", 100) << 20
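The left shift converts mebibytes to bytes, so the default of 100 MiB becomes 104,857,600 bytes before being handed to Django's DATA_UPLOAD_MAX_MEMORY_SIZE; a quick check:

    # Quick check of the MiB-to-bytes conversion above.
    assert 100 << 20 == 100 * 1024 ** 2 == 104_857_600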
|
||||
|
|
|
@ -111,6 +111,10 @@ const tries = {
|
|||
},
|
||||
},
|
||||
f: {
|
||||
b: {
|
||||
2: "FB2",
|
||||
3: "FB3",
|
||||
},
|
||||
l: {
|
||||
a: {
|
||||
c: "FLAC",
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
"""Handles backends for storages"""
|
||||
import os
|
||||
from tempfile import SpooledTemporaryFile
|
||||
from django.core.files.storage import FileSystemStorage
|
||||
from storages.backends.s3boto3 import S3Boto3Storage
|
||||
from storages.backends.azure_storage import AzureStorage
|
||||
|
||||
|
@ -61,3 +62,18 @@ class AzureImagesStorage(AzureStorage): # pylint: disable=abstract-method
|
|||
|
||||
location = "images"
|
||||
overwrite_files = False
|
||||
|
||||
|
||||
class ExportsFileStorage(FileSystemStorage): # pylint: disable=abstract-method
|
||||
"""Storage class for exports contents with local files"""
|
||||
|
||||
location = "exports"
|
||||
overwrite_files = False
|
||||
|
||||
|
||||
class ExportsS3Storage(S3Boto3Storage): # pylint: disable=abstract-method
|
||||
"""Storage class for exports contents with S3"""
|
||||
|
||||
location = "exports"
|
||||
default_acl = None
|
||||
overwrite_files = False
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<h1 class="title">{% trans "File too large" %}</h1>
|
||||
<p class="content">{% trans "The file you are uploading is too large." %}</p>
|
||||
<p class="content">
|
||||
{% blocktrans %}
|
||||
{% blocktrans trimmed %}
|
||||
You can try using a smaller file, or ask your BookWyrm server administrator to increase the <code>DATA_UPLOAD_MAX_MEMORY_SIZE</code> setting.
|
||||
{% endblocktrans %}
|
||||
</p>
|
||||
|
|
|
@ -55,6 +55,8 @@
|
|||
|
||||
<p class="field"><label class="label" for="id_wikipedia_link">{% trans "Wikipedia link:" %}</label> {{ form.wikipedia_link }}</p>
|
||||
|
||||
<p class="field"><label class="label" for="id_wikidata">{% trans "Wikidata:" %}</label> {{ form.wikidata }}</p>
|
||||
|
||||
{% include 'snippets/form_errors.html' with errors_list=form.wikipedia_link.errors id="desc_wikipedia_link" %}
|
||||
|
||||
<p class="field"><label class="label" for="id_website">{% trans "Website:" %}</label> {{ form.website }}</p>
|
||||
|
|
|
@ -9,7 +9,8 @@
|
|||
{% block title %}{{ book|book_title }}{% endblock %}
|
||||
|
||||
{% block opengraph %}
|
||||
{% include 'snippets/opengraph.html' with title=book.title description=book|book_description image=book.preview_image %}
|
||||
{% firstof book.preview_image book.cover as book_image %}
|
||||
{% include 'snippets/opengraph.html' with title=book.title description=book|book_description image=book_image %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
|
@ -44,18 +45,22 @@
|
|||
{% endif %}
|
||||
|
||||
{% if book.series %}
|
||||
<meta itemprop="position" content="{{ book.series_number }}">
|
||||
{% spaceless %}
|
||||
<span itemprop="isPartOf" itemscope itemtype="https://schema.org/BookSeries">
|
||||
{% if book.authors.exists %}
|
||||
<a href="{% url 'book-series-by' book.authors.first.id %}?series_name={{ book.series | urlencode }}"
|
||||
itemprop="url">
|
||||
{% endif %}
|
||||
<span itemprop="name">{{ book.series }}</span>
|
||||
{% if book.series_number %} #{{ book.series_number }}{% endif %}
|
||||
{% if book.authors.exists %}
|
||||
</a>
|
||||
{% endif %}
|
||||
</span>
|
||||
{% if book.series_number %}
|
||||
<span>, #</span>
|
||||
<span itemprop="position">{{ book.series_number }}</span>
|
||||
{% endif %}
|
||||
{% endspaceless %}
|
||||
{% endif %}
|
||||
</p>
|
||||
{% endif %}
|
||||
|
|
|
@ -6,8 +6,8 @@
|
|||
{% block content %}
|
||||
<h1 class="title">{% trans "Confirm your email address" %}</h1>
|
||||
|
||||
<div class="columns">
|
||||
<div class="column">
|
||||
<div class="columns is-multiline">
|
||||
<div class="column is-full is-half-desktop">
|
||||
<div class="block content">
|
||||
<section class="block">
|
||||
<p>{% trans "A confirmation code has been sent to the email address you used to register your account." %}</p>
|
||||
|
|
|
@ -41,7 +41,7 @@
|
|||
</section>
|
||||
{% endif %}
|
||||
|
||||
{% if annual_summary_year and tab.key == 'home' %}
|
||||
{% if annual_summary_year and tab.key == 'home' and has_summary_read_throughs %}
|
||||
<section class="block is-hidden" data-hide="hide_annual_summary_{{ annual_summary_year }}">
|
||||
{% include 'feed/summary_card.html' with year=annual_summary_year %}
|
||||
<hr>
|
||||
|
|
|
@ -2,13 +2,11 @@
|
|||
{% load feed_page_tags %}
|
||||
{% load i18n %}
|
||||
|
||||
{% block title %}{{ title }}{% endblock %}
|
||||
|
||||
|
||||
{% block opengraph %}
|
||||
{% firstof status.book status.mention_books.first as book %}
|
||||
{% if book %}
|
||||
{% include 'snippets/opengraph.html' with image=preview %}
|
||||
{% else %}
|
||||
{% include 'snippets/opengraph.html' %}
|
||||
{% endif %}
|
||||
{% include 'snippets/opengraph.html' with image=page_image %}
|
||||
{% endblock %}
|
||||
|
||||
|
||||
|
|
|
@ -6,8 +6,8 @@
|
|||
{% block content %}
|
||||
|
||||
<h1 class="title">{% trans "Create an Account" %}</h1>
|
||||
<div class="columns">
|
||||
<div class="column">
|
||||
<div class="columns is-multiline">
|
||||
<div class="column is-full is-half-desktop">
|
||||
<div class="block">
|
||||
{% if valid %}
|
||||
<div>
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
{% block content %}
|
||||
<h1 class="title">{% trans "Log in" %}</h1>
|
||||
<div class="columns is-multiline">
|
||||
<div class="column is-half">
|
||||
<div class="column {% if site.allow_registration %}is-half{% else %}is-full is-half-desktop{% endif %}">
|
||||
{% if login_form.non_field_errors %}
|
||||
<p class="notification is-danger">{{ login_form.non_field_errors }}</p>
|
||||
{% endif %}
|
||||
|
@ -20,13 +20,15 @@
|
|||
<div class="field">
|
||||
<label class="label" for="id_localname_confirm">{% trans "Username:" %}</label>
|
||||
<div class="control">
|
||||
<input type="text" name="localname" maxlength="255" class="input" required="" id="id_localname_confirm" value="{{ login_form.localname.value|default:'' }}">
|
||||
<input type="text" name="localname" maxlength="255" class="input" required=""
|
||||
id="id_localname_confirm" value="{{ login_form.localname.value|default:'' }}">
|
||||
</div>
|
||||
</div>
|
||||
<div class="field">
|
||||
<label class="label" for="id_password_confirm">{% trans "Password:" %}</label>
|
||||
<div class="control">
|
||||
<input type="password" name="password" maxlength="128" class="input" required="" id="id_password_confirm" aria-describedby="desc_password">
|
||||
<input type="password" name="password" maxlength="128" class="input" required=""
|
||||
id="id_password_confirm" aria-describedby="desc_password">
|
||||
</div>
|
||||
|
||||
{% include 'snippets/form_errors.html' with errors_list=login_form.password.errors id="desc_password" %}
|
||||
|
@ -58,7 +60,7 @@
|
|||
{% include 'snippets/about.html' %}
|
||||
|
||||
<p class="block">
|
||||
<a href="{% url 'about' %}">{% trans "More about this site" %}</a>
|
||||
<a href="{% url 'about' %}">{% trans "More about this site" %}</a>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -4,8 +4,8 @@
|
|||
{% block title %}{% trans "Reset Password" %}{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="columns">
|
||||
<div class="column">
|
||||
<div class="columns is-multiline">
|
||||
<div class="column is-full is-half-desktop">
|
||||
<div class="block">
|
||||
<h1 class="title">{% trans "Reset Password" %}</h1>
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
{% block content %}
|
||||
<h1 class="title">{% trans "Reactivate Account" %}</h1>
|
||||
<div class="columns is-multiline">
|
||||
<div class="column is-half">
|
||||
<div class="column {% if site.allow_registration %}is-half{% else %}is-full is-half-desktop{% endif %}">
|
||||
{% if login_form.non_field_errors %}
|
||||
<p class="notification is-danger">{{ login_form.non_field_errors }}</p>
|
||||
{% endif %}
|
||||
|
@ -16,13 +16,15 @@
|
|||
<div class="field">
|
||||
<label class="label" for="id_localname_confirm">{% trans "Username:" %}</label>
|
||||
<div class="control">
|
||||
<input type="text" name="localname" maxlength="255" class="input" required="" id="id_localname_confirm" value="{{ login_form.localname.value|default:'' }}">
|
||||
<input type="text" name="localname" maxlength="255" class="input" required=""
|
||||
id="id_localname_confirm" value="{{ login_form.localname.value|default:'' }}">
|
||||
</div>
|
||||
</div>
|
||||
<div class="field">
|
||||
<label class="label" for="id_password_confirm">{% trans "Password:" %}</label>
|
||||
<div class="control">
|
||||
<input type="password" name="password" maxlength="128" class="input" required="" id="id_password_confirm" aria-describedby="desc_password">
|
||||
<input type="password" name="password" maxlength="128" class="input" required=""
|
||||
id="id_password_confirm" aria-describedby="desc_password">
|
||||
</div>
|
||||
|
||||
{% include 'snippets/form_errors.html' with errors_list=login_form.password.errors id="desc_password" %}
|
||||
|
@ -51,7 +53,7 @@
|
|||
{% include 'snippets/about.html' %}
|
||||
|
||||
<p class="block">
|
||||
<a href="{% url 'about' %}">{% trans "More about this site" %}</a>
|
||||
<a href="{% url 'about' %}">{% trans "More about this site" %}</a>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -36,7 +36,7 @@
|
|||
<div class="field has-addons">
|
||||
<div class="control">
|
||||
{% if request.user.is_authenticated %}
|
||||
{% trans "Search for a book, user, or list" as search_placeholder %}
|
||||
{% trans "Search for a book, author, user, or list" as search_placeholder %}
|
||||
{% else %}
|
||||
{% trans "Search for a book" as search_placeholder %}
|
||||
{% endif %}
|
||||
|
|
|
@ -14,31 +14,29 @@
|
|||
<p> {% trans "You can create an export file here. This will allow you to migrate your data to another BookWyrm account." %}</p>
|
||||
</div>
|
||||
<div class="block mx-5 columns">
|
||||
{% blocktrans trimmed %}
|
||||
<div class="column is-half">
|
||||
<h2 class="is-size-5">Your file will include:</h2>
|
||||
<h2 class="is-size-5">{% trans "Your file will include:" %}</h2>
|
||||
<ul>
|
||||
<li>User profile</li>
|
||||
<li>Most user settings</li>
|
||||
<li>Reading goals</li>
|
||||
<li>Shelves</li>
|
||||
<li>Reading history</li>
|
||||
<li>Book reviews</li>
|
||||
<li>Statuses</li>
|
||||
<li>Your own lists and saved lists</li>
|
||||
<li>Which users you follow and block</li>
|
||||
<li>{% trans "User profile" %}</li>
|
||||
<li>{% trans "Most user settings" %}</li>
|
||||
<li>{% trans "Reading goals" %}</li>
|
||||
<li>{% trans "Shelves" %}</li>
|
||||
<li>{% trans "Reading history" %}</li>
|
||||
<li>{% trans "Book reviews" %}</li>
|
||||
<li>{% trans "Statuses" %}</li>
|
||||
<li>{% trans "Your own lists and saved lists" %}</li>
|
||||
<li>{% trans "Which users you follow and block" %}</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="column is-half">
|
||||
<h2 class="is-size-5">Your file will not include:</h2>
|
||||
<h2 class="is-size-5">{% trans "Your file will not include:" %}</h2>
|
||||
<ul>
|
||||
<li>Direct messages</li>
|
||||
<li>Replies to your statuses</li>
|
||||
<li>Groups</li>
|
||||
<li>Favorites</li>
|
||||
<li>{% trans "Direct messages" %}</li>
|
||||
<li>{% trans "Replies to your statuses" %}</li>
|
||||
<li>{% trans "Groups" %}</li>
|
||||
<li>{% trans "Favorites" %}</li>
|
||||
</ul>
|
||||
</div>
|
||||
{% endblocktrans %}
|
||||
</div>
|
||||
<p class="block">{% trans "In your new BookWyrm account can choose what to import: you will not have to import everything that is exported." %}</p>
|
||||
<p class="notification is-warning">
|
||||
|
@ -49,6 +47,13 @@
|
|||
{% if not site.user_exports_enabled %}
<p class="notification is-danger">
{% trans "New user exports are currently disabled." %}
{% if perms.bookwyrm.edit_instance_settings %}
<br/>
{% url 'settings-imports' as url %}
{% blocktrans trimmed %}
User exports settings can be changed from <a href="{{ url }}">the Imports page</a> in the Admin dashboard.
{% endblocktrans %}
{% endif%}
</p>
{% elif next_available %}
<p class="notification is-warning">

@@ -92,25 +97,25 @@
</td>
</tr>
{% endif %}
{% for job in jobs %}
{% for export in jobs %}
<tr>
<td>{{ job.updated_date }}</td>
<td>{{ export.job.updated_date }}</td>
<td>
<span
{% if job.status == "stopped" or job.status == "failed" %}
{% if export.job.status == "stopped" or export.job.status == "failed" %}
class="tag is-danger"
{% elif job.status == "pending" %}
{% elif export.job.status == "pending" %}
class="tag is-warning"
{% elif job.complete %}
{% elif export.job.complete %}
class="tag"
{% else %}
class="tag is-success"
{% endif %}
>
{% if job.status %}
{{ job.status }}
{{ job.status_display }}
{% elif job.complete %}
{% if export.job.status %}
{{ export.job.status }}
{{ export.job.status_display }}
{% elif export.job.complete %}
{% trans "Complete" %}
{% else %}
{% trans "Active" %}

@@ -118,18 +123,20 @@
</span>
</td>
<td>
<span>{{ job.export_data|get_file_size }}</span>
{% if export.size %}
<span>{{ export.size|get_file_size }}</span>
{% endif %}
</td>
<td>
{% if job.complete and not job.status == "stopped" and not job.status == "failed" %}
<p>
<a download="" href="/preferences/user-export/{{ job.task_id }}">
<span class="icon icon-download" aria-hidden="true"></span>
<span class="is-hidden-mobile">
{% trans "Download your export" %}
</span>
</a>
</p>
{% if export.url %}
<a href="{{ export.url }}">
<span class="icon icon-download" aria-hidden="true"></span>
<span class="is-hidden-mobile">
{% trans "Download your export" %}
</span>
</a>
{% elif export.unavailable %}
{% trans "Archive is no longer available" %}
{% endif %}
</td>
</tr>
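The rewritten table above reads `export.job`, `export.size`, `export.url` and `export.unavailable` instead of a bare `job`. The view that assembles that context is not part of this excerpt; a purely illustrative sketch of the shape the template appears to expect follows (every name other than those four keys is invented):

    # Illustrative only: the real BookWyrm view is not shown in this diff.
    def export_row(job, size=None, url=None):
        """one entry in the `jobs` list the export template iterates over"""
        return {
            "job": job,          # supplies status, status_display, complete, updated_date
            "size": size,        # rendered with |get_file_size when present
            "url": url,          # download link while the archive still exists
            "unavailable": url is None and getattr(job, "complete", False),
        }

    print(export_row(job=None, size=2048))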
17
bookwyrm/templates/search/author.html
Normal file

@@ -0,0 +1,17 @@
{% extends 'search/layout.html' %}

{% block panel %}

{% if results %}
<ul class="block">
{% for author in results %}
<li class="">
<a href="{{ author.local_path }}" class="author" itemprop="author" itemscope itemtype="https://schema.org/Thing">
<span itemprop="name">{{ author.name }}</span>
</a>
</li>
{% endfor %}
</ul>
{% endif %}

{% endblock %}

@@ -109,7 +109,7 @@
<p class="block">
|
||||
{% if request.user.is_authenticated %}
|
||||
{% if not remote %}
|
||||
<a href="{{ request.path }}?q={{ query }}&type=book&remote=true" id="tour-load-from-other-catalogues">
|
||||
<a href="{{ request.path }}?q={{ query|urlencode }}&type=book&remote=true" id="tour-load-from-other-catalogues">
|
||||
{% trans "Load results from other catalogues" %}
|
||||
</a>
|
||||
{% else %}
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
<div class="select" aria-label="{% trans 'Search type' %}">
|
||||
<select name="type">
|
||||
<option value="book" {% if type == "book" %}selected{% endif %}>{% trans "Books" %}</option>
|
||||
<option value="author" {% if type == "author" %}selected{% endif %}>{% trans "Authors" %}</option>
|
||||
{% if request.user.is_authenticated %}
|
||||
<option value="user" {% if type == "user" %}selected{% endif %}>{% trans "Users" %}</option>
|
||||
{% endif %}
|
||||
|
@ -40,15 +41,18 @@
|
|||
<nav class="tabs">
|
||||
<ul>
|
||||
<li{% if type == "book" %} class="is-active"{% endif %}>
|
||||
<a href="{% url 'search' %}?q={{ query }}&type=book">{% trans "Books" %}</a>
|
||||
<a href="{% url 'search' %}?q={{ query|urlencode }}&type=book">{% trans "Books" %}</a>
|
||||
</li>
|
||||
<li{% if type == "author" %} class="is-active"{% endif %}>
|
||||
<a href="{% url 'search' %}?q={{ query|urlencode }}&type=author">{% trans "Authors" %}</a>
|
||||
</li>
|
||||
{% if request.user.is_authenticated %}
|
||||
<li{% if type == "user" %} class="is-active"{% endif %}>
|
||||
<a href="{% url 'search' %}?q={{ query }}&type=user">{% trans "Users" %}</a>
|
||||
<a href="{% url 'search' %}?q={{ query|urlencode }}&type=user">{% trans "Users" %}</a>
|
||||
</li>
|
||||
{% endif %}
|
||||
<li{% if type == "list" %} class="is-active"{% endif %}>
|
||||
<a href="{% url 'search' %}?q={{ query }}&type=list">{% trans "Lists" %}</a>
|
||||
<a href="{% url 'search' %}?q={{ query|urlencode }}&type=list">{% trans "Lists" %}</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
|
|
|
@ -45,6 +45,10 @@
|
|||
{% include 'settings/dashboard/warnings/update_version.html' with warning_level="warning" fullwidth=True %}
{% endif %}

{% if schedule_form %}
{% include 'settings/dashboard/warnings/check_for_updates.html' with warning_level="success" fullwidth=True %}
{% endif %}

{% if missing_privacy or missing_conduct %}
<div class="column is-12 columns m-0 p-0">
{% if missing_privacy %}

@@ -0,0 +1,24 @@
{% extends 'settings/dashboard/warnings/layout.html' %}
{% load i18n %}

{% block warning_link %}#{% endblock %}

{% block warning_text %}

<form name="check-version" method="POST" action="{% url 'settings-dashboard' %}" class="is-flex is-align-items-center">
{% csrf_token %}

<p class="pr-2">
{% blocktrans trimmed with current=current_version available=available_version %}
Would you like to automatically check for new BookWyrm releases? (recommended)
{% endblocktrans %}
</p>

{{ schedule_form.every.as_hidden }}
{{ schedule_form.period.as_hidden }}

<button class="button is-small" type="submit">{% trans "Schedule checks" %}</button>
</form>

{% endblock %}

@@ -157,13 +157,13 @@
>
<div class="notification is-danger is-light">
<p class="my-2">{% trans "Users are currently unable to start new user exports. This is the default setting." %}</p>
{% if use_s3 %}
<p>{% trans "It is not currently possible to provide user exports when using s3 storage. The BookWyrm development team are working on a fix for this." %}</p>
{% if use_azure %}
<p>{% trans "It is not currently possible to provide user exports when using Azure storage." %}</p>
{% endif %}
</div>
{% csrf_token %}
<div class="control">
<button type="submit" class="button is-success" {% if use_s3 %}disabled{% endif %}>
<button type="submit" class="button is-success" {% if use_azure %}disabled{% endif %}>
{% trans "Enable user exports" %}
</button>
</div>

@@ -85,6 +85,10 @@
{% url 'settings-celery' as url %}
<a href="{{ url }}"{% if url in request.path %} class="is-active" aria-selected="true"{% endif %}>{% trans "Celery status" %}</a>
</li>
<li>
{% url 'settings-schedules' as url %}
<a href="{{ url }}"{% if url in request.path %} class="is-active" aria-selected="true"{% endif %}>{% trans "Scheduled tasks" %}</a>
</li>
<li>
{% url 'settings-email-config' as url %}
<a href="{{ url }}"{% if url in request.path %} class="is-active" aria-selected="true"{% endif %}>{% trans "Email Configuration" %}</a>

127
bookwyrm/templates/settings/schedules.html
Normal file

@@ -0,0 +1,127 @@
{% extends 'settings/layout.html' %}
{% load i18n %}
{% load humanize %}
{% load utilities %}

{% block title %}
{% trans "Scheduled tasks" %}
{% endblock %}

{% block header %}
{% trans "Scheduled tasks" %}
{% endblock %}

{% block panel %}

<div class="block content">
<h3>{% trans "Tasks" %}</h3>
<div class="table-container">
<table class="table is-striped is-fullwidth">
<tr>
<th>
{% trans "Name" %}
</th>
<th>
{% trans "Celery task" %}
</th>
<th>
{% trans "Date changed" %}
</th>
<th>
{% trans "Last run at" %}
</th>
<th>
{% trans "Schedule" %}
</th>
<th>
{% trans "Schedule ID" %}
</th>
<th>
{% trans "Enabled" %}
</th>
</tr>
{% for task in tasks %}
<tr>
<td>
{{ task.name }}
</td>
<td class="overflow-wrap-anywhere">
{{ task.task }}
</td>
<td>
{{ task.date_changed }}
</td>
<td>
{{ task.last_run_at }}
</td>
<td>
{% firstof task.interval task.crontab "None" %}
</td>
<td>
{{ task.interval.id }}
</td>
<td>
<span class="tag">
{% if task.enabled %}
<span class="icon icon-check" aria-hidden="true"></span>
{% endif %}
{{ task.enabled|yesno }}
</span>
{% if task.name != "celery.backend_cleanup" %}
<form name="unschedule-{{ task.id }}" method="POST" action="{% url 'settings-schedules' task.id %}">
{% csrf_token %}
<button type="submit" class="button is-danger is-small">{% trans "Un-schedule" %}</button>
</form>
{% endif %}
</td>
</tr>
{% empty %}
<tr>
<td colspan="2">
{% trans "No scheduled tasks" %}
</td>
</tr>
{% endfor %}
</table>
</div>
</div>

<div class="block content">
<h3>{% trans "Schedules" %}</h3>
<div class="table-container">
<table class="table is-striped is-fullwidth">
<tr>
<th>
{% trans "ID" %}
</th>
<th>
{% trans "Schedule" %}
</th>
<th>
{% trans "Tasks" %}
</th>
</tr>
{% for schedule in schedules %}
<tr>
<td>
{{ schedule.id }}
</td>
<td class="overflow-wrap-anywhere">
{{ schedule }}
</td>
<td>
{{ schedule.periodictask_set.count }}
</td>
</tr>
{% empty %}
<tr>
<td colspan="2">
{% trans "No schedules found" %}
</td>
</tr>
{% endfor %}
</table>
</div>
</div>

{% endblock %}
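The new `schedules.html` page iterates over `tasks` and `schedules`, which map directly onto django-celery-beat's `PeriodicTask` and `IntervalSchedule` models (their `name`, `task`, `last_run_at`, `interval`, `crontab` and `enabled` fields are exactly what the template reads). The view behind this page is not shown in this diff; a simplified, function-based sketch of how such a context could be built (the decorators and function name are assumptions, not BookWyrm's actual class-based view):

    # Rough sketch only; BookWyrm's real view for this page is not in this excerpt.
    from django.contrib.auth.decorators import login_required, permission_required
    from django.shortcuts import render
    from django_celery_beat.models import IntervalSchedule, PeriodicTask


    @login_required
    @permission_required("bookwyrm.edit_instance_settings", raise_exception=True)
    def scheduled_tasks(request):
        """list periodic tasks and the schedules they run on"""
        return render(
            request,
            "settings/schedules.html",
            {
                "tasks": PeriodicTask.objects.select_related("interval", "crontab"),
                "schedules": IntervalSchedule.objects.all(),
            },
        )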
@@ -3,7 +3,7 @@

{% block filter %}
<div class="control">
<label class="label" for="filter_query">{% trans 'Filter by keyword' %}</label>
<input aria-label="Filter by keyword" id="my-books-filter" class="input" type="text" name="filter" placeholder="{% trans 'Enter text here' %}" value="{{ shelves_filter_query|default:'' }}" spellcheck="false" />
<label class="label" for="my-books-filter">{% trans 'Filter by keyword' %}</label>
<input id="my-books-filter" class="input" type="text" name="filter" placeholder="{% trans 'Enter text here' %}" value="{{ shelves_filter_query|default:'' }}" spellcheck="false" />
</div>
{% endblock %}

3
bookwyrm/templates/snippets/book_series.html
Normal file

@@ -0,0 +1,3 @@
{% if book.series %}
({{book.series}}{%if book.series_number %}, #{{book.series_number}}{% endif %})
{% endif %}

@@ -9,12 +9,15 @@
{% if book.authors.exists %}
{% blocktrans trimmed with path=book.local_path title=book|book_title %}
<a href="{{ path }}">{{ title }}</a> by
<a href="{{ path }}">{{ title }}</a>
by
{% endblocktrans %} {% include 'snippets/authors.html' with book=book limit=3 %}

{% else %}
<a href="{{ book.local_path }}">{{ book|book_title }}</a>
{% endif %}

{% include 'snippets/book_series.html' with book=book %}

{% endcache %}
{% endspaceless %}

@@ -56,8 +56,7 @@ uuid: a unique identifier used to make html "id" attributes unique and clarify j
<input
aria-label="{% if draft.position_mode == 'PG' %}Page{% else %}Percent{% endif %}"
class="input"
type="number"
min="0"
type="text"
name="position"
size="3"
value="{% firstof draft.position '' %}"

@@ -72,8 +71,7 @@ uuid: a unique identifier used to make html "id" attributes unique and clarify j
<input
aria-label="{% if draft.position_mode == 'PG' %}Page{% else %}Percent{% endif %}"
class="input"
type="number"
min="0"
type="text"
name="endposition"
size="3"
value="{% firstof draft.endposition '' %}"

@@ -1,24 +1,25 @@
{% load static %}

{% if preview_images_enabled is True %}
{% firstof image site.preview_image as page_image %}
{% if page_image %}
<meta name="twitter:card" content="summary_large_image">
{% if image %}
<meta name="twitter:image" content="{{ media_full_url }}{{ image }}">
<meta name="og:image" content="{{ media_full_url }}{{ image }}">
{% else %}
<meta name="twitter:image" content="{{ media_full_url }}{{ site.preview_image }}">
<meta name="og:image" content="{{ media_full_url }}{{ site.preview_image }}">
{% endif %}
<meta name="twitter:image" content="{{ media_full_url }}{{ page_image }}">
<meta name="og:image" content="{{ media_full_url }}{{ page_image }}">
{% elif site.logo %}
<meta name="twitter:card" content="summary">
<meta name="twitter:image" content="{{ media_full_url }}{{ site.logo }}">
<meta name="twitter:image:alt" content="{{ site.name }} Logo">
<meta name="og:image" content="{{ media_full_url }}{{ site.logo }}">
{% else %}
<meta name="twitter:card" content="summary">
<meta name="twitter:image" content="{% if site.logo %}{{ media_full_url }}{{ site.logo }}{% else %}{% static "images/logo.png" %}{% endif %}">
<meta name="og:image" content="{% if site.logo %}{{ media_full_url }}{{ site.logo }}{% else %}{% static "images/logo.png" %}{% endif %}">
<meta name="twitter:image" content="{% static "images/logo.png" %}">
<meta name="twitter:image:alt" content="BookWyrm Logo">
<meta name="og:image" content="{% static "images/logo.png" %}">
{% endif %}

<meta name="twitter:image:alt" content="BookWyrm Logo">

<meta name="twitter:title" content="{% if title %}{{ title }} - {% endif %}{{ site.name }}">
<meta name="og:title" content="{% if title %}{{ title }} - {% endif %}{{ site.name }}">

<meta name="twitter:description" content="{% if description %}{{ description }}{% else %}{{ site.instance_tagline }}{% endif %}">
<meta name="og:description" content="{% if description %}{{ description }}{% else %}{{ site.instance_tagline }}{% endif %}">
{% firstof description site.instance_tagline as description %}
<meta name="twitter:description" content="{{ description }}">
<meta name="og:description" content="{{ description }}">

@@ -17,4 +17,7 @@ commented on <a href="{{ book_path }}">{{ book }}</a>
{% endblocktrans %}

{% endif %}

{% include 'snippets/book_series.html' with book=book %}

{% endwith %}

@@ -17,4 +17,7 @@ quoted <a href="{{ book_path }}">{{ book }}</a>
{% endblocktrans %}

{% endif %}

{% include 'snippets/book_series.html' with book=book %}

{% endwith %}

@@ -19,4 +19,7 @@ finished reading <a href="{{ book_path }}">{{ book }}</a>
{% endblocktrans %}

{% endif %}

{% include 'snippets/book_series.html' with book=book %}

{% endspaceless %}

@@ -19,4 +19,7 @@ started reading <a href="{{ book_path }}">{{ book }}</a>
{% endblocktrans %}

{% endif %}

{% include 'snippets/book_series.html' with book=book %}

{% endspaceless %}

@@ -17,4 +17,7 @@ reviewed <a href="{{ book_path }}">{{ book }}</a>
{% endblocktrans %}

{% endif %}

{% include 'snippets/book_series.html' with book=book %}

{% endwith %}

@@ -19,5 +19,8 @@ stopped reading <a href="{{ book_path }}">{{ book }}</a>
{% endblocktrans %}

{% endif %}

{% include 'snippets/book_series.html' with book=book %}

{% endspaceless %}


@@ -19,4 +19,7 @@ wants to read <a href="{{ book_path }}">{{ book }}</a>
{% endblocktrans %}

{% endif %}

{% include 'snippets/book_series.html' with book=book %}

{% endspaceless %}

@@ -126,15 +126,18 @@ def id_to_username(user_id):
value = f"{name}@{domain}"
|
||||
|
||||
return value
|
||||
return "a new user account"
|
||||
return _("a new user account")
|
||||
|
||||
|
||||
@register.filter(name="get_file_size")
|
||||
def get_file_size(file):
|
||||
def get_file_size(nbytes):
|
||||
"""display the size of a file in human readable terms"""
|
||||
|
||||
try:
|
||||
raw_size = os.stat(file.path).st_size
|
||||
raw_size = float(nbytes)
|
||||
except (ValueError, TypeError):
|
||||
return repr(nbytes)
|
||||
else:
|
||||
if raw_size < 1024:
|
||||
return f"{raw_size} bytes"
|
||||
if raw_size < 1024**2:
|
||||
|
@ -142,8 +145,6 @@ def get_file_size(file):
|
|||
if raw_size < 1024**3:
return f"{raw_size/1024**2:.2f} MB"
return f"{raw_size/1024**3:.2f} GB"
except Exception: # pylint: disable=broad-except
return ""


@register.filter(name="get_user_permission")
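Pieced together from the two hunks above, the updated filter now formats a raw byte count instead of stat-ing a file on disk. Roughly reconstructed (the KB branch falls in elided context between the hunks and is assumed from the surrounding pattern; the try/else structure is flattened here for brevity):

    # Reconstruction of the filter after this change; the KB line is assumed.
    from django import template

    register = template.Library()


    @register.filter(name="get_file_size")
    def get_file_size(nbytes):
        """display the size of a file in human readable terms"""
        try:
            raw_size = float(nbytes)
        except (ValueError, TypeError):
            return repr(nbytes)
        if raw_size < 1024:
            return f"{raw_size} bytes"
        if raw_size < 1024**2:
            return f"{raw_size / 1024:.2f} KB"  # assumed; not visible in the diff
        if raw_size < 1024**3:
            return f"{raw_size / 1024**2:.2f} MB"
        return f"{raw_size / 1024**3:.2f} GB"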
@@ -7,13 +7,13 @@ class Author(TestCase):
"""serialize author tests"""

@classmethod
def setUpTestData(self): # pylint: disable=bad-classmethod-argument
def setUpTestData(cls):
"""initial data"""
self.book = models.Edition.objects.create(
cls.book = models.Edition.objects.create(
title="Example Edition",
remote_id="https://example.com/book/1",
)
self.author = models.Author.objects.create(
cls.author = models.Author.objects.create(
name="Author fullname",
aliases=["One", "Two"],
bio="bio bio bio",
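This and several later hunks switch `setUpTestData` from a mislabelled `self` argument to a proper `@classmethod` taking `cls`, which is the Django idiom: the method runs once per test class and the objects it creates are shared (and rolled back) for every test method. A generic illustration of the pattern, using Django's bundled auth `User` model purely for the example:

    # Not BookWyrm code; just the idiom these hunks adopt.
    from django.contrib.auth.models import User
    from django.test import TestCase


    class SharedDataTest(TestCase):
        @classmethod
        def setUpTestData(cls):
            """runs once per class; tests see this data via transaction rollback, not a rebuild"""
            cls.user = User.objects.create_user("reader", "reader@example.com", "password")

        def test_username(self):
            self.assertEqual(self.user.username, "reader")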
@@ -1,12 +1,10 @@
""" tests the base functionality for activitypub dataclasses """
from io import BytesIO
import json
import pathlib
from unittest.mock import patch

from dataclasses import dataclass
from django.test import TestCase
from PIL import Image
import responses

from bookwyrm import activitypub

@@ -29,16 +27,18 @@ class BaseActivity(TestCase):
"""the super class for model-linked activitypub dataclasses"""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(self): # pylint: disable=bad-classmethod-argument
|
||||
def setUpTestData(cls):
|
||||
"""we're probably going to re-use this so why copy/paste"""
|
||||
with patch("bookwyrm.suggested_users.rerank_suggestions_task.delay"), patch(
|
||||
"bookwyrm.activitystreams.populate_stream_task.delay"
|
||||
), patch("bookwyrm.lists_stream.populate_lists_task.delay"):
|
||||
self.user = models.User.objects.create_user(
|
||||
with (
|
||||
patch("bookwyrm.suggested_users.rerank_suggestions_task.delay"),
|
||||
patch("bookwyrm.activitystreams.populate_stream_task.delay"),
|
||||
patch("bookwyrm.lists_stream.populate_lists_task.delay"),
|
||||
):
|
||||
cls.user = models.User.objects.create_user(
|
||||
"mouse", "mouse@mouse.mouse", "mouseword", local=True, localname="mouse"
|
||||
)
|
||||
self.user.remote_id = "http://example.com/a/b"
|
||||
self.user.save(broadcast=False, update_fields=["remote_id"])
|
||||
cls.user.remote_id = "http://example.com/a/b"
|
||||
cls.user.save(broadcast=False, update_fields=["remote_id"])
|
||||
|
||||
def setUp(self):
|
||||
datafile = pathlib.Path(__file__).parent.joinpath("../data/ap_user.json")
|
||||
|
@ -46,13 +46,11 @@ class BaseActivity(TestCase):
|
|||
# don't try to load the user icon
del self.userdata["icon"]

image_file = pathlib.Path(__file__).parent.joinpath(
image_path = pathlib.Path(__file__).parent.joinpath(
"../../static/images/default_avi.jpg"
)
image = Image.open(image_file)
output = BytesIO()
image.save(output, format=image.format)
self.image_data = output.getvalue()
with open(image_path, "rb") as image_file:
self.image_data = image_file.read()

def test_get_representative_not_existing(self, *_):
"""test that an instance representative actor is created if it does not exist"""

@@ -232,10 +230,12 @@ class BaseActivity(TestCase):
)

# sets the celery task call to the function call
with patch("bookwyrm.activitypub.base_activity.set_related_field.delay"):
with patch("bookwyrm.models.status.Status.ignore_activity") as discarder:
discarder.return_value = False
update_data.to_model(model=models.Status, instance=status)
with (
patch("bookwyrm.activitypub.base_activity.set_related_field.delay"),
patch("bookwyrm.models.status.Status.ignore_activity") as discarder,
):
discarder.return_value = False
update_data.to_model(model=models.Status, instance=status)
self.assertIsNone(status.attachments.first())

@responses.activate
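These test hunks also replace chained `with patch(...), patch(...)` calls with parenthesized context managers, a form Python supports since 3.10. A minimal standalone example of the syntax (the patch targets here are placeholders, not BookWyrm task paths):

    # Parenthesized context managers, Python 3.10+.
    import os
    from unittest.mock import patch

    with (
        patch("os.getcwd", return_value="/tmp"),
        patch("os.getpid", return_value=1234),
    ):
        # both patches are active inside this single block
        print(os.getcwd(), os.getpid())  # -> /tmp 1234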
@@ -11,18 +11,20 @@ class Note(TestCase):
"""the model-linked ActivityPub dataclass for Note-based types"""

@classmethod
def setUpTestData(self): # pylint: disable=bad-classmethod-argument
def setUpTestData(cls):
"""create a shared user"""
with patch("bookwyrm.suggested_users.rerank_suggestions_task.delay"), patch(
"bookwyrm.activitystreams.populate_stream_task.delay"
), patch("bookwyrm.lists_stream.populate_lists_task.delay"):
self.user = models.User.objects.create_user(
with (
patch("bookwyrm.suggested_users.rerank_suggestions_task.delay"),
patch("bookwyrm.activitystreams.populate_stream_task.delay"),
patch("bookwyrm.lists_stream.populate_lists_task.delay"),
):
cls.user = models.User.objects.create_user(
"mouse", "mouse@mouse.mouse", "mouseword", local=True, localname="mouse"
)
self.user.remote_id = "https://test-instance.org/user/critic"
self.user.save(broadcast=False, update_fields=["remote_id"])
cls.user.remote_id = "https://test-instance.org/user/critic"
cls.user.save(broadcast=False, update_fields=["remote_id"])

self.book = models.Edition.objects.create(
cls.book = models.Edition.objects.create(
title="Test Edition", remote_id="http://book.com/book"
)
Some files were not shown because too many files have changed in this diff