Mirror of https://github.com/bookwyrm-social/bookwyrm.git (synced 2025-01-24 16:08:07 +00:00)

Merge branch 'main' into author-table
Commit 9e17aebc13
475 changed files with 67400 additions and 11281 deletions
@@ -5,3 +5,4 @@ __pycache__
.git
.github
.pytest*
.env
41 .env.example

@@ -8,7 +8,7 @@ USE_HTTPS=true
DOMAIN=your.domain.here
EMAIL=your@email.here

# Instance defualt language (see options at bookwyrm/settings.py "LANGUAGES"
# Instance default language (see options at bookwyrm/settings.py "LANGUAGES"
LANGUAGE_CODE="en-us"
# Used for deciding which editions to prefer
DEFAULT_LANGUAGE="English"

@@ -21,8 +21,8 @@ MEDIA_ROOT=images/
# Database configuration
PGPORT=5432
POSTGRES_PASSWORD=securedbypassword123
POSTGRES_USER=fedireads
POSTGRES_DB=fedireads
POSTGRES_USER=bookwyrm
POSTGRES_DB=bookwyrm
POSTGRES_HOST=db

# Redis activity stream manager

@@ -32,12 +32,17 @@ REDIS_ACTIVITY_PORT=6379
REDIS_ACTIVITY_PASSWORD=redispassword345
# Optional, use a different redis database (defaults to 0)
# REDIS_ACTIVITY_DB_INDEX=0
# Alternatively specify the full redis url, i.e. if you need to use a unix:// socket
# REDIS_ACTIVITY_URL=

# Redis as celery broker
REDIS_BROKER_HOST=redis_broker
REDIS_BROKER_PORT=6379
REDIS_BROKER_PASSWORD=redispassword123
# Optional, use a different redis database (defaults to 0)
# REDIS_BROKER_DB_INDEX=0
# Alternatively specify the full redis url, i.e. if you need to use a unix:// socket
# REDIS_BROKER_URL=

# Monitoring for celery
FLOWER_PORT=8888

@@ -56,11 +61,11 @@ EMAIL_SENDER_NAME=admin
EMAIL_SENDER_DOMAIN=

# Query timeouts
SEARCH_TIMEOUT=15
SEARCH_TIMEOUT=5
QUERY_TIMEOUT=5

# Thumbnails Generation
ENABLE_THUMBNAIL_GENERATION=false
ENABLE_THUMBNAIL_GENERATION=true

# S3 configuration
USE_S3=false

@@ -77,9 +82,15 @@ AWS_SECRET_ACCESS_KEY=
# AWS_S3_REGION_NAME=None # "fr-par"
# AWS_S3_ENDPOINT_URL=None # "https://s3.fr-par.scw.cloud"

# Commented are example values if you use Azure Blob Storage
# USE_AZURE=true
# AZURE_ACCOUNT_NAME= # "example-account-name"
# AZURE_ACCOUNT_KEY= # "base64-encoded-access-key"
# AZURE_CONTAINER= # "example-blob-container-name"
# AZURE_CUSTOM_DOMAIN= # "example-account-name.blob.core.windows.net"

# Preview image generation can be computing and storage intensive
# ENABLE_PREVIEW_IMAGES=True
ENABLE_PREVIEW_IMAGES=False

# Specify RGB tuple or RGB hex strings,
# or use_dominant_color_light / use_dominant_color_dark

@@ -108,3 +119,21 @@ OTEL_EXPORTER_OTLP_ENDPOINT=
OTEL_EXPORTER_OTLP_HEADERS=
# Service name to identify your app
OTEL_SERVICE_NAME=

# Set HTTP_X_FORWARDED_PROTO ONLY to true if you know what you are doing.
# Only use it if your proxy is "swallowing" if the original request was made
# via https. Please refer to the Django-Documentation and assess the risks
# for your instance:
# https://docs.djangoproject.com/en/3.2/ref/settings/#secure-proxy-ssl-header
HTTP_X_FORWARDED_PROTO=false

# TOTP settings
# TWO_FACTOR_LOGIN_VALIDITY_WINDOW sets the number of codes either side
# which will be accepted.
TWO_FACTOR_LOGIN_VALIDITY_WINDOW=2
TWO_FACTOR_LOGIN_MAX_SECONDS=60

# Additional hosts to allow in the Content-Security-Policy, "self" (should be DOMAIN)
# and AWS_S3_CUSTOM_DOMAIN (if used) are added by default.
# Value should be a comma-separated list of host names.
CSP_ADDITIONAL_HOSTS=
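As a rough illustration of how these environment variables end up as typed settings, here is a minimal sketch using plain os.getenv. The variable names come from the example file above, but the parsing helpers and defaults shown here are assumptions for the example, not BookWyrm's actual settings code.

```python
import os

# Minimal sketch (not BookWyrm's settings.py): read a few of the documented
# variables with os.getenv and cast them by hand.
SEARCH_TIMEOUT = int(os.getenv("SEARCH_TIMEOUT", "5"))
QUERY_TIMEOUT = int(os.getenv("QUERY_TIMEOUT", "5"))
ENABLE_THUMBNAIL_GENERATION = os.getenv("ENABLE_THUMBNAIL_GENERATION", "true").lower() == "true"
POSTGRES = {
    "HOST": os.getenv("POSTGRES_HOST", "db"),
    "USER": os.getenv("POSTGRES_USER", "bookwyrm"),
    "DB": os.getenv("POSTGRES_DB", "bookwyrm"),
    "PORT": int(os.getenv("PGPORT", "5432")),
}
# CSP_ADDITIONAL_HOSTS is documented as a comma-separated list of host names
CSP_ADDITIONAL_HOSTS = [
    host for host in os.getenv("CSP_ADDITIONAL_HOSTS", "").split(",") if host
]
print(SEARCH_TIMEOUT, POSTGRES, CSP_ADDITIONAL_HOSTS)
```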
8 .github/workflows/black.yml (vendored)

@@ -10,6 +10,8 @@ jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - uses: psf/black@21.4b2
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
      - uses: psf/black@22.12.0
        with:
          version: 22.12.0
8 .github/workflows/codeql-analysis.yml (vendored)

@@ -36,11 +36,11 @@ jobs:

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2
        uses: actions/checkout@v3

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.

@@ -51,7 +51,7 @@ jobs:
      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v1
        uses: github/codeql-action/autobuild@v2

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

@@ -65,4 +65,4 @@ jobs:
      # make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1
        uses: github/codeql-action/analyze@v2
2 .github/workflows/curlylint.yaml (vendored)

@@ -10,7 +10,7 @@ jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v3

      - name: Install curlylint
        run: pip install curlylint
5 .github/workflows/django-tests.yml (vendored)

@@ -23,9 +23,9 @@ jobs:
        ports:
          - 5432:5432
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v2
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
      - name: Install Dependencies

@@ -56,5 +56,6 @@ jobs:
          EMAIL_USE_TLS: true
          ENABLE_PREVIEW_IMAGES: false
          ENABLE_THUMBNAIL_GENERATION: true
          HTTP_X_FORWARDED_PROTO: false
        run: |
          pytest -n 3
10 .github/workflows/lint-frontend.yaml (vendored)

@@ -19,16 +19,16 @@ jobs:

    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
      - uses: actions/checkout@v2
      - uses: actions/checkout@v3

      - name: Install modules
        run: npm install stylelint stylelint-config-recommended stylelint-config-standard stylelint-order eslint

      # See .stylelintignore for files that are not linted.
      - name: Run stylelint
        run: >
          npx stylelint bookwyrm/static/css/*.scss bookwyrm/static/css/bookwyrm/**/*.scss \
          --config dev-tools/.stylelintrc.js
      # - name: Run stylelint
      #   run: >
      #     npx stylelint bookwyrm/static/css/*.scss bookwyrm/static/css/bookwyrm/**/*.scss \
      #     --config dev-tools/.stylelintrc.js

      # See .eslintignore for files that are not linted.
      - name: Run ESLint
2 .github/workflows/prettier.yaml (vendored)

@@ -14,7 +14,7 @@ jobs:

    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
      - uses: actions/checkout@v2
      - uses: actions/checkout@v3

      - name: Install modules
        run: npm install prettier
4 .github/workflows/pylint.yml (vendored)

@@ -12,9 +12,9 @@ jobs:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v3
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
      - name: Install Dependencies
71 README.md

@@ -1,60 +1,45 @@
# BookWyrm

Social reading and reviewing, decentralized with ActivityPub
[![](https://img.shields.io/github/release/bookwyrm-social/bookwyrm.svg?colorB=58839b)](https://github.com/bookwyrm-social/bookwyrm/releases)
[![Run Python Tests](https://github.com/bookwyrm-social/bookwyrm/actions/workflows/django-tests.yml/badge.svg)](https://github.com/bookwyrm-social/bookwyrm/actions/workflows/django-tests.yml)
[![Pylint](https://github.com/bookwyrm-social/bookwyrm/actions/workflows/pylint.yml/badge.svg)](https://github.com/bookwyrm-social/bookwyrm/actions/workflows/pylint.yml)

## Contents
- [Joining BookWyrm](#joining-bookwyrm)
- [Contributing](#contributing)
- [About BookWyrm](#about-bookwyrm)
  - [What it is and isn't](#what-it-is-and-isnt)
  - [The role of federation](#the-role-of-federation)
  - [Features](#features)
- [Set up BookWyrm](#set-up-bookwyrm)

## Joining BookWyrm
If you'd like to join an instance, you can check out the [instances](https://joinbookwyrm.com/instances/) list.
BookWyrm is a social network for tracking your reading, talking about books, writing reviews, and discovering what to read next. Federation allows BookWyrm users to join small, trusted communities that can connect with one another, and with other ActivityPub services like [Mastodon](https://joinmastodon.org/) and [Pleroma](http://pleroma.social/).

## Contributing
See [contributing](https://docs.joinbookwyrm.com/contributing.html) for code, translation or monetary contributions.
## Links

[![Mastodon Follow](https://img.shields.io/mastodon/follow/000146121?domain=https%3A%2F%2Ftech.lgbt&style=social)](https://tech.lgbt/@bookwyrm)
[![Twitter Follow](https://img.shields.io/twitter/follow/BookWyrmSocial?style=social)](https://twitter.com/BookWyrmSocial)

- [Project homepage](https://joinbookwyrm.com/)
- [Support](https://patreon.com/bookwyrm)
- [Documentation](https://docs.joinbookwyrm.com/)

## About BookWyrm
### What it is and isn't
BookWyrm is a platform for social reading. You can use it to track what you're reading, review books, and follow your friends. It isn't primarily meant for cataloguing or as a data-source for books, but it does do both of those things to some degree.

### The role of federation
## Federation
BookWyrm is built on [ActivityPub](http://activitypub.rocks/). With ActivityPub, it inter-operates with different instances of BookWyrm, and other ActivityPub compliant services, like Mastodon. This means you can run an instance for your book club, and still follow your friend who posts on a server devoted to 20th century Russian speculative fiction. It also means that your friend on mastodon can read and comment on a book review that you post on your BookWyrm instance.

Federation makes it possible to have small, self-determining communities, in contrast to the monolithic service you find on GoodReads or Twitter. An instance can be focused on a particular interest, be just for a group of friends, or anything else that brings people together. Each community can choose which other instances they want to federate with, and moderate and run their community autonomously. Check out https://runyourown.social/ to get a sense of the philosophy and logistics behind small, high-trust social networks.

### Features
Since the project is still in its early stages, the features are growing every day, and there is plenty of room for suggestions and ideas. Open an [issue](https://github.com/bookwyrm-social/bookwyrm/issues) to get the conversation going!
- Posting about books
  - Compose reviews, with or without ratings, which are aggregated in the book page
  - Compose other kinds of statuses about books, such as:
    - Comments on a book
    - Quotes or excerpts
  - Reply to statuses
  - View aggregate reviews of a book across connected BookWyrm instances
  - Differentiate local and federated reviews and rating in your activity feed
- Track reading activity
  - Shelve books on default "to-read," "currently reading," and "read" shelves
  - Create custom shelves
  - Store started reading/finished reading dates, as well as progress updates along the way
  - Update followers about reading activity (optionally, and with granular privacy controls)
  - Create lists of books which can be open to submissions from anyone, curated, or only edited by the creator
- Federation with ActivityPub
  - Broadcast and receive user statuses and activity
  - Share book data between instances to create a networked database of metadata
  - Identify shared books across instances and aggregate related content
  - Follow and interact with users across BookWyrm instances
  - Inter-operate with non-BookWyrm ActivityPub services (currently, Mastodon is supported)
- Granular privacy controls
  - Private, followers-only, and public privacy levels for posting, shelves, and lists
  - Option for users to manually approve followers
  - Allow blocking and flagging for moderation
## Features

### The Tech Stack
### Post about books
Compose reviews, comment on what you're reading, and post quotes from books. You can converse with other BookWyrm users across the network about what they're reading.

### Track reading activity
Keep track of what books you've read, and what books you'd like to read in the future.

### Federation with ActivityPub
Federation allows you to interact with users on other instances and services, and also shares metadata about books and authors, which collaboratively builds a decentralized database of books.

### Privacy and moderation
Users and administrators can control who can see their posts and what other instances to federate with.

## Tech Stack
Web backend
- [Django](https://www.djangoproject.com/) web server
- [PostgreSQL](https://www.postgresql.org/) database
@@ -3,7 +3,7 @@ import inspect
import sys

from .base_activity import ActivityEncoder, Signature, naive_parse
from .base_activity import Link, Mention
from .base_activity import Link, Mention, Hashtag
from .base_activity import ActivitySerializerError, resolve_remote_id
from .image import Document, Image
from .note import Note, GeneratedNote, Article, Comment, Quotation
@@ -2,12 +2,17 @@
from dataclasses import dataclass, fields, MISSING
from json import JSONEncoder
import logging
import requests

from django.apps import apps
from django.db import IntegrityError, transaction
from django.utils.http import http_date

from bookwyrm import models
from bookwyrm.connectors import ConnectorException, get_data
from bookwyrm.tasks import app
from bookwyrm.signatures import make_signature
from bookwyrm.settings import DOMAIN, INSTANCE_ACTOR_USERNAME
from bookwyrm.tasks import app, MEDIUM

logger = logging.getLogger(__name__)

@@ -95,16 +100,34 @@ class ActivityObject:

    # pylint: disable=too-many-locals,too-many-branches,too-many-arguments
    def to_model(
        self, model=None, instance=None, allow_create=True, save=True, overwrite=True
        self,
        model=None,
        instance=None,
        allow_create=True,
        save=True,
        overwrite=True,
        allow_external_connections=True,
    ):
        """convert from an activity to a model instance"""
        """convert from an activity to a model instance. Args:
        model: the django model that this object is being converted to
            (will guess if not known)
        instance: an existing database entry that is going to be updated by
            this activity
        allow_create: whether a new object should be created if there is no
            existing object is provided or found matching the remote_id
        save: store in the database if true, return an unsaved model obj if false
        overwrite: replace fields in the database with this activity if true,
            only update blank fields if false
        allow_external_connections: look up missing data if true,
            throw an exception if false and an external connection is needed
        """
        model = model or get_model_from_type(self.type)

        # only reject statuses if we're potentially creating them
        if (
            allow_create
            and hasattr(model, "ignore_activity")
            and model.ignore_activity(self)
            and model.ignore_activity(self, allow_external_connections)
        ):
            return None

@@ -122,7 +145,10 @@ class ActivityObject:
        for field in instance.simple_fields:
            try:
                changed = field.set_field_from_activity(
                    instance, self, overwrite=overwrite
                    instance,
                    self,
                    overwrite=overwrite,
                    allow_external_connections=allow_external_connections,
                )
                if changed:
                    update_fields.append(field.name)

@@ -133,7 +159,11 @@ class ActivityObject:
        # too early and jank up users
        for field in instance.image_fields:
            changed = field.set_field_from_activity(
                instance, self, save=save, overwrite=overwrite
                instance,
                self,
                save=save,
                overwrite=overwrite,
                allow_external_connections=allow_external_connections,
            )
            if changed:
                update_fields.append(field.name)

@@ -156,8 +186,12 @@ class ActivityObject:

        # add many to many fields, which have to be set post-save
        for field in instance.many_to_many_fields:
            # mention books/users, for example
            field.set_field_from_activity(instance, self)
            # mention books/users/hashtags, for example
            field.set_field_from_activity(
                instance,
                self,
                allow_external_connections=allow_external_connections,
            )

        # reversed relationships in the models
        for (

@@ -194,6 +228,11 @@ class ActivityObject:
            try:
                if issubclass(type(v), ActivityObject):
                    data[k] = v.serialize()
                elif isinstance(v, list):
                    data[k] = [
                        e.serialize() if issubclass(type(e), ActivityObject) else e
                        for e in v
                    ]
            except TypeError:
                pass
        data = {k: v for (k, v) in data.items() if v is not None and k not in omit}

@@ -202,7 +241,7 @@ class ActivityObject:
        return data


@app.task(queue="medium_priority")
@app.task(queue=MEDIUM)
@transaction.atomic
def set_related_field(
    model_name, origin_model_name, related_field_name, related_remote_id, data

@@ -241,10 +280,10 @@ def set_related_field(

def get_model_from_type(activity_type):
    """given the activity, what type of model"""
    models = apps.get_models()
    activity_models = apps.get_models()
    model = [
        m
        for m in models
        for m in activity_models
        if hasattr(m, "activity_serializer")
        and hasattr(m.activity_serializer, "type")
        and m.activity_serializer.type == activity_type

@@ -256,10 +295,22 @@ def get_model_from_type(activity_type):
    return model[0]


# pylint: disable=too-many-arguments
def resolve_remote_id(
    remote_id, model=None, refresh=False, save=True, get_activity=False
    remote_id,
    model=None,
    refresh=False,
    save=True,
    get_activity=False,
    allow_external_connections=True,
):
    """take a remote_id and return an instance, creating if necessary"""
    """take a remote_id and return an instance, creating if necessary. Args:
    remote_id: the unique url for looking up the object in the db or by http
    model: a string or object representing the model that corresponds to the object
    save: whether to return an unsaved database entry or a saved one
    get_activity: whether to return the activitypub object or the model object
    allow_external_connections: whether to make http connections
    """
    if model:  # a bonus check we can do if we already know the model
        if isinstance(model, str):
            model = apps.get_model(f"bookwyrm.{model}", require_ready=True)

@@ -267,13 +318,26 @@ def resolve_remote_id(
    if result and not refresh:
        return result if not get_activity else result.to_activity_dataclass()

    # The above block will return the object if it already exists in the database.
    # If it doesn't, an external connection would be needed, so check if that's cool
    if not allow_external_connections:
        raise ActivitySerializerError(
            "Unable to serialize object without making external HTTP requests"
        )

    # load the data and create the object
    try:
        data = get_data(remote_id)
    except ConnectorException:
        logger.exception("Could not connect to host for remote_id: %s", remote_id)
    except ConnectionError:
        logger.info("Could not connect to host for remote_id: %s", remote_id)
        return None
    except requests.HTTPError as e:
        if (e.response is not None) and e.response.status_code == 401:
            # This most likely means it's a mastodon with secure fetch enabled.
            data = get_activitypub_data(remote_id)
        else:
            logger.info("Could not connect to host for remote_id: %s", remote_id)
            return None

    # determine the model implicitly, if not provided
    # or if it's a model with subclasses like Status, check again
    if not model or hasattr(model.objects, "select_subclasses"):
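The new allow_external_connections guard in resolve_remote_id above can be summarized with a small stand-in, shown below. This is a behavioural sketch only, with made-up data and names (to_model_stub, local_index), not BookWyrm's actual implementation: when the object is missing locally and external connections are disallowed, the lookup raises instead of fetching.

```python
# Stand-in classes/functions for illustration; only the control flow mirrors
# the guard added above.
class ActivitySerializerError(Exception):
    """stand-in for bookwyrm.activitypub.ActivitySerializerError"""

def to_model_stub(remote_id, local_index, allow_external_connections=True):
    obj = local_index.get(remote_id)
    if obj is not None:
        return obj  # found locally, no HTTP needed
    if not allow_external_connections:  # refuse to go over the network
        raise ActivitySerializerError(
            "Unable to serialize object without making external HTTP requests"
        )
    return {"remote_id": remote_id}  # pretend we fetched and created it

local = {"https://example.net/user/mouse": {"name": "mouse"}}
print(to_model_stub("https://example.net/user/mouse", local, False))
try:
    to_model_stub("https://example.net/book/1", local, False)
except ActivitySerializerError as err:
    print("blocked:", err)
```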
@@ -292,6 +356,52 @@ def resolve_remote_id(
    return item.to_model(model=model, instance=result, save=save)


def get_representative():
    """Get or create an actor representing the instance
    to sign requests to 'secure mastodon' servers"""
    username = f"{INSTANCE_ACTOR_USERNAME}@{DOMAIN}"
    email = "bookwyrm@localhost"
    try:
        user = models.User.objects.get(username=username)
    except models.User.DoesNotExist:
        user = models.User.objects.create_user(
            username=username,
            email=email,
            local=True,
            localname=INSTANCE_ACTOR_USERNAME,
        )
    return user


def get_activitypub_data(url):
    """wrapper for request.get"""
    now = http_date()
    sender = get_representative()
    if not sender.key_pair.private_key:
        # this shouldn't happen. it would be bad if it happened.
        raise ValueError("No private key found for sender")
    try:
        resp = requests.get(
            url,
            headers={
                # pylint: disable=line-too-long
                "Accept": 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
                "Date": now,
                "Signature": make_signature("get", sender, url, now),
            },
        )
    except requests.RequestException:
        raise ConnectorException()
    if not resp.ok:
        resp.raise_for_status()
    try:
        data = resp.json()
    except ValueError:
        raise ConnectorException()

    return data


@dataclass(init=False)
class Link(ActivityObject):
    """for tagging a book in a status"""
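For servers with Mastodon's secure/authorized fetch enabled, get_activitypub_data above sends a signed GET. The sketch below only shows the shape of the headers it builds; the Date value uses the standard HTTP date format, and the Signature value here is a placeholder, since the real one comes from bookwyrm's make_signature and the instance actor's key.

```python
from time import time
from wsgiref.handlers import format_date_time

# Shape of the headers used by the signed fetch above; the Signature value is
# a placeholder, not a real HTTP signature.
headers = {
    "Accept": 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
    "Date": format_date_time(time()),  # e.g. "Sun, 06 Nov 1994 08:49:37 GMT"
    "Signature": "<HTTP signature over the GET request, signed with the instance actor key>",
}
print(headers)
```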
@@ -306,7 +416,9 @@ class Link(ActivityObject):

    def serialize(self, **kwargs):
        """remove fields"""
        omit = ("id", "type", "@context")
        omit = ("id", "@context")
        if self.type == "Link":
            omit += ("type",)
        return super().serialize(omit=omit)


@@ -315,3 +427,10 @@ class Mention(Link):
    """a subtype of Link for mentioning an actor"""

    type: str = "Mention"


@dataclass(init=False)
class Hashtag(Link):
    """a subtype of Link for mentioning a hashtag"""

    type: str = "Hashtag"
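To make the serialize change above concrete: a plain Link still drops its type field, while subtypes like the new Hashtag keep theirs. The dictionaries below are illustrative shapes with made-up values, not output captured from BookWyrm.

```python
# Illustrative serialized shapes (made-up values): a plain Link omits "type",
# a Hashtag keeps it so receiving servers can tell the tag kinds apart.
link_json = {
    "href": "https://example.net/book/1",
    "name": "An Example Book",
}
hashtag_json = {
    "type": "Hashtag",
    "href": "https://example.net/hashtag/1",
    "name": "#SciFi",
}
print(link_json, hashtag_json)
```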
@@ -19,6 +19,8 @@ class BookData(ActivityObject):
    viaf: str = None
    wikidata: str = None
    asin: str = None
    aasin: str = None
    isfdb: str = None
    lastEditedBy: str = None
    links: List[str] = field(default_factory=lambda: [])
    fileLinks: List[str] = field(default_factory=lambda: [])

@@ -90,3 +92,4 @@ class Author(BookData):
    bio: str = ""
    wikipediaLink: str = ""
    type: str = "Author"
    website: str = ""
@@ -1,9 +1,12 @@
""" note serializer and children thereof """
from dataclasses import dataclass, field
from typing import Dict, List
from django.apps import apps
import re

from .base_activity import ActivityObject, Link
from django.apps import apps
from django.db import IntegrityError, transaction

from .base_activity import ActivityObject, ActivitySerializerError, Link
from .image import Document


@@ -38,6 +41,47 @@ class Note(ActivityObject):
    updated: str = None
    type: str = "Note"

    # pylint: disable=too-many-arguments
    def to_model(
        self,
        model=None,
        instance=None,
        allow_create=True,
        save=True,
        overwrite=True,
        allow_external_connections=True,
    ):
        instance = super().to_model(
            model, instance, allow_create, save, overwrite, allow_external_connections
        )

        if instance is None:
            return instance

        # Replace links to hashtags in content with local URLs
        changed_content = False
        for hashtag in instance.mention_hashtags.all():
            updated_content = re.sub(
                rf'(<a href=")[^"]*(" data-mention="hashtag">{hashtag.name}</a>)',
                rf"\1{hashtag.remote_id}\2",
                instance.content,
                flags=re.IGNORECASE,
            )
            if instance.content != updated_content:
                instance.content = updated_content
                changed_content = True

        if not save or not changed_content:
            return instance

        with transaction.atomic():
            try:
                instance.save(broadcast=False, update_fields=["content"])
            except IntegrityError as e:
                raise ActivitySerializerError(e)

        return instance


@dataclass(init=False)
class Article(Note):
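The hashtag rewrite in Note.to_model above is a plain regular-expression substitution. Here is a standalone, runnable illustration of that same pattern, using made-up values for the incoming content, the hashtag name, and the local remote_id.

```python
import re

# Standalone illustration of the href rewrite performed in Note.to_model above
# (values are made up for the example).
content = '<p>Reading! <a href="https://remote.example/tags/scifi" data-mention="hashtag">#SciFi</a></p>'
name = "#SciFi"
local_remote_id = "https://my.instance/hashtag/1"

updated = re.sub(
    rf'(<a href=")[^"]*(" data-mention="hashtag">{name}</a>)',
    rf"\1{local_remote_id}\2",
    content,
    flags=re.IGNORECASE,
)
print(updated)
# <p>Reading! <a href="https://my.instance/hashtag/1" data-mention="hashtag">#SciFi</a></p>
```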
@@ -14,12 +14,12 @@ class Verb(ActivityObject):
    actor: str
    object: ActivityObject

    def action(self):
    def action(self, allow_external_connections=True):
        """usually we just want to update and save"""
        # self.object may return None if the object is invalid in an expected way
        # ie, Question type
        if self.object:
            self.object.to_model()
            self.object.to_model(allow_external_connections=allow_external_connections)


# pylint: disable=invalid-name

@@ -42,7 +42,7 @@ class Delete(Verb):
    cc: List[str] = field(default_factory=lambda: [])
    type: str = "Delete"

    def action(self):
    def action(self, allow_external_connections=True):
        """find and delete the activity object"""
        if not self.object:
            return

@@ -52,7 +52,11 @@ class Delete(Verb):
            model = apps.get_model("bookwyrm.User")
            obj = model.find_existing_by_remote_id(self.object)
        else:
            obj = self.object.to_model(save=False, allow_create=False)
            obj = self.object.to_model(
                save=False,
                allow_create=False,
                allow_external_connections=allow_external_connections,
            )

        if obj:
            obj.delete()

@@ -67,11 +71,13 @@ class Update(Verb):
    to: List[str]
    type: str = "Update"

    def action(self):
    def action(self, allow_external_connections=True):
        """update a model instance from the dataclass"""
        if not self.object:
            return
        self.object.to_model(allow_create=False)
        self.object.to_model(
            allow_create=False, allow_external_connections=allow_external_connections
        )


@dataclass(init=False)

@@ -80,10 +86,10 @@ class Undo(Verb):

    type: str = "Undo"

    def action(self):
    def action(self, allow_external_connections=True):
        """find and remove the activity object"""
        if isinstance(self.object, str):
            # it may be that sometihng should be done with these, but idk what
            # it may be that something should be done with these, but idk what
            # this seems just to be coming from pleroma
            return

@@ -92,13 +98,28 @@ class Undo(Verb):
        model = None
        if self.object.type == "Follow":
            model = apps.get_model("bookwyrm.UserFollows")
            obj = self.object.to_model(model=model, save=False, allow_create=False)
            obj = self.object.to_model(
                model=model,
                save=False,
                allow_create=False,
                allow_external_connections=allow_external_connections,
            )
            if not obj:
                # this could be a folloq request not a follow proper
                # this could be a follow request not a follow proper
                model = apps.get_model("bookwyrm.UserFollowRequest")
                obj = self.object.to_model(model=model, save=False, allow_create=False)
                obj = self.object.to_model(
                    model=model,
                    save=False,
                    allow_create=False,
                    allow_external_connections=allow_external_connections,
                )
        else:
            obj = self.object.to_model(model=model, save=False, allow_create=False)
            obj = self.object.to_model(
                model=model,
                save=False,
                allow_create=False,
                allow_external_connections=allow_external_connections,
            )
        if not obj:
            # if we don't have the object, we can't undo it. happens a lot with boosts
            return

@@ -112,9 +133,9 @@ class Follow(Verb):
    object: str
    type: str = "Follow"

    def action(self):
    def action(self, allow_external_connections=True):
        """relationship save"""
        self.to_model()
        self.to_model(allow_external_connections=allow_external_connections)


@dataclass(init=False)

@@ -124,9 +145,9 @@ class Block(Verb):
    object: str
    type: str = "Block"

    def action(self):
    def action(self, allow_external_connections=True):
        """relationship save"""
        self.to_model()
        self.to_model(allow_external_connections=allow_external_connections)


@dataclass(init=False)

@@ -136,7 +157,7 @@ class Accept(Verb):
    object: Follow
    type: str = "Accept"

    def action(self):
    def action(self, allow_external_connections=True):
        """accept a request"""
        obj = self.object.to_model(save=False, allow_create=True)
        obj.accept()

@@ -149,7 +170,7 @@ class Reject(Verb):
    object: Follow
    type: str = "Reject"

    def action(self):
    def action(self, allow_external_connections=True):
        """reject a follow request"""
        obj = self.object.to_model(save=False, allow_create=False)
        obj.reject()

@@ -163,7 +184,7 @@ class Add(Verb):
    object: CollectionItem
    type: str = "Add"

    def action(self):
    def action(self, allow_external_connections=True):
        """figure out the target to assign the item to a collection"""
        target = resolve_remote_id(self.target)
        item = self.object.to_model(save=False)

@@ -177,7 +198,7 @@ class Remove(Add):

    type: str = "Remove"

    def action(self):
    def action(self, allow_external_connections=True):
        """find and remove the activity object"""
        obj = self.object.to_model(save=False, allow_create=False)
        if obj:

@@ -191,9 +212,9 @@ class Like(Verb):
    object: str
    type: str = "Like"

    def action(self):
    def action(self, allow_external_connections=True):
        """like"""
        self.to_model()
        self.to_model(allow_external_connections=allow_external_connections)


# pylint: disable=invalid-name

@@ -207,6 +228,6 @@ class Announce(Verb):
    object: str
    type: str = "Announce"

    def action(self):
    def action(self, allow_external_connections=True):
        """boost"""
        self.to_model()
        self.to_model(allow_external_connections=allow_external_connections)
@@ -4,27 +4,32 @@ from django.dispatch import receiver
from django.db import transaction
from django.db.models import signals, Q
from django.utils import timezone
from opentelemetry import trace

from bookwyrm import models
from bookwyrm.redis_store import RedisStore, r
from bookwyrm.tasks import app, LOW, MEDIUM, HIGH
from bookwyrm.telemetry import open_telemetry


tracer = open_telemetry.tracer()


class ActivityStream(RedisStore):
    """a category of activity stream (like home, local, books)"""

    def stream_id(self, user):
    def stream_id(self, user_id):
        """the redis key for this user's instance of this stream"""
        return f"{user.id}-{self.key}"
        return f"{user_id}-{self.key}"

    def unread_id(self, user):
    def unread_id(self, user_id):
        """the redis key for this user's unread count for this stream"""
        stream_id = self.stream_id(user)
        stream_id = self.stream_id(user_id)
        return f"{stream_id}-unread"

    def unread_by_status_type_id(self, user):
    def unread_by_status_type_id(self, user_id):
        """the redis key for this user's unread count for this stream"""
        stream_id = self.stream_id(user)
        stream_id = self.stream_id(user_id)
        return f"{stream_id}-unread-by-type"

    def get_rank(self, obj):  # pylint: disable=no-self-use
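The change above switches the redis key helpers from taking a full User object to a bare user id. Here is a standalone, runnable sketch of the resulting key scheme, with made-up values.

```python
# Standalone illustration of the redis key scheme above, derived from a bare
# user id rather than a User object (values are made up).
def stream_id(user_id, key):
    return f"{user_id}-{key}"

def unread_id(user_id, key):
    return f"{stream_id(user_id, key)}-unread"

def unread_by_status_type_id(user_id, key):
    return f"{stream_id(user_id, key)}-unread-by-type"

print(stream_id(42, "home"))                 # 42-home
print(unread_id(42, "home"))                 # 42-home-unread
print(unread_by_status_type_id(42, "home"))  # 42-home-unread-by-type
```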
@@ -33,16 +38,19 @@ class ActivityStream(RedisStore):

    def add_status(self, status, increment_unread=False):
        """add a status to users' feeds"""
        audience = self.get_audience(status)
        # the pipeline contains all the add-to-stream activities
        pipeline = self.add_object_to_related_stores(status, execute=False)
        pipeline = self.add_object_to_stores(
            status, self.get_stores_for_users(audience), execute=False
        )

        if increment_unread:
            for user in self.get_audience(status):
            for user_id in audience:
                # add to the unread status count
                pipeline.incr(self.unread_id(user))
                pipeline.incr(self.unread_id(user_id))
                # add to the unread status count for status type
                pipeline.hincrby(
                    self.unread_by_status_type_id(user), get_status_type(status), 1
                    self.unread_by_status_type_id(user_id), get_status_type(status), 1
                )

        # and go!

@@ -52,21 +60,21 @@ class ActivityStream(RedisStore):
        """add a user's statuses to another user's feed"""
        # only add the statuses that the viewer should be able to see (ie, not dms)
        statuses = models.Status.privacy_filter(viewer).filter(user=user)
        self.bulk_add_objects_to_store(statuses, self.stream_id(viewer))
        self.bulk_add_objects_to_store(statuses, self.stream_id(viewer.id))

    def remove_user_statuses(self, viewer, user):
        """remove a user's status from another user's feed"""
        # remove all so that followers only statuses are removed
        statuses = user.status_set.all()
        self.bulk_remove_objects_from_store(statuses, self.stream_id(viewer))
        self.bulk_remove_objects_from_store(statuses, self.stream_id(viewer.id))

    def get_activity_stream(self, user):
        """load the statuses to be displayed"""
        # clear unreads for this feed
        r.set(self.unread_id(user), 0)
        r.delete(self.unread_by_status_type_id(user))
        r.set(self.unread_id(user.id), 0)
        r.delete(self.unread_by_status_type_id(user.id))

        statuses = self.get_store(self.stream_id(user))
        statuses = self.get_store(self.stream_id(user.id))
        return (
            models.Status.objects.select_subclasses()
            .filter(id__in=statuses)

@@ -83,11 +91,11 @@ class ActivityStream(RedisStore):

    def get_unread_count(self, user):
        """get the unread status count for this user's feed"""
        return int(r.get(self.unread_id(user)) or 0)
        return int(r.get(self.unread_id(user.id)) or 0)

    def get_unread_count_by_status_type(self, user):
        """get the unread status count for this user's feed's status types"""
        status_types = r.hgetall(self.unread_by_status_type_id(user))
        status_types = r.hgetall(self.unread_by_status_type_id(user.id))
        return {
            str(key.decode("utf-8")): int(value) or 0
            for key, value in status_types.items()

@@ -95,11 +103,18 @@ class ActivityStream(RedisStore):

    def populate_streams(self, user):
        """go from zero to a timeline"""
        self.populate_store(self.stream_id(user))
        self.populate_store(self.stream_id(user.id))

    def get_audience(self, status):  # pylint: disable=no-self-use
        """given a status, what users should see it"""
        # direct messages don't appeard in feeds, direct comments/reviews/etc do
    @tracer.start_as_current_span("ActivityStream._get_audience")
    def _get_audience(self, status):  # pylint: disable=no-self-use
        """given a status, what users should see it, excluding the author"""
        trace.get_current_span().set_attribute("status_type", status.status_type)
        trace.get_current_span().set_attribute("status_privacy", status.privacy)
        trace.get_current_span().set_attribute(
            "status_reply_parent_privacy",
            status.reply_parent.privacy if status.reply_parent else None,
        )
        # direct messages don't appear in feeds, direct comments/reviews/etc do
        if status.privacy == "direct" and status.status_type == "Note":
            return []

@@ -114,19 +129,38 @@ class ActivityStream(RedisStore):
        # only visible to the poster and mentioned users
        if status.privacy == "direct":
            audience = audience.filter(
                Q(id=status.user.id)  # if the user is the post's author
                | Q(id__in=status.mention_users.all())  # if the user is mentioned
                Q(id__in=status.mention_users.all())  # if the user is mentioned
            )

        # don't show replies to statuses the user can't see
        elif status.reply_parent and status.reply_parent.privacy == "followers":
            audience = audience.filter(
                Q(id=status.reply_parent.user.id)  # if the user is the OG author
                | (
                    Q(following=status.user) & Q(following=status.reply_parent.user)
                )  # if the user is following both authors
            ).distinct()

        # only visible to the poster's followers and tagged users
        elif status.privacy == "followers":
            audience = audience.filter(
                Q(id=status.user.id)  # if the user is the post's author
                | Q(following=status.user)  # if the user is following the author
                Q(following=status.user)  # if the user is following the author
            )
        return audience.distinct()

    def get_stores_for_object(self, obj):
        return [self.stream_id(u) for u in self.get_audience(obj)]
    @tracer.start_as_current_span("ActivityStream.get_audience")
    def get_audience(self, status):
        """given a status, what users should see it"""
        trace.get_current_span().set_attribute("stream_id", self.key)
        audience = self._get_audience(status)
        status_author = models.User.objects.filter(
            is_active=True, local=True, id=status.user.id
        )
        return list({user.id for user in list(audience) + list(status_author)})

    def get_stores_for_users(self, user_ids):
        """convert a list of user ids into redis store ids"""
        return [self.stream_id(user_id) for user_id in user_ids]

    def get_statuses_for_user(self, user):  # pylint: disable=no-self-use
        """given a user, what statuses should they see on this stream"""
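The refactored get_audience above now returns a plain, de-duplicated list of user ids, merging the computed audience with the (local, active) status author. A trivial standalone sketch of that merge, with made-up ids:

```python
# Made-up ids: merge the follower audience with the status author, de-duplicated.
audience_ids = [3, 7, 7, 12]  # ids from _get_audience()
author_ids = [7]              # the local, active author of the status
print(sorted({*audience_ids, *author_ids}))  # [3, 7, 12]
```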
@@ -145,14 +179,19 @@ class HomeStream(ActivityStream):

    key = "home"

    @tracer.start_as_current_span("HomeStream.get_audience")
    def get_audience(self, status):
        audience = super().get_audience(status)
        trace.get_current_span().set_attribute("stream_id", self.key)
        audience = super()._get_audience(status)
        if not audience:
            return []
        return audience.filter(
            Q(id=status.user.id)  # if the user is the post's author
            | Q(following=status.user)  # if the user is following the author
        ).distinct()
        # if the user is following the author
        audience = audience.filter(following=status.user)
        # if the user is the post's author
        status_author = models.User.objects.filter(
            is_active=True, local=True, id=status.user.id
        )
        return list({user.id for user in list(audience) + list(status_author)})

    def get_statuses_for_user(self, user):
        return models.Status.privacy_filter(

@@ -191,8 +230,20 @@ class BooksStream(ActivityStream):

    key = "books"

    def get_audience(self, status):
    def _get_audience(self, status):
        """anyone with the mentioned book on their shelves"""
        work = (
            status.book.parent_work
            if hasattr(status, "book")
            else status.mention_books.first().parent_work
        )

        audience = super()._get_audience(status)
        if not audience:
            return []
        return audience.filter(shelfbook__book__parent_work=work).distinct()

    def get_audience(self, status):
        # only show public statuses on the books feed,
        # and only statuses that mention books
        if status.privacy != "public" or not (

@@ -200,16 +251,7 @@ class BooksStream(ActivityStream):
        ):
            return []

        work = (
            status.book.parent_work
            if hasattr(status, "book")
            else status.mention_books.first().parent_work
        )

        audience = super().get_audience(status)
        if not audience:
            return []
        return audience.filter(shelfbook__book__parent_work=work).distinct()
        return super().get_audience(status)

    def get_statuses_for_user(self, user):
        """any public status that mentions the user's books"""

@@ -233,38 +275,38 @@ class BooksStream(ActivityStream):
    def add_book_statuses(self, user, book):
        """add statuses about a book to a user's feed"""
        work = book.parent_work
        statuses = (
            models.Status.privacy_filter(
        statuses = models.Status.privacy_filter(
            user,
            privacy_levels=["public"],
        )
            .filter(
                Q(comment__book__parent_work=work)
                | Q(quotation__book__parent_work=work)
                | Q(review__book__parent_work=work)
                | Q(mention_books__parent_work=work)
            )
            .distinct()
        )
        self.bulk_add_objects_to_store(statuses, self.stream_id(user))

        book_comments = statuses.filter(Q(comment__book__parent_work=work))
        book_quotations = statuses.filter(Q(quotation__book__parent_work=work))
        book_reviews = statuses.filter(Q(review__book__parent_work=work))
        book_mentions = statuses.filter(Q(mention_books__parent_work=work))

        self.bulk_add_objects_to_store(book_comments, self.stream_id(user.id))
        self.bulk_add_objects_to_store(book_quotations, self.stream_id(user.id))
        self.bulk_add_objects_to_store(book_reviews, self.stream_id(user.id))
        self.bulk_add_objects_to_store(book_mentions, self.stream_id(user.id))

    def remove_book_statuses(self, user, book):
        """add statuses about a book to a user's feed"""
        work = book.parent_work
        statuses = (
            models.Status.privacy_filter(
        statuses = models.Status.privacy_filter(
            user,
            privacy_levels=["public"],
        )
            .filter(
                Q(comment__book__parent_work=work)
                | Q(quotation__book__parent_work=work)
                | Q(review__book__parent_work=work)
                | Q(mention_books__parent_work=work)
            )
            .distinct()
        )
        self.bulk_remove_objects_from_store(statuses, self.stream_id(user))

        book_comments = statuses.filter(Q(comment__book__parent_work=work))
        book_quotations = statuses.filter(Q(quotation__book__parent_work=work))
        book_reviews = statuses.filter(Q(review__book__parent_work=work))
        book_mentions = statuses.filter(Q(mention_books__parent_work=work))

        self.bulk_remove_objects_from_store(book_comments, self.stream_id(user.id))
        self.bulk_remove_objects_from_store(book_quotations, self.stream_id(user.id))
        self.bulk_remove_objects_from_store(book_reviews, self.stream_id(user.id))
        self.bulk_remove_objects_from_store(book_mentions, self.stream_id(user.id))


# determine which streams are enabled in settings.py

@@ -287,6 +329,12 @@ def add_status_on_create(sender, instance, created, *args, **kwargs):
        remove_status_task.delay(instance.id)
        return

    # To avoid creating a zillion unnecessary tasks caused by re-saving the model,
    # check if it's actually ready to send before we go. We're trusting this was
    # set correctly by the inbox or view
    if not instance.ready:
        return

    # when creating new things, gotta wait on the transaction
    transaction.on_commit(
        lambda: add_status_on_create_command(sender, instance, created)

@@ -301,6 +349,10 @@ def add_status_on_create_command(sender, instance, created):
    if instance.published_date < timezone.now() - timedelta(
        days=1
    ) or instance.created_date < instance.published_date - timedelta(days=1):
        # a backdated status from a local user is an import, don't add it
        if instance.user.local:
            return
        # an out of date remote status is a low priority but should be added
        priority = LOW

    add_status_task.apply_async(

@@ -479,7 +531,9 @@ def remove_status_task(status_ids):

    for stream in streams.values():
        for status in statuses:
            stream.remove_object_from_related_stores(status)
            stream.remove_object_from_stores(
                status, stream.get_stores_for_users(stream.get_audience(status))
            )


@app.task(queue=HIGH)

@@ -528,10 +582,10 @@ def handle_boost_task(boost_id):

    for stream in streams.values():
        # people who should see the boost (not people who see the original status)
        audience = stream.get_stores_for_object(instance)
        stream.remove_object_from_related_stores(boosted, stores=audience)
        audience = stream.get_stores_for_users(stream.get_audience(instance))
        stream.remove_object_from_stores(boosted, audience)
        for status in old_versions:
            stream.remove_object_from_related_stores(status, stores=audience)
            stream.remove_object_from_stores(status, audience)


def get_status_type(status):
@@ -35,11 +35,12 @@ class BookwyrmConfig(AppConfig):
    # pylint: disable=no-self-use
    def ready(self):
        """set up OTLP and preview image files, if desired"""
        if settings.OTEL_EXPORTER_OTLP_ENDPOINT:
        if settings.OTEL_EXPORTER_OTLP_ENDPOINT or settings.OTEL_EXPORTER_CONSOLE:
            # pylint: disable=import-outside-toplevel
            from bookwyrm.telemetry import open_telemetry

            open_telemetry.instrumentDjango()
            open_telemetry.instrumentPostgres()

        if settings.ENABLE_PREVIEW_IMAGES and settings.FONTS:
            # Download any fonts that we don't have yet
@@ -4,9 +4,10 @@ from functools import reduce
import operator

from django.contrib.postgres.search import SearchRank, SearchQuery
from django.db.models import OuterRef, Subquery, F, Q
from django.db.models import F, Q

from bookwyrm import models
from bookwyrm import connectors
from bookwyrm.settings import MEDIA_FULL_URL


@@ -16,8 +17,15 @@ def search(query, min_confidence=0, filters=None, return_first=False):
    filters = filters or []
    if not query:
        return []
    # first, try searching unqiue identifiers
    query = query.strip()

    results = None
    # first, try searching unique identifiers
    # unique identifiers never have spaces, title/author usually do
    if not " " in query:
        results = search_identifiers(query, *filters, return_first=return_first)

    # if there were no identifier results...
    if not results:
        # then try searching title/author
        results = search_title_author(

@@ -30,26 +38,14 @@ def isbn_search(query):
    """search your local database"""
    if not query:
        return []

    # Up-case the ISBN string to ensure any 'X' check-digit is correct
    # If the ISBN has only 9 characters, prepend missing zero
    query = query.strip().upper().rjust(10, "0")
    filters = [{f: query} for f in ["isbn_10", "isbn_13"]]
    results = models.Edition.objects.filter(
    return models.Edition.objects.filter(
        reduce(operator.or_, (Q(**f) for f in filters))
    ).distinct()

    # when there are multiple editions of the same work, pick the default.
    # it would be odd for this to happen.

    default_editions = models.Edition.objects.filter(
        parent_work=OuterRef("parent_work")
    ).order_by("-edition_rank")
    results = (
        results.annotate(default_id=Subquery(default_editions.values("id")[:1])).filter(
            default_id=F("id")
        )
        or results
    )
    return results


def format_search_result(search_result):
    """convert a book object into a search result object"""

@@ -72,6 +68,10 @@ def format_search_result(search_result):

def search_identifiers(query, *filters, return_first=False):
    """tries remote_id, isbn; defined as dedupe fields on the model"""
    if connectors.maybe_isbn(query):
        # Oh did you think the 'S' in ISBN stood for 'standard'?
        normalized_isbn = query.strip().upper().rjust(10, "0")
        query = normalized_isbn
    # pylint: disable=W0212
    or_filters = [
        {f.name: query}

@@ -81,22 +81,7 @@ def search_identifiers(query, *filters, return_first=False):
    results = models.Edition.objects.filter(
        *filters, reduce(operator.or_, (Q(**f) for f in or_filters))
    ).distinct()
    if results.count() <= 1:
        if return_first:
            return results.first()
        return results

    # when there are multiple editions of the same work, pick the default.
    # it would be odd for this to happen.
    default_editions = models.Edition.objects.filter(
        parent_work=OuterRef("parent_work")
    ).order_by("-edition_rank")
    results = (
        results.annotate(default_id=Subquery(default_editions.values("id")[:1])).filter(
            default_id=F("id")
        )
        or results
    )
    if return_first:
        return results.first()
    return results

@@ -113,19 +98,16 @@ def search_title_author(query, min_confidence, *filters, return_first=False):
    )

    # when there are multiple editions of the same work, pick the closest
    editions_of_work = results.values("parent_work__id").values_list("parent_work__id")
    editions_of_work = results.values_list("parent_work__id", flat=True).distinct()

    # filter out multiple editions of the same work
    list_results = []
    for work_id in set(editions_of_work):
        editions = results.filter(parent_work=work_id)
        default = editions.order_by("-edition_rank").first()
        default_rank = default.rank if default else 0
        # if mutliple books have the top rank, pick the default edition
        if default_rank == editions.first().rank:
            result = default
        else:
            result = editions.first()
    for work_id in set(editions_of_work[:30]):
        result = (
            results.filter(parent_work=work_id)
            .order_by("-rank", "-edition_rank")
            .first()
        )

        if return_first:
            return result
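The ISBN handling in search_identifiers above normalizes the query before filtering. Here is a runnable illustration of that one-liner: upper-casing any 'X' check digit and left-padding 9-character ISBN-10s with a zero (the sample inputs are made up).

```python
# Illustration of the normalization used in search_identifiers above.
for raw in ["123456789", "031606792x", " 9780316067928 "]:
    print(repr(raw), "->", raw.strip().upper().rjust(10, "0"))
# '123456789'        -> '0123456789'
# '031606792x'       -> '031606792X'
# ' 9780316067928 '  -> '9780316067928'
```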
@ -1,6 +1,6 @@
|
|||
""" bring connectors into the namespace """
|
||||
from .settings import CONNECTORS
|
||||
from .abstract_connector import ConnectorException
|
||||
from .abstract_connector import get_data, get_image
|
||||
from .abstract_connector import get_data, get_image, maybe_isbn
|
||||
|
||||
from .connector_manager import search, first_search_result
|
||||
|
|
@@ -1,15 +1,19 @@
""" functionality outline for a book data connector """
from abc import ABC, abstractmethod
from urllib.parse import quote_plus
import imghdr
import logging
import re
import asyncio
import requests
from requests.exceptions import RequestException
import aiohttp

from django.core.files.base import ContentFile
from django.db import transaction
import requests
from requests.exceptions import RequestException

from bookwyrm import activitypub, models, settings
from bookwyrm.settings import USER_AGENT
from .connector_manager import load_more_data, ConnectorException, raise_not_valid_url
from .format_mappings import format_mappings


@@ -42,18 +46,53 @@ class AbstractMinimalConnector(ABC):
        """format the query url"""
        # Check if the query resembles an ISBN
        if maybe_isbn(query) and self.isbn_search_url and self.isbn_search_url != "":
            return f"{self.isbn_search_url}{query}"

            # Up-case the ISBN string to ensure any 'X' check-digit is correct
            # If the ISBN has only 9 characters, prepend missing zero
            normalized_query = query.strip().upper().rjust(10, "0")
            return f"{self.isbn_search_url}{normalized_query}"
        # NOTE: previously, we tried searching isbn and if that produces no results,
        # searched as free text. This, instead, only searches isbn if it's isbn-y
        return f"{self.search_url}{query}"
        return f"{self.search_url}{quote_plus(query)}"

    def process_search_response(self, query, data, min_confidence):
        """Format the search results based on the formt of the query"""
        """Format the search results based on the format of the query"""
        if maybe_isbn(query):
            return list(self.parse_isbn_search_data(data))[:10]
        return list(self.parse_search_data(data, min_confidence))[:10]

    async def get_results(self, session, url, min_confidence, query):
        """try this specific connector"""
        # pylint: disable=line-too-long
        headers = {
            "Accept": (
                'application/json, application/activity+json, application/ld+json; profile="https://www.w3.org/ns/activitystreams"; charset=utf-8'
            ),
            "User-Agent": USER_AGENT,
        }
        params = {"min_confidence": min_confidence}
        try:
            async with session.get(url, headers=headers, params=params) as response:
                if not response.ok:
                    logger.info("Unable to connect to %s: %s", url, response.reason)
                    return

                try:
                    raw_data = await response.json()
                except aiohttp.client_exceptions.ContentTypeError as err:
                    logger.exception(err)
                    return

                return {
                    "connector": self,
                    "results": self.process_search_response(
                        query, raw_data, min_confidence
                    ),
                }
        except asyncio.TimeoutError:
            logger.info("Connection timed out for url: %s", url)
        except aiohttp.ClientError as err:
            logger.info(err)

    @abstractmethod
    def get_or_create_book(self, remote_id):
        """pull up a book record by whatever means possible"""
@ -220,7 +259,7 @@ def dict_from_mappings(data, mappings):
|
|||
return result
|
||||
|
||||
|
||||
def get_data(url, params=None, timeout=10):
|
||||
def get_data(url, params=None, timeout=settings.QUERY_TIMEOUT):
|
||||
"""wrapper for request.get"""
|
||||
# check if the url is blocked
|
||||
raise_not_valid_url(url)
|
||||
|
@ -242,6 +281,10 @@ def get_data(url, params=None, timeout=10):
|
|||
raise ConnectorException(err)
|
||||
|
||||
if not resp.ok:
|
||||
if resp.status_code == 401:
|
||||
# this is probably an AUTHORIZED_FETCH issue
|
||||
resp.raise_for_status()
|
||||
else:
|
||||
raise ConnectorException()
|
||||
try:
|
||||
data = resp.json()
|
||||
|
@ -314,7 +357,7 @@ def infer_physical_format(format_text):
|
|||
|
||||
|
||||
def unique_physical_format(format_text):
|
||||
"""only store the format if it isn't diretly in the format mappings"""
|
||||
"""only store the format if it isn't directly in the format mappings"""
|
||||
format_text = format_text.lower()
|
||||
if format_text in format_mappings:
|
||||
# try a direct match, so saving this would be redundant
|
||||
|
@ -325,4 +368,11 @@ def unique_physical_format(format_text):
|
|||
def maybe_isbn(query):
|
||||
"""check if a query looks like an isbn"""
|
||||
isbn = re.sub(r"[\W_]", "", query) # removes filler characters
|
||||
return len(isbn) in [10, 13] # ISBN10 or ISBN13
|
||||
# ISBNs must be numeric except an ISBN10 checkdigit can be 'X'
|
||||
if not isbn.upper().rstrip("X").isnumeric():
|
||||
return False
|
||||
return len(isbn) in [
|
||||
9,
|
||||
10,
|
||||
13,
|
||||
] # ISBN10 or ISBN13, or maybe ISBN10 missing a leading zero
|
||||
|
|
|
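A minimal standalone sketch of the ISBN heuristic added in the hunks above; the sample query strings are made up for illustration:

import re

def maybe_isbn(query):
    """does the query look like an ISBN? (mirrors the helper above)"""
    isbn = re.sub(r"[\W_]", "", query)  # strip hyphens, spaces and other filler
    if not isbn.upper().rstrip("X").isnumeric():
        return False
    # ISBN-10, ISBN-13, or an ISBN-10 that lost its leading zero
    return len(isbn) in [9, 10, 13]

print(maybe_isbn("0-19-852663-6"))              # True: ten characters once the hyphens are gone
print(maybe_isbn("80442957X"))                  # True: nine characters, padded to ten before searching
print(maybe_isbn("the left hand of darkness"))  # False: falls through to free-text search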
@ -12,8 +12,8 @@ from django.db.models import signals
|
|||
from requests import HTTPError
|
||||
|
||||
from bookwyrm import book_search, models
|
||||
from bookwyrm.settings import SEARCH_TIMEOUT, USER_AGENT
|
||||
from bookwyrm.tasks import app
|
||||
from bookwyrm.settings import SEARCH_TIMEOUT
|
||||
from bookwyrm.tasks import app, LOW
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -22,40 +22,6 @@ class ConnectorException(HTTPError):
|
|||
"""when the connector can't do what was asked"""
|
||||
|
||||
|
||||
async def get_results(session, url, min_confidence, query, connector):
|
||||
"""try this specific connector"""
|
||||
# pylint: disable=line-too-long
|
||||
headers = {
|
||||
"Accept": (
|
||||
'application/json, application/activity+json, application/ld+json; profile="https://www.w3.org/ns/activitystreams"; charset=utf-8'
|
||||
),
|
||||
"User-Agent": USER_AGENT,
|
||||
}
|
||||
params = {"min_confidence": min_confidence}
|
||||
try:
|
||||
async with session.get(url, headers=headers, params=params) as response:
|
||||
if not response.ok:
|
||||
logger.info("Unable to connect to %s: %s", url, response.reason)
|
||||
return
|
||||
|
||||
try:
|
||||
raw_data = await response.json()
|
||||
except aiohttp.client_exceptions.ContentTypeError as err:
|
||||
logger.exception(err)
|
||||
return
|
||||
|
||||
return {
|
||||
"connector": connector,
|
||||
"results": connector.process_search_response(
|
||||
query, raw_data, min_confidence
|
||||
),
|
||||
}
|
||||
except asyncio.TimeoutError:
|
||||
logger.info("Connection timed out for url: %s", url)
|
||||
except aiohttp.ClientError as err:
|
||||
logger.exception(err)
|
||||
|
||||
|
||||
async def async_connector_search(query, items, min_confidence):
|
||||
"""Try a number of requests simultaneously"""
|
||||
timeout = aiohttp.ClientTimeout(total=SEARCH_TIMEOUT)
|
||||
|
@ -64,7 +30,7 @@ async def async_connector_search(query, items, min_confidence):
|
|||
for url, connector in items:
|
||||
tasks.append(
|
||||
asyncio.ensure_future(
|
||||
get_results(session, url, min_confidence, query, connector)
|
||||
connector.get_results(session, url, min_confidence, query)
|
||||
)
|
||||
)
|
||||
|
||||
|
@ -73,7 +39,7 @@ async def async_connector_search(query, items, min_confidence):
|
|||
|
||||
|
||||
def search(query, min_confidence=0.1, return_first=False):
|
||||
"""find books based on arbitary keywords"""
|
||||
"""find books based on arbitrary keywords"""
|
||||
if not query:
|
||||
return []
|
||||
results = []
|
||||
|
@ -143,7 +109,7 @@ def get_or_create_connector(remote_id):
|
|||
return load_connector(connector_info)
|
||||
|
||||
|
||||
@app.task(queue="low_priority")
|
||||
@app.task(queue=LOW)
|
||||
def load_more_data(connector_id, book_id):
|
||||
"""background the work of getting all 10,000 editions of LoTR"""
|
||||
connector_info = models.Connector.objects.get(id=connector_id)
|
||||
|
@ -152,7 +118,7 @@ def load_more_data(connector_id, book_id):
|
|||
connector.expand_book_data(book)
|
||||
|
||||
|
||||
@app.task(queue="low_priority")
|
||||
@app.task(queue=LOW)
|
||||
def create_edition_task(connector_id, work_id, data):
|
||||
"""separate task for each of the 10,000 editions of LoTR"""
|
||||
connector_info = models.Connector.objects.get(id=connector_id)
|
||||
|
|
|
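The hunks above move the per-connector request into Connector.get_results; the underlying aiohttp fan-out pattern, reduced to a self-contained sketch (the URL and timeout value are placeholders, not BookWyrm's configuration):

import asyncio
import aiohttp

async def fan_out(urls, timeout_seconds=5):
    """query several endpoints at once, silently dropping failures and timeouts"""
    timeout = aiohttp.ClientTimeout(total=timeout_seconds)

    async def fetch(session, url):
        try:
            async with session.get(url) as response:
                if not response.ok:
                    return None
                return await response.json()
        except (asyncio.TimeoutError, aiohttp.ClientError):
            return None

    async with aiohttp.ClientSession(timeout=timeout) as session:
        results = await asyncio.gather(*(fetch(session, url) for url in urls))
    return [result for result in results if result is not None]

# asyncio.run(fan_out(["https://example.com/search?q=dune"]))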
@ -97,7 +97,7 @@ class Connector(AbstractConnector):
|
|||
)
|
||||
|
||||
def parse_isbn_search_data(self, data):
|
||||
"""got some daaaata"""
|
||||
"""got some data"""
|
||||
results = data.get("entities")
|
||||
if not results:
|
||||
return
|
||||
|
@ -165,8 +165,8 @@ class Connector(AbstractConnector):
|
|||
edition_data = self.get_book_data(edition_data)
|
||||
except ConnectorException:
|
||||
# who, indeed, knows
|
||||
return
|
||||
super().create_edition_from_data(work, edition_data, instance=instance)
|
||||
return None
|
||||
return super().create_edition_from_data(work, edition_data, instance=instance)
|
||||
|
||||
def get_cover_url(self, cover_blob, *_):
|
||||
"""format the relative cover url into an absolute one:
|
||||
|
|
|
@ -3,7 +3,7 @@ from django.core.mail import EmailMultiAlternatives
|
|||
from django.template.loader import get_template
|
||||
|
||||
from bookwyrm import models, settings
|
||||
from bookwyrm.tasks import app
|
||||
from bookwyrm.tasks import app, HIGH
|
||||
from bookwyrm.settings import DOMAIN
|
||||
|
||||
|
||||
|
@ -18,12 +18,18 @@ def email_data():
|
|||
}
|
||||
|
||||
|
||||
def test_email(user):
|
||||
"""Just an admin checking if emails are sending"""
|
||||
data = email_data()
|
||||
send_email(user.email, *format_email("test", data))
|
||||
|
||||
|
||||
def email_confirmation_email(user):
|
||||
"""newly registered users confirm email address"""
|
||||
data = email_data()
|
||||
data["confirmation_code"] = user.confirmation_code
|
||||
data["confirmation_link"] = user.confirmation_link
|
||||
send_email.delay(user.email, *format_email("confirm", data))
|
||||
send_email(user.email, *format_email("confirm", data))
|
||||
|
||||
|
||||
def invite_email(invite_request):
|
||||
|
@ -38,7 +44,7 @@ def password_reset_email(reset_code):
|
|||
data = email_data()
|
||||
data["reset_link"] = reset_code.link
|
||||
data["user"] = reset_code.user.display_name
|
||||
send_email.delay(reset_code.user.email, *format_email("password_reset", data))
|
||||
send_email(reset_code.user.email, *format_email("password_reset", data))
|
||||
|
||||
|
||||
def moderation_report_email(report):
|
||||
|
@ -48,6 +54,7 @@ def moderation_report_email(report):
|
|||
if report.user:
|
||||
data["reportee"] = report.user.localname or report.user.username
|
||||
data["report_link"] = report.remote_id
|
||||
data["link_domain"] = report.links.exists()
|
||||
|
||||
for admin in models.User.objects.filter(
|
||||
groups__name__in=["admin", "moderator"]
|
||||
|
@ -68,7 +75,7 @@ def format_email(email_name, data):
|
|||
return (subject, html_content, text_content)
|
||||
|
||||
|
||||
@app.task(queue="high_priority")
|
||||
@app.task(queue=HIGH)
|
||||
def send_email(recipient, subject, html_content, text_content):
|
||||
"""use a task to send the email"""
|
||||
email = EmailMultiAlternatives(
|
||||
|
|
|
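For context on the send_email changes above: calling a Celery task directly runs it in-process, while .delay() hands it to a worker queue. A toy sketch with a stand-in app (the broker and task body are illustrative, not BookWyrm's setup):

from celery import Celery

app = Celery("sketch", broker="memory://")  # in-memory broker, just for the example

@app.task(queue="high_priority")
def send_email(recipient, subject):
    print(f"sending {subject!r} to {recipient}")

send_email("user@example.com", "Confirm your email")         # runs immediately, inline
# send_email.delay("user@example.com", "Confirm your email") # would enqueue it for a worker instead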
@ -2,19 +2,20 @@
|
|||
import datetime
|
||||
|
||||
from django import forms
|
||||
from django.core.exceptions import PermissionDenied
|
||||
from django.forms import widgets
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django_celery_beat.models import IntervalSchedule
|
||||
|
||||
from bookwyrm import models
|
||||
from .custom_form import CustomForm
|
||||
from .custom_form import CustomForm, StyledForm
|
||||
|
||||
|
||||
# pylint: disable=missing-class-docstring
|
||||
class ExpiryWidget(widgets.Select):
|
||||
def value_from_datadict(self, data, files, name):
|
||||
"""human-readable exiration time buckets"""
|
||||
"""human-readable expiration time buckets"""
|
||||
selected_string = super().value_from_datadict(data, files, name)
|
||||
|
||||
if selected_string == "day":
|
||||
|
@ -54,11 +55,46 @@ class CreateInviteForm(CustomForm):
|
|||
class SiteForm(CustomForm):
|
||||
class Meta:
|
||||
model = models.SiteSettings
|
||||
exclude = ["admin_code", "install_mode"]
|
||||
fields = [
|
||||
"name",
|
||||
"instance_tagline",
|
||||
"instance_description",
|
||||
"instance_short_description",
|
||||
"default_theme",
|
||||
"code_of_conduct",
|
||||
"privacy_policy",
|
||||
"impressum",
|
||||
"show_impressum",
|
||||
"logo",
|
||||
"logo_small",
|
||||
"favicon",
|
||||
"support_link",
|
||||
"support_title",
|
||||
"admin_email",
|
||||
"footer_item",
|
||||
]
|
||||
widgets = {
|
||||
"instance_short_description": forms.TextInput(
|
||||
attrs={"aria-describedby": "desc_instance_short_description"}
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
class RegistrationForm(CustomForm):
|
||||
class Meta:
|
||||
model = models.SiteSettings
|
||||
fields = [
|
||||
"allow_registration",
|
||||
"allow_invite_requests",
|
||||
"registration_closed_text",
|
||||
"invite_request_text",
|
||||
"invite_request_question",
|
||||
"invite_question_text",
|
||||
"require_confirm_email",
|
||||
"default_user_auth_group",
|
||||
]
|
||||
|
||||
widgets = {
|
||||
"require_confirm_email": forms.CheckboxInput(
|
||||
attrs={"aria-describedby": "desc_require_confirm_email"}
|
||||
),
|
||||
|
@ -68,6 +104,23 @@ class SiteForm(CustomForm):
|
|||
}
|
||||
|
||||
|
||||
class RegistrationLimitedForm(CustomForm):
|
||||
class Meta:
|
||||
model = models.SiteSettings
|
||||
fields = [
|
||||
"registration_closed_text",
|
||||
"invite_request_text",
|
||||
"invite_request_question",
|
||||
"invite_question_text",
|
||||
]
|
||||
|
||||
widgets = {
|
||||
"invite_request_text": forms.Textarea(
|
||||
attrs={"aria-describedby": "desc_invite_request_text"}
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
class ThemeForm(CustomForm):
|
||||
class Meta:
|
||||
model = models.Theme
|
||||
|
@ -130,7 +183,7 @@ class AutoModRuleForm(CustomForm):
|
|||
fields = ["string_match", "flag_users", "flag_statuses", "created_by"]
|
||||
|
||||
|
||||
class IntervalScheduleForm(CustomForm):
|
||||
class IntervalScheduleForm(StyledForm):
|
||||
class Meta:
|
||||
model = IntervalSchedule
|
||||
fields = ["every", "period"]
|
||||
|
@ -139,3 +192,10 @@ class IntervalScheduleForm(CustomForm):
|
|||
"every": forms.NumberInput(attrs={"aria-describedby": "desc_every"}),
|
||||
"period": forms.Select(attrs={"aria-describedby": "desc_period"}),
|
||||
}
|
||||
|
||||
# pylint: disable=arguments-differ
|
||||
def save(self, request, *args, **kwargs):
|
||||
"""This is an outside model so the perms check works differently"""
|
||||
if not request.user.has_perm("bookwyrm.moderate_user"):
|
||||
raise PermissionDenied()
|
||||
return super().save(*args, **kwargs)
|
||||
|
|
|
@ -15,12 +15,14 @@ class AuthorForm(CustomForm):
|
|||
"aliases",
|
||||
"bio",
|
||||
"wikipedia_link",
|
||||
"website",
|
||||
"born",
|
||||
"died",
|
||||
"openlibrary_key",
|
||||
"inventaire_id",
|
||||
"librarything_key",
|
||||
"goodreads_key",
|
||||
"isfdb",
|
||||
"isni",
|
||||
]
|
||||
widgets = {
|
||||
|
@ -30,10 +32,11 @@ class AuthorForm(CustomForm):
|
|||
"wikipedia_link": forms.TextInput(
|
||||
attrs={"aria-describedby": "desc_wikipedia_link"}
|
||||
),
|
||||
"website": forms.TextInput(attrs={"aria-describedby": "desc_website"}),
|
||||
"born": forms.SelectDateWidget(attrs={"aria-describedby": "desc_born"}),
|
||||
"died": forms.SelectDateWidget(attrs={"aria-describedby": "desc_died"}),
|
||||
"oepnlibrary_key": forms.TextInput(
|
||||
attrs={"aria-describedby": "desc_oepnlibrary_key"}
|
||||
"openlibrary_key": forms.TextInput(
|
||||
attrs={"aria-describedby": "desc_openlibrary_key"}
|
||||
),
|
||||
"inventaire_id": forms.TextInput(
|
||||
attrs={"aria-describedby": "desc_inventaire_id"}
|
||||
|
|
|
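All of these widget overrides exist to attach aria-describedby to the rendered inputs; a tiny sketch of what that sets up (the form and field names are invented for the example):

from django import forms

class ExampleForm(forms.Form):
    website = forms.CharField(
        # ties the input to the help-text element with this id for screen readers
        widget=forms.TextInput(attrs={"aria-describedby": "desc_website"})
    )

print(ExampleForm.base_fields["website"].widget.attrs)
# {'aria-describedby': 'desc_website'}; the rendered <input> carries the same attribute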
@ -18,19 +18,30 @@ class CoverForm(CustomForm):
|
|||
class EditionForm(CustomForm):
|
||||
class Meta:
|
||||
model = models.Edition
|
||||
exclude = [
|
||||
"remote_id",
|
||||
"origin_id",
|
||||
"created_date",
|
||||
"updated_date",
|
||||
"edition_rank",
|
||||
"authors",
|
||||
"parent_work",
|
||||
"shelves",
|
||||
"connector",
|
||||
"search_vector",
|
||||
"links",
|
||||
"file_links",
|
||||
fields = [
|
||||
"title",
|
||||
"subtitle",
|
||||
"description",
|
||||
"series",
|
||||
"series_number",
|
||||
"languages",
|
||||
"subjects",
|
||||
"publishers",
|
||||
"first_published_date",
|
||||
"published_date",
|
||||
"cover",
|
||||
"physical_format",
|
||||
"physical_format_detail",
|
||||
"pages",
|
||||
"isbn_13",
|
||||
"isbn_10",
|
||||
"openlibrary_key",
|
||||
"inventaire_id",
|
||||
"goodreads_key",
|
||||
"oclc_number",
|
||||
"asin",
|
||||
"aasin",
|
||||
"isfdb",
|
||||
]
|
||||
widgets = {
|
||||
"title": forms.TextInput(attrs={"aria-describedby": "desc_title"}),
|
||||
|
@ -73,10 +84,15 @@ class EditionForm(CustomForm):
|
|||
"inventaire_id": forms.TextInput(
|
||||
attrs={"aria-describedby": "desc_inventaire_id"}
|
||||
),
|
||||
"goodreads_key": forms.TextInput(
|
||||
attrs={"aria-describedby": "desc_goodreads_key"}
|
||||
),
|
||||
"oclc_number": forms.TextInput(
|
||||
attrs={"aria-describedby": "desc_oclc_number"}
|
||||
),
|
||||
"ASIN": forms.TextInput(attrs={"aria-describedby": "desc_ASIN"}),
|
||||
"AASIN": forms.TextInput(attrs={"aria-describedby": "desc_AASIN"}),
|
||||
"isfdb": forms.TextInput(attrs={"aria-describedby": "desc_isfdb"}),
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@ from django.forms import ModelForm
|
|||
from django.forms.widgets import Textarea
|
||||
|
||||
|
||||
class CustomForm(ModelForm):
|
||||
class StyledForm(ModelForm):
|
||||
"""add css classes to the forms"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
@ -16,7 +16,7 @@ class CustomForm(ModelForm):
|
|||
css_classes["checkbox"] = "checkbox"
|
||||
css_classes["textarea"] = "textarea"
|
||||
# pylint: disable=super-with-arguments
|
||||
super(CustomForm, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
for visible in self.visible_fields():
|
||||
if hasattr(visible.field.widget, "input_type"):
|
||||
input_type = visible.field.widget.input_type
|
||||
|
@ -24,3 +24,13 @@ class CustomForm(ModelForm):
|
|||
input_type = "textarea"
|
||||
visible.field.widget.attrs["rows"] = 5
|
||||
visible.field.widget.attrs["class"] = css_classes[input_type]
|
||||
|
||||
|
||||
class CustomForm(StyledForm):
|
||||
"""Check permissions on save"""
|
||||
|
||||
# pylint: disable=arguments-differ
|
||||
def save(self, request, *args, **kwargs):
|
||||
"""Save and check perms"""
|
||||
self.instance.raise_not_editable(request.user)
|
||||
return super().save(*args, **kwargs)
|
||||
|
|
|
@ -1,11 +1,13 @@
|
|||
""" using django model forms """
|
||||
from django import forms
|
||||
from django.contrib.auth.password_validation import validate_password
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from bookwyrm import models
|
||||
from bookwyrm.models.fields import ClearableFileInputWithWarning
|
||||
from .custom_form import CustomForm
|
||||
|
||||
|
||||
# pylint: disable=missing-class-docstring
|
||||
class EditUserForm(CustomForm):
|
||||
class Meta:
|
||||
|
@ -66,3 +68,51 @@ class DeleteUserForm(CustomForm):
|
|||
class Meta:
|
||||
model = models.User
|
||||
fields = ["password"]
|
||||
|
||||
|
||||
class ChangePasswordForm(CustomForm):
|
||||
current_password = forms.CharField(widget=forms.PasswordInput)
|
||||
confirm_password = forms.CharField(widget=forms.PasswordInput)
|
||||
|
||||
class Meta:
|
||||
model = models.User
|
||||
fields = ["password"]
|
||||
widgets = {
|
||||
"password": forms.PasswordInput(),
|
||||
}
|
||||
|
||||
def clean(self):
|
||||
"""Make sure passwords match and are valid"""
|
||||
current_password = self.data.get("current_password")
|
||||
if not self.instance.check_password(current_password):
|
||||
self.add_error("current_password", _("Incorrect password"))
|
||||
|
||||
cleaned_data = super().clean()
|
||||
new_password = cleaned_data.get("password")
|
||||
confirm_password = self.data.get("confirm_password")
|
||||
|
||||
if new_password != confirm_password:
|
||||
self.add_error("confirm_password", _("Password does not match"))
|
||||
|
||||
try:
|
||||
validate_password(new_password)
|
||||
except ValidationError as err:
|
||||
self.add_error("password", err)
|
||||
|
||||
|
||||
class ConfirmPasswordForm(CustomForm):
|
||||
password = forms.CharField(widget=forms.PasswordInput)
|
||||
|
||||
class Meta:
|
||||
model = models.User
|
||||
fields = ["password"]
|
||||
widgets = {
|
||||
"password": forms.PasswordInput(),
|
||||
}
|
||||
|
||||
def clean(self):
|
||||
"""Make sure password is correct"""
|
||||
password = self.data.get("password")
|
||||
|
||||
if not self.instance.check_password(password):
|
||||
self.add_error("password", _("Incorrect Password"))
|
||||
|
|
|
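The password forms above lean on Django's validate_password and attach its errors to the field; a standalone sketch with explicitly chosen validators (the weak example password and minimal settings are just for the demonstration):

from django.conf import settings
from django.contrib.auth.password_validation import (
    CommonPasswordValidator,
    MinimumLengthValidator,
    validate_password,
)
from django.core.exceptions import ValidationError

settings.configure(USE_I18N=False)  # just enough to run the validators outside a project

try:
    validate_password(
        "password",
        password_validators=[MinimumLengthValidator(), CommonPasswordValidator()],
    )
except ValidationError as err:
    print(err.messages)  # ['This password is too common.']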
@ -1,4 +1,5 @@
|
|||
""" using django model forms """
|
||||
import datetime
|
||||
from django import forms
|
||||
from django.forms import widgets
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
@ -7,7 +8,6 @@ from bookwyrm import models
|
|||
from bookwyrm.models.user import FeedFilterChoices
|
||||
from .custom_form import CustomForm
|
||||
|
||||
|
||||
# pylint: disable=missing-class-docstring
|
||||
class FeedStatusTypesForm(CustomForm):
|
||||
class Meta:
|
||||
|
@ -58,6 +58,21 @@ class ReadThroughForm(CustomForm):
|
|||
self.add_error(
|
||||
"stopped_date", _("Reading stopped date cannot be before start date.")
|
||||
)
|
||||
current_time = datetime.datetime.now()
|
||||
if (
|
||||
stopped_date is not None
|
||||
and current_time.timestamp() < stopped_date.timestamp()
|
||||
):
|
||||
self.add_error(
|
||||
"stopped_date", _("Reading stopped date cannot be in the future.")
|
||||
)
|
||||
if (
|
||||
finish_date is not None
|
||||
and current_time.timestamp() < finish_date.timestamp()
|
||||
):
|
||||
self.add_error(
|
||||
"finish_date", _("Reading finished date cannot be in the future.")
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = models.ReadThrough
|
||||
|
|
|
@ -1,8 +1,14 @@
|
|||
""" Forms for the landing pages """
|
||||
from django.forms import PasswordInput
|
||||
from django import forms
|
||||
from django.contrib.auth.password_validation import validate_password
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
import pyotp
|
||||
|
||||
from bookwyrm import models
|
||||
from bookwyrm.settings import DOMAIN
|
||||
from bookwyrm.settings import TWO_FACTOR_LOGIN_VALIDITY_WINDOW
|
||||
from .custom_form import CustomForm
|
||||
|
||||
|
||||
|
@ -13,21 +19,40 @@ class LoginForm(CustomForm):
|
|||
fields = ["localname", "password"]
|
||||
help_texts = {f: None for f in fields}
|
||||
widgets = {
|
||||
"password": PasswordInput(),
|
||||
"password": forms.PasswordInput(),
|
||||
}
|
||||
|
||||
def infer_username(self):
|
||||
"""Users may enter their localname, username, or email"""
|
||||
localname = self.data.get("localname")
|
||||
if "@" in localname: # looks like an email address to me
|
||||
try:
|
||||
return models.User.objects.get(email=localname).username
|
||||
except models.User.DoesNotExist: # maybe it's a full username?
|
||||
return localname
|
||||
return f"{localname}@{DOMAIN}"
|
||||
|
||||
def add_invalid_password_error(self):
|
||||
"""We don't want to be too specific about this"""
|
||||
# pylint: disable=attribute-defined-outside-init
|
||||
self.non_field_errors = _("Username or password are incorrect")
|
||||
|
||||
|
||||
class RegisterForm(CustomForm):
|
||||
class Meta:
|
||||
model = models.User
|
||||
fields = ["localname", "email", "password"]
|
||||
help_texts = {f: None for f in fields}
|
||||
widgets = {"password": PasswordInput()}
|
||||
widgets = {"password": forms.PasswordInput()}
|
||||
|
||||
def clean(self):
|
||||
"""Check if the username is taken"""
|
||||
cleaned_data = super().clean()
|
||||
localname = cleaned_data.get("localname").strip()
|
||||
try:
|
||||
validate_password(cleaned_data.get("password"))
|
||||
except ValidationError as err:
|
||||
self.add_error("password", err)
|
||||
if models.User.objects.filter(localname=localname).first():
|
||||
self.add_error("localname", _("User with this username already exists"))
|
||||
|
||||
|
@ -43,3 +68,65 @@ class InviteRequestForm(CustomForm):
|
|||
class Meta:
|
||||
model = models.InviteRequest
|
||||
fields = ["email", "answer"]
|
||||
|
||||
|
||||
class PasswordResetForm(CustomForm):
|
||||
confirm_password = forms.CharField(widget=forms.PasswordInput)
|
||||
|
||||
class Meta:
|
||||
model = models.User
|
||||
fields = ["password"]
|
||||
widgets = {
|
||||
"password": forms.PasswordInput(),
|
||||
}
|
||||
|
||||
def clean(self):
|
||||
"""Make sure the passwords match and are valid"""
|
||||
cleaned_data = super().clean()
|
||||
new_password = cleaned_data.get("password")
|
||||
confirm_password = self.data.get("confirm_password")
|
||||
|
||||
if new_password != confirm_password:
|
||||
self.add_error("confirm_password", _("Password does not match"))
|
||||
|
||||
try:
|
||||
validate_password(new_password)
|
||||
except ValidationError as err:
|
||||
self.add_error("password", err)
|
||||
|
||||
|
||||
class Confirm2FAForm(CustomForm):
|
||||
otp = forms.CharField(
|
||||
max_length=6, min_length=6, widget=forms.TextInput(attrs={"autofocus": True})
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = models.User
|
||||
fields = ["otp_secret", "hotp_count"]
|
||||
|
||||
def clean_otp(self):
|
||||
"""Check otp matches"""
|
||||
otp = self.data.get("otp")
|
||||
totp = pyotp.TOTP(self.instance.otp_secret)
|
||||
|
||||
if not totp.verify(otp, valid_window=TWO_FACTOR_LOGIN_VALIDITY_WINDOW):
|
||||
|
||||
if self.instance.hotp_secret:
|
||||
# maybe it's a backup code?
|
||||
hotp = pyotp.HOTP(self.instance.hotp_secret)
|
||||
hotp_count = (
|
||||
self.instance.hotp_count
|
||||
if self.instance.hotp_count is not None
|
||||
else 0
|
||||
)
|
||||
|
||||
if not hotp.verify(otp, hotp_count):
|
||||
self.add_error("otp", _("Incorrect code"))
|
||||
|
||||
# increment the user hotp_count
|
||||
else:
|
||||
self.instance.hotp_count = hotp_count + 1
|
||||
self.instance.save(broadcast=False, update_fields=["hotp_count"])
|
||||
|
||||
else:
|
||||
self.add_error("otp", _("Incorrect code"))
|
||||
|
|
|
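Confirm2FAForm above relies on pyotp for both the time-based code and the counter-based backup path; a self-contained sketch (the secret here is freshly generated, not a stored user secret):

import pyotp

secret = pyotp.random_base32()

totp = pyotp.TOTP(secret)
code = totp.now()                          # what the authenticator app shows right now
print(totp.verify(code, valid_window=2))   # True; valid_window tolerates some clock drift

hotp = pyotp.HOTP(secret)                  # backup codes are counter-based
print(hotp.verify(hotp.at(0), 0))          # True; the counter is bumped after each use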
@ -36,9 +36,12 @@ class FileLinkForm(CustomForm):
|
|||
"This domain is blocked. Please contact your administrator if you think this is an error."
|
||||
),
|
||||
)
|
||||
elif models.FileLink.objects.filter(
|
||||
if (
|
||||
not self.instance
|
||||
and models.FileLink.objects.filter(
|
||||
url=url, book=book, filetype=filetype
|
||||
).exists():
|
||||
).exists()
|
||||
):
|
||||
# pylint: disable=line-too-long
|
||||
self.add_error(
|
||||
"url",
|
||||
|
|
|
@ -53,6 +53,7 @@ class QuotationForm(CustomForm):
|
|||
"sensitive",
|
||||
"privacy",
|
||||
"position",
|
||||
"endposition",
|
||||
"position_mode",
|
||||
]
|
||||
|
||||
|
|
|
@ -1,15 +1,8 @@
|
|||
""" handle reading a csv from an external service, defaults are from Goodreads """
|
||||
import csv
|
||||
import logging
|
||||
|
||||
from datetime import timedelta
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from bookwyrm import models
|
||||
from bookwyrm.models import ImportJob, ImportItem
|
||||
from bookwyrm.tasks import app, LOW
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
from bookwyrm.models import ImportJob, ImportItem, SiteSettings
|
||||
|
||||
|
||||
class Importer:
|
||||
|
@ -24,8 +17,8 @@ class Importer:
|
|||
("id", ["id", "book id"]),
|
||||
("title", ["title"]),
|
||||
("authors", ["author", "authors", "primary author"]),
|
||||
("isbn_10", ["isbn10", "isbn"]),
|
||||
("isbn_13", ["isbn13", "isbn", "isbns"]),
|
||||
("isbn_10", ["isbn10", "isbn", "isbn/uid"]),
|
||||
("isbn_13", ["isbn13", "isbn", "isbns", "isbn/uid"]),
|
||||
("shelf", ["shelf", "exclusive shelf", "read status", "bookshelf"]),
|
||||
("review_name", ["review name"]),
|
||||
("review_body", ["my review", "review"]),
|
||||
|
@ -41,10 +34,15 @@ class Importer:
|
|||
"reading": ["currently-reading", "reading", "currently reading"],
|
||||
}
|
||||
|
||||
# pylint: disable=too-many-locals
|
||||
def create_job(self, user, csv_file, include_reviews, privacy):
|
||||
"""check over a csv and creates a database entry for the job"""
|
||||
csv_reader = csv.DictReader(csv_file, delimiter=self.delimiter)
|
||||
rows = enumerate(list(csv_reader))
|
||||
rows = list(csv_reader)
|
||||
if len(rows) < 1:
|
||||
raise ValueError("CSV file is empty")
|
||||
rows = enumerate(rows)
|
||||
|
||||
job = ImportJob.objects.create(
|
||||
user=user,
|
||||
include_reviews=include_reviews,
|
||||
|
@ -53,7 +51,13 @@ class Importer:
|
|||
source=self.service,
|
||||
)
|
||||
|
||||
enforce_limit, allowed_imports = self.get_import_limit(user)
|
||||
if enforce_limit and allowed_imports <= 0:
|
||||
job.complete_job()
|
||||
return job
|
||||
for index, entry in rows:
|
||||
if enforce_limit and index >= allowed_imports:
|
||||
break
|
||||
self.create_item(job, index, entry)
|
||||
return job
|
||||
|
||||
|
@ -103,6 +107,24 @@ class Importer:
|
|||
"""use the dataclass to create the formatted row of data"""
|
||||
return {k: entry.get(v) for k, v in mappings.items()}
|
||||
|
||||
def get_import_limit(self, user): # pylint: disable=no-self-use
|
||||
"""check if import limit is set and return how many imports are left"""
|
||||
site_settings = SiteSettings.objects.get()
|
||||
import_size_limit = site_settings.import_size_limit
|
||||
import_limit_reset = site_settings.import_limit_reset
|
||||
enforce_limit = import_size_limit and import_limit_reset
|
||||
allowed_imports = 0
|
||||
|
||||
if enforce_limit:
|
||||
time_range = timezone.now() - timedelta(days=import_limit_reset)
|
||||
import_jobs = ImportJob.objects.filter(
|
||||
user=user, created_date__gte=time_range
|
||||
)
|
||||
# pylint: disable=consider-using-generator
|
||||
imported_books = sum([job.successful_item_count for job in import_jobs])
|
||||
allowed_imports = import_size_limit - imported_books
|
||||
return enforce_limit, allowed_imports
|
||||
|
||||
def create_retry_job(self, user, original_job, items):
|
||||
"""retry items that didn't import"""
|
||||
job = ImportJob.objects.create(
|
||||
|
@ -114,131 +136,13 @@ class Importer:
|
|||
mappings=original_job.mappings,
|
||||
retry=True,
|
||||
)
|
||||
for item in items:
|
||||
enforce_limit, allowed_imports = self.get_import_limit(user)
|
||||
if enforce_limit and allowed_imports <= 0:
|
||||
job.complete_job()
|
||||
return job
|
||||
for index, item in enumerate(items):
|
||||
if enforce_limit and index >= allowed_imports:
|
||||
break
|
||||
# this will re-normalize the raw data
|
||||
self.create_item(job, item.index, item.data)
|
||||
return job
|
||||
|
||||
def start_import(self, job): # pylint: disable=no-self-use
|
||||
"""initalizes a csv import job"""
|
||||
result = start_import_task.delay(job.id)
|
||||
job.task_id = result.id
|
||||
job.save()
|
||||
|
||||
|
||||
@app.task(queue="low_priority")
|
||||
def start_import_task(job_id):
|
||||
"""trigger the child tasks for each row"""
|
||||
job = ImportJob.objects.get(id=job_id)
|
||||
# these are sub-tasks so that one big task doesn't use up all the memory in celery
|
||||
for item in job.items.values_list("id", flat=True).all():
|
||||
import_item_task.delay(item)
|
||||
|
||||
|
||||
@app.task(queue="low_priority")
|
||||
def import_item_task(item_id):
|
||||
"""resolve a row into a book"""
|
||||
item = models.ImportItem.objects.get(id=item_id)
|
||||
try:
|
||||
item.resolve()
|
||||
except Exception as err: # pylint: disable=broad-except
|
||||
item.fail_reason = _("Error loading book")
|
||||
item.save()
|
||||
item.update_job()
|
||||
raise err
|
||||
|
||||
if item.book:
|
||||
# shelves book and handles reviews
|
||||
handle_imported_book(item)
|
||||
else:
|
||||
item.fail_reason = _("Could not find a match for book")
|
||||
|
||||
item.save()
|
||||
item.update_job()
|
||||
|
||||
|
||||
def handle_imported_book(item):
|
||||
"""process a csv and then post about it"""
|
||||
job = item.job
|
||||
user = job.user
|
||||
if isinstance(item.book, models.Work):
|
||||
item.book = item.book.default_edition
|
||||
if not item.book:
|
||||
item.fail_reason = _("Error loading book")
|
||||
item.save()
|
||||
return
|
||||
if not isinstance(item.book, models.Edition):
|
||||
item.book = item.book.edition
|
||||
|
||||
existing_shelf = models.ShelfBook.objects.filter(book=item.book, user=user).exists()
|
||||
|
||||
# shelve the book if it hasn't been shelved already
|
||||
if item.shelf and not existing_shelf:
|
||||
desired_shelf = models.Shelf.objects.get(identifier=item.shelf, user=user)
|
||||
shelved_date = item.date_added or timezone.now()
|
||||
models.ShelfBook(
|
||||
book=item.book, shelf=desired_shelf, user=user, shelved_date=shelved_date
|
||||
).save(priority=LOW)
|
||||
|
||||
for read in item.reads:
|
||||
# check for an existing readthrough with the same dates
|
||||
if models.ReadThrough.objects.filter(
|
||||
user=user,
|
||||
book=item.book,
|
||||
start_date=read.start_date,
|
||||
finish_date=read.finish_date,
|
||||
).exists():
|
||||
continue
|
||||
read.book = item.book
|
||||
read.user = user
|
||||
read.save()
|
||||
|
||||
if job.include_reviews and (item.rating or item.review) and not item.linked_review:
|
||||
# we don't know the publication date of the review,
|
||||
# but "now" is a bad guess
|
||||
published_date_guess = item.date_read or item.date_added
|
||||
if item.review:
|
||||
# pylint: disable=consider-using-f-string
|
||||
review_title = "Review of {!r} on {!r}".format(
|
||||
item.book.title,
|
||||
job.source,
|
||||
)
|
||||
review = models.Review.objects.filter(
|
||||
user=user,
|
||||
book=item.book,
|
||||
name=review_title,
|
||||
rating=item.rating,
|
||||
published_date=published_date_guess,
|
||||
).first()
|
||||
if not review:
|
||||
review = models.Review(
|
||||
user=user,
|
||||
book=item.book,
|
||||
name=review_title,
|
||||
content=item.review,
|
||||
rating=item.rating,
|
||||
published_date=published_date_guess,
|
||||
privacy=job.privacy,
|
||||
)
|
||||
review.save(software="bookwyrm", priority=LOW)
|
||||
else:
|
||||
# just a rating
|
||||
review = models.ReviewRating.objects.filter(
|
||||
user=user,
|
||||
book=item.book,
|
||||
published_date=published_date_guess,
|
||||
rating=item.rating,
|
||||
).first()
|
||||
if not review:
|
||||
review = models.ReviewRating(
|
||||
user=user,
|
||||
book=item.book,
|
||||
rating=item.rating,
|
||||
published_date=published_date_guess,
|
||||
privacy=job.privacy,
|
||||
)
|
||||
review.save(software="bookwyrm", priority=LOW)
|
||||
|
||||
# only broadcast this review to other bookwyrm instances
|
||||
item.linked_review = review
|
||||
item.save()
|
||||
|
|
|
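The new get_import_limit hook boils down to simple arithmetic over recent jobs; a standalone sketch with made-up numbers and hypothetical argument names:

def remaining_imports(import_size_limit, import_limit_reset_days, recent_successful_counts):
    """mirror of the limit calculation above, outside the Django models"""
    enforce_limit = bool(import_size_limit and import_limit_reset_days)
    if not enforce_limit:
        return False, 0
    already_imported = sum(recent_successful_counts)  # successes within the reset window
    return True, import_size_limit - already_imported

# a 500-row limit over 30 days with 420 rows already imported leaves 80
print(remaining_imports(500, 30, [300, 120]))  # (True, 80)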
@ -19,7 +19,7 @@ class LibrarythingImporter(Importer):
|
|||
normalized = {k: remove_brackets(entry.get(v)) for k, v in mappings.items()}
|
||||
isbn_13 = normalized.get("isbn_13")
|
||||
isbn_13 = isbn_13.split(", ") if isbn_13 else []
|
||||
normalized["isbn_13"] = isbn_13[1] if len(isbn_13) > 0 else None
|
||||
normalized["isbn_13"] = isbn_13[1] if len(isbn_13) > 1 else None
|
||||
return normalized
|
||||
|
||||
def get_shelf(self, normalized_row):
|
||||
|
|
|
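The one-character LibraryThing change above guards the [1] index; with only one ISBN in the cell, the old len(...) > 0 check passed and the lookup then raised IndexError. Illustrative values:

isbn_13 = "1111111111, 2222222222222".split(", ")
print(isbn_13[1] if len(isbn_13) > 1 else None)  # '2222222222222'

isbn_13 = "2222222222222".split(", ")            # only one value in the export cell
print(isbn_13[1] if len(isbn_13) > 1 else None)  # None instead of an IndexError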
@ -24,8 +24,7 @@ class ListsStream(RedisStore):
|
|||
|
||||
def add_list(self, book_list):
|
||||
"""add a list to users' feeds"""
|
||||
# the pipeline contains all the add-to-stream activities
|
||||
self.add_object_to_related_stores(book_list)
|
||||
self.add_object_to_stores(book_list, self.get_stores_for_object(book_list))
|
||||
|
||||
def add_user_lists(self, viewer, user):
|
||||
"""add a user's lists to another user's feed"""
|
||||
|
@ -86,18 +85,19 @@ class ListsStream(RedisStore):
|
|||
if group:
|
||||
audience = audience.filter(
|
||||
Q(id=book_list.user.id) # if the user is the list's owner
|
||||
| Q(following=book_list.user) # if the user is following the pwmer
|
||||
| Q(following=book_list.user) # if the user is following the owner
|
||||
# if a user is in the group
|
||||
| Q(memberships__group__id=book_list.group.id)
|
||||
)
|
||||
else:
|
||||
audience = audience.filter(
|
||||
Q(id=book_list.user.id) # if the user is the list's owner
|
||||
| Q(following=book_list.user) # if the user is following the pwmer
|
||||
| Q(following=book_list.user) # if the user is following the owner
|
||||
)
|
||||
return audience.distinct()
|
||||
|
||||
def get_stores_for_object(self, obj):
|
||||
"""the stores that an object belongs in"""
|
||||
return [self.stream_id(u) for u in self.get_audience(obj)]
|
||||
|
||||
def get_lists_for_user(self, user): # pylint: disable=no-self-use
|
||||
|
@ -233,7 +233,7 @@ def remove_list_task(list_id, re_add=False):
|
|||
|
||||
# delete for every store
|
||||
stores = [ListsStream().stream_id(idx) for idx in stores]
|
||||
ListsStream().remove_object_from_related_stores(list_id, stores=stores)
|
||||
ListsStream().remove_object_from_stores(list_id, stores)
|
||||
|
||||
if re_add:
|
||||
add_list_task.delay(list_id)
|
||||
|
|
48
bookwyrm/management/commands/compile_themes.py
Normal file

|
@ -0,0 +1,48 @@
|
|||
""" Our own command to all scss themes """
|
||||
import glob
|
||||
import os
|
||||
|
||||
import sass
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from sass_processor.apps import APPS_INCLUDE_DIRS
|
||||
from sass_processor.processor import SassProcessor
|
||||
from sass_processor.utils import get_custom_functions
|
||||
|
||||
from bookwyrm import settings
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""command-line options"""
|
||||
|
||||
help = "SCSS compile all BookWyrm themes"
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def handle(self, *args, **options):
|
||||
"""compile"""
|
||||
themes_dir = os.path.join(
|
||||
settings.BASE_DIR, "bookwyrm", "static", "css", "themes", "*.scss"
|
||||
)
|
||||
for theme_scss in glob.glob(themes_dir):
|
||||
basename, _ = os.path.splitext(theme_scss)
|
||||
theme_css = f"{basename}.css"
|
||||
self.compile_sass(theme_scss, theme_css)
|
||||
|
||||
def compile_sass(self, sass_path, css_path):
|
||||
compile_kwargs = {
|
||||
"filename": sass_path,
|
||||
"include_paths": SassProcessor.include_paths + APPS_INCLUDE_DIRS,
|
||||
"custom_functions": get_custom_functions(),
|
||||
"precision": getattr(settings, "SASS_PRECISION", 8),
|
||||
"output_style": getattr(
|
||||
settings,
|
||||
"SASS_OUTPUT_STYLE",
|
||||
"nested" if settings.DEBUG else "compressed",
|
||||
),
|
||||
}
|
||||
|
||||
content = sass.compile(**compile_kwargs)
|
||||
with open(css_path, "w") as f:
|
||||
f.write(content)
|
||||
self.stdout.write("Compiled SASS/SCSS file: '{0}'\n".format(sass_path))
|
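Assuming the command registers under its file name, the new SCSS build can be invoked from code as well as from manage.py:

from django.core.management import call_command

call_command("compile_themes")  # same as: python manage.py compile_themes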
19
bookwyrm/management/commands/confirm_email.py
Normal file
|
@ -0,0 +1,19 @@
|
|||
""" manually confirm e-mail of user """
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from bookwyrm import models
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""command-line options"""
|
||||
|
||||
help = "Manually confirm email for user"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument("username")
|
||||
|
||||
def handle(self, *args, **options):
|
||||
name = options["username"]
|
||||
user = models.User.objects.get(localname=name)
|
||||
user.reactivate()
|
||||
self.stdout.write(self.style.SUCCESS("User's email is now confirmed."))
|
|
@ -3,38 +3,7 @@ merge book data objects """
|
|||
from django.core.management.base import BaseCommand
|
||||
from django.db.models import Count
|
||||
from bookwyrm import models
|
||||
|
||||
|
||||
def update_related(canonical, obj):
|
||||
"""update all the models with fk to the object being removed"""
|
||||
# move related models to canonical
|
||||
related_models = [
|
||||
(r.remote_field.name, r.related_model) for r in canonical._meta.related_objects
|
||||
]
|
||||
for (related_field, related_model) in related_models:
|
||||
related_objs = related_model.objects.filter(**{related_field: obj})
|
||||
for related_obj in related_objs:
|
||||
print("replacing in", related_model.__name__, related_field, related_obj.id)
|
||||
try:
|
||||
setattr(related_obj, related_field, canonical)
|
||||
related_obj.save()
|
||||
except TypeError:
|
||||
getattr(related_obj, related_field).add(canonical)
|
||||
getattr(related_obj, related_field).remove(obj)
|
||||
|
||||
|
||||
def copy_data(canonical, obj):
|
||||
"""try to get the most data possible"""
|
||||
for data_field in obj._meta.get_fields():
|
||||
if not hasattr(data_field, "activitypub_field"):
|
||||
continue
|
||||
data_value = getattr(obj, data_field.name)
|
||||
if not data_value:
|
||||
continue
|
||||
if not getattr(canonical, data_field.name):
|
||||
print("setting data field", data_field.name, data_value)
|
||||
setattr(canonical, data_field.name, data_value)
|
||||
canonical.save()
|
||||
from bookwyrm.management.merge import merge_objects
|
||||
|
||||
|
||||
def dedupe_model(model):
|
||||
|
@ -61,19 +30,16 @@ def dedupe_model(model):
|
|||
print("keeping", canonical.remote_id)
|
||||
for obj in objs[1:]:
|
||||
print(obj.remote_id)
|
||||
copy_data(canonical, obj)
|
||||
update_related(canonical, obj)
|
||||
# remove the outdated entry
|
||||
obj.delete()
|
||||
merge_objects(canonical, obj)
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""dedplucate allllll the book data models"""
|
||||
"""deduplicate allllll the book data models"""
|
||||
|
||||
help = "merges duplicate book data"
|
||||
# pylint: disable=no-self-use,unused-argument
|
||||
def handle(self, *args, **options):
|
||||
"""run deudplications"""
|
||||
"""run deduplications"""
|
||||
dedupe_model(models.Edition)
|
||||
dedupe_model(models.Work)
|
||||
dedupe_model(models.Author)
|
||||
|
|
|
@ -4,12 +4,7 @@ import redis
|
|||
|
||||
from bookwyrm import settings
|
||||
|
||||
r = redis.Redis(
|
||||
host=settings.REDIS_ACTIVITY_HOST,
|
||||
port=settings.REDIS_ACTIVITY_PORT,
|
||||
password=settings.REDIS_ACTIVITY_PASSWORD,
|
||||
db=settings.REDIS_ACTIVITY_DB_INDEX,
|
||||
)
|
||||
r = redis.from_url(settings.REDIS_ACTIVITY_URL)
|
||||
|
||||
|
||||
def erase_streams():
|
||||
|
|
|
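redis.from_url accepts a full connection URL, which is what lets the helper above work with unix sockets as well as TCP; the URLs below are illustrative, not real credentials:

import redis

r = redis.from_url("redis://:examplepassword@localhost:6379/0")  # host, port, db and password in one string
# r = redis.from_url("unix:///run/redis.sock?db=0")              # or a unix-socket path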
@ -8,54 +8,64 @@ from bookwyrm import models
|
|||
|
||||
def init_groups():
|
||||
"""permission levels"""
|
||||
groups = ["admin", "moderator", "editor"]
|
||||
groups = ["admin", "owner", "moderator", "editor"]
|
||||
for group in groups:
|
||||
Group.objects.create(name=group)
|
||||
Group.objects.get_or_create(name=group)
|
||||
|
||||
|
||||
def init_permissions():
|
||||
"""permission types"""
|
||||
permissions = [
|
||||
{
|
||||
"codename": "manage_registration",
|
||||
"name": "allow or prevent user registration",
|
||||
"groups": ["admin"],
|
||||
},
|
||||
{
|
||||
"codename": "system_administration",
|
||||
"name": "technical controls",
|
||||
"groups": ["admin"],
|
||||
},
|
||||
{
|
||||
"codename": "edit_instance_settings",
|
||||
"name": "change the instance info",
|
||||
"groups": ["admin"],
|
||||
"groups": ["admin", "owner"],
|
||||
},
|
||||
{
|
||||
"codename": "set_user_group",
|
||||
"name": "change what group a user is in",
|
||||
"groups": ["admin", "moderator"],
|
||||
"groups": ["admin", "owner", "moderator"],
|
||||
},
|
||||
{
|
||||
"codename": "control_federation",
|
||||
"name": "control who to federate with",
|
||||
"groups": ["admin", "moderator"],
|
||||
"groups": ["admin", "owner", "moderator"],
|
||||
},
|
||||
{
|
||||
"codename": "create_invites",
|
||||
"name": "issue invitations to join",
|
||||
"groups": ["admin", "moderator"],
|
||||
"groups": ["admin", "owner", "moderator"],
|
||||
},
|
||||
{
|
||||
"codename": "moderate_user",
|
||||
"name": "deactivate or silence a user",
|
||||
"groups": ["admin", "moderator"],
|
||||
"groups": ["admin", "owner", "moderator"],
|
||||
},
|
||||
{
|
||||
"codename": "moderate_post",
|
||||
"name": "delete other users' posts",
|
||||
"groups": ["admin", "moderator"],
|
||||
"groups": ["admin", "owner", "moderator"],
|
||||
},
|
||||
{
|
||||
"codename": "edit_book",
|
||||
"name": "edit book info",
|
||||
"groups": ["admin", "moderator", "editor"],
|
||||
"groups": ["admin", "owner", "moderator", "editor"],
|
||||
},
|
||||
]
|
||||
|
||||
content_type = ContentType.objects.get_for_model(models.User)
|
||||
for permission in permissions:
|
||||
permission_obj = Permission.objects.create(
|
||||
permission_obj, _ = Permission.objects.get_or_create(
|
||||
codename=permission["codename"],
|
||||
name=permission["name"],
|
||||
content_type=content_type,
|
||||
|
@ -107,10 +117,12 @@ def init_connectors():
|
|||
|
||||
def init_settings():
|
||||
"""info about the instance"""
|
||||
group_editor = Group.objects.filter(name="editor").first()
|
||||
models.SiteSettings.objects.create(
|
||||
support_link="https://www.patreon.com/bookwyrm",
|
||||
support_title="Patreon",
|
||||
install_mode=True,
|
||||
default_user_auth_group=group_editor,
|
||||
)
|
||||
|
||||
|
||||
|
|
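Switching the initdb helpers to get_or_create makes them safe to re-run; a quick sketch of the return value (run inside a configured Django project, group name as in the hunk above):

from django.contrib.auth.models import Group

group, created = Group.objects.get_or_create(name="editor")
print(created)  # True on the first run, False on every run after that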
12
bookwyrm/management/commands/merge_authors.py
Normal file
|
@ -0,0 +1,12 @@
|
|||
""" PROCEED WITH CAUTION: uses deduplication fields to permanently
|
||||
merge author data objects """
|
||||
from bookwyrm import models
|
||||
from bookwyrm.management.merge_command import MergeCommand
|
||||
|
||||
|
||||
class Command(MergeCommand):
|
||||
"""merges two authors by ID"""
|
||||
|
||||
help = "merges specified authors into one"
|
||||
|
||||
MODEL = models.Author
|
12
bookwyrm/management/commands/merge_editions.py
Normal file
|
@ -0,0 +1,12 @@
|
|||
""" PROCEED WITH CAUTION: uses deduplication fields to permanently
|
||||
merge edition data objects """
|
||||
from bookwyrm import models
|
||||
from bookwyrm.management.merge_command import MergeCommand
|
||||
|
||||
|
||||
class Command(MergeCommand):
|
||||
"""merges two editions by ID"""
|
||||
|
||||
help = "merges specified editions into one"
|
||||
|
||||
MODEL = models.Edition
|
22
bookwyrm/management/commands/remove_2fa.py
Normal file
|
@ -0,0 +1,22 @@
|
|||
"""deactivate two factor auth"""
|
||||
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from bookwyrm import models
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""command-line options"""
|
||||
|
||||
help = "Remove Two Factor Authorisation from user"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument("username")
|
||||
|
||||
def handle(self, *args, **options):
|
||||
name = options["username"]
|
||||
user = models.User.objects.get(localname=name)
|
||||
user.two_factor_auth = False
|
||||
user.save(broadcast=False, update_fields=["two_factor_auth"])
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Two Factor Authorisation was removed from user")
|
||||
)
|
|
@ -33,10 +33,10 @@ def remove_editions():
|
|||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""dedplucate allllll the book data models"""
|
||||
"""deduplicate allllll the book data models"""
|
||||
|
||||
help = "merges duplicate book data"
|
||||
# pylint: disable=no-self-use,unused-argument
|
||||
def handle(self, *args, **options):
|
||||
"""run deudplications"""
|
||||
"""run deduplications"""
|
||||
remove_editions()
|
||||
|
|
|
@ -0,0 +1,40 @@
|
|||
""" Remove preview images for remote users """
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db.models import Q
|
||||
|
||||
from bookwyrm import models, preview_images
|
||||
|
||||
|
||||
# pylint: disable=line-too-long
|
||||
class Command(BaseCommand):
|
||||
"""Remove preview images for remote users"""
|
||||
|
||||
help = "Remove preview images for remote users"
|
||||
|
||||
# pylint: disable=no-self-use,unused-argument
|
||||
def handle(self, *args, **options):
|
||||
"""generate preview images"""
|
||||
self.stdout.write(
|
||||
" | Hello! I will be removing preview images from remote users."
|
||||
)
|
||||
self.stdout.write(
|
||||
"🧑🚒 ⎨ This might take quite long if your instance has a lot of remote users."
|
||||
)
|
||||
self.stdout.write(" | ✧ Thank you for your patience ✧")
|
||||
|
||||
users = models.User.objects.filter(local=False).exclude(
|
||||
Q(preview_image="") | Q(preview_image=None)
|
||||
)
|
||||
|
||||
if len(users) > 0:
|
||||
self.stdout.write(
|
||||
f" → Remote user preview images ({len(users)}): ", ending=""
|
||||
)
|
||||
for user in users:
|
||||
preview_images.remove_user_preview_image_task.delay(user.id)
|
||||
self.stdout.write(".", ending="")
|
||||
self.stdout.write(" OK 🖼")
|
||||
else:
|
||||
self.stdout.write(f" | There was no remote users with preview images.")
|
||||
|
||||
self.stdout.write("🧑🚒 ⎨ I’m all done! ✧ Enjoy ✧")
|
31
bookwyrm/management/commands/revoke_preview_image_tasks.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
""" Actually let's not generate those preview images """
|
||||
import json
|
||||
from django.core.management.base import BaseCommand
|
||||
from bookwyrm.tasks import app
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Find and revoke image tasks"""
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def handle(self, *args, **options):
|
||||
"""revoke nonessential low priority tasks"""
|
||||
types = [
|
||||
"bookwyrm.preview_images.generate_edition_preview_image_task",
|
||||
"bookwyrm.preview_images.generate_user_preview_image_task",
|
||||
]
|
||||
self.stdout.write(" | Finding tasks of types:")
|
||||
self.stdout.write("\n".join(types))
|
||||
with app.pool.acquire(block=True) as conn:
|
||||
tasks = conn.default_channel.client.lrange("low_priority", 0, -1)
|
||||
self.stdout.write(f" | Found {len(tasks)} task(s) in low priority queue")
|
||||
|
||||
revoke_ids = []
|
||||
for task in tasks:
|
||||
task_json = json.loads(task)
|
||||
task_type = task_json.get("headers", {}).get("task")
|
||||
if task_type in types:
|
||||
revoke_ids.append(task_json.get("headers", {}).get("id"))
|
||||
self.stdout.write(".", ending="")
|
||||
self.stdout.write(f"\n | Revoking {len(revoke_ids)} task(s)")
|
||||
app.control.revoke(revoke_ids)
|
50
bookwyrm/management/merge.py
Normal file
|
@ -0,0 +1,50 @@
|
|||
from django.db.models import ManyToManyField
|
||||
|
||||
|
||||
def update_related(canonical, obj):
|
||||
"""update all the models with fk to the object being removed"""
|
||||
# move related models to canonical
|
||||
related_models = [
|
||||
(r.remote_field.name, r.related_model) for r in canonical._meta.related_objects
|
||||
]
|
||||
for (related_field, related_model) in related_models:
|
||||
# Skip the ManyToMany fields that aren’t auto-created. These
|
||||
# should have a corresponding OneToMany field in the model for
|
||||
# the linking table anyway. If we update it through that model
|
||||
# instead then we won’t lose the extra fields in the linking
|
||||
# table.
|
||||
related_field_obj = related_model._meta.get_field(related_field)
|
||||
if isinstance(related_field_obj, ManyToManyField):
|
||||
through = related_field_obj.remote_field.through
|
||||
if not through._meta.auto_created:
|
||||
continue
|
||||
related_objs = related_model.objects.filter(**{related_field: obj})
|
||||
for related_obj in related_objs:
|
||||
print("replacing in", related_model.__name__, related_field, related_obj.id)
|
||||
try:
|
||||
setattr(related_obj, related_field, canonical)
|
||||
related_obj.save()
|
||||
except TypeError:
|
||||
getattr(related_obj, related_field).add(canonical)
|
||||
getattr(related_obj, related_field).remove(obj)
|
||||
|
||||
|
||||
def copy_data(canonical, obj):
|
||||
"""try to get the most data possible"""
|
||||
for data_field in obj._meta.get_fields():
|
||||
if not hasattr(data_field, "activitypub_field"):
|
||||
continue
|
||||
data_value = getattr(obj, data_field.name)
|
||||
if not data_value:
|
||||
continue
|
||||
if not getattr(canonical, data_field.name):
|
||||
print("setting data field", data_field.name, data_value)
|
||||
setattr(canonical, data_field.name, data_value)
|
||||
canonical.save()
|
||||
|
||||
|
||||
def merge_objects(canonical, obj):
|
||||
copy_data(canonical, obj)
|
||||
update_related(canonical, obj)
|
||||
# remove the outdated entry
|
||||
obj.delete()
|
29
bookwyrm/management/merge_command.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
from bookwyrm.management.merge import merge_objects
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
|
||||
class MergeCommand(BaseCommand):
|
||||
"""base class for merge commands"""
|
||||
|
||||
def add_arguments(self, parser):
|
||||
"""add the arguments for this command"""
|
||||
parser.add_argument("--canonical", type=int, required=True)
|
||||
parser.add_argument("--other", type=int, required=True)
|
||||
|
||||
# pylint: disable=no-self-use,unused-argument
|
||||
def handle(self, *args, **options):
|
||||
"""merge the two objects"""
|
||||
model = self.MODEL
|
||||
|
||||
try:
|
||||
canonical = model.objects.get(id=options["canonical"])
|
||||
except model.DoesNotExist:
|
||||
print("canonical book doesn’t exist!")
|
||||
return
|
||||
try:
|
||||
other = model.objects.get(id=options["other"])
|
||||
except model.DoesNotExist:
|
||||
print("other book doesn’t exist!")
|
||||
return
|
||||
|
||||
merge_objects(canonical, other)
|
|
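The new merge module and MergeCommand wire together so a merge can be run either through a management command or by calling merge_objects directly; the IDs below are placeholders:

from django.core.management import call_command

call_command("merge_authors", canonical=1, other=2)  # keep author 1, fold author 2 into it

# or, from a shell inside the project:
# from bookwyrm import models
# from bookwyrm.management.merge import merge_objects
# merge_objects(models.Author.objects.get(id=1), models.Author.objects.get(id=2))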
@ -1467,7 +1467,7 @@ class Migration(migrations.Migration):
|
|||
(
|
||||
"expiry",
|
||||
models.DateTimeField(
|
||||
default=bookwyrm.models.site.get_passowrd_reset_expiry
|
||||
default=bookwyrm.models.site.get_password_reset_expiry
|
||||
),
|
||||
),
|
||||
(
|
||||
|
|
|
@ -6,7 +6,7 @@ from bookwyrm.connectors.abstract_connector import infer_physical_format
|
|||
|
||||
|
||||
def infer_format(app_registry, schema_editor):
|
||||
"""set the new phsyical format field based on existing format data"""
|
||||
"""set the new physical format field based on existing format data"""
|
||||
db_alias = schema_editor.connection.alias
|
||||
|
||||
editions = (
|
||||
|
|
|
@ -5,7 +5,7 @@ from bookwyrm.settings import DOMAIN
|
|||
|
||||
|
||||
def remove_self_connector(app_registry, schema_editor):
|
||||
"""set the new phsyical format field based on existing format data"""
|
||||
"""set the new physical format field based on existing format data"""
|
||||
db_alias = schema_editor.connection.alias
|
||||
app_registry.get_model("bookwyrm", "Connector").objects.using(db_alias).filter(
|
||||
connector_file="self_connector"
|
||||
|
|
|
@ -14,6 +14,8 @@ class Migration(migrations.Migration):
|
|||
migrations.AlterField(
|
||||
model_name="annualgoal",
|
||||
name="year",
|
||||
field=models.IntegerField(default=bookwyrm.models.user.get_current_year),
|
||||
field=models.IntegerField(
|
||||
default=bookwyrm.models.annual_goal.get_current_year
|
||||
),
|
||||
),
|
||||
]
|
||||
|
|
40
bookwyrm/migrations/0154_alter_user_preferred_language.py
Normal file
|
@ -0,0 +1,40 @@
|
|||
# Generated by Django 3.2.14 on 2022-07-15 19:24
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0153_merge_20220706_2141"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="user",
|
||||
name="preferred_language",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
choices=[
|
||||
("en-us", "English"),
|
||||
("ca-es", "Català (Catalan)"),
|
||||
("de-de", "Deutsch (German)"),
|
||||
("es-es", "Español (Spanish)"),
|
||||
("gl-es", "Galego (Galician)"),
|
||||
("it-it", "Italiano (Italian)"),
|
||||
("fi-fi", "Suomi (Finnish)"),
|
||||
("fr-fr", "Français (French)"),
|
||||
("lt-lt", "Lietuvių (Lithuanian)"),
|
||||
("no-no", "Norsk (Norwegian)"),
|
||||
("pt-br", "Português do Brasil (Brazilian Portuguese)"),
|
||||
("pt-pt", "Português Europeu (European Portuguese)"),
|
||||
("ro-ro", "Română (Romanian)"),
|
||||
("sv-se", "Svenska (Swedish)"),
|
||||
("zh-hans", "简体中文 (Simplified Chinese)"),
|
||||
("zh-hant", "繁體中文 (Traditional Chinese)"),
|
||||
],
|
||||
max_length=255,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
]
|
25
bookwyrm/migrations/0155_user_show_guided_tour.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
# Generated by Django 3.2.14 on 2022-07-09 23:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
def existing_users_default(apps, schema_editor):
|
||||
db_alias = schema_editor.connection.alias
|
||||
user_model = apps.get_model("bookwyrm", "User")
|
||||
user_model.objects.using(db_alias).filter(local=True).update(show_guided_tour=False)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0154_alter_user_preferred_language"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="show_guided_tour",
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
migrations.RunPython(existing_users_default, migrations.RunPython.noop),
|
||||
]
|
41
bookwyrm/migrations/0156_alter_user_preferred_language.py
Normal file
|
@ -0,0 +1,41 @@
|
|||
# Generated by Django 3.2.14 on 2022-08-02 18:05
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0155_user_show_guided_tour"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="user",
|
||||
name="preferred_language",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
choices=[
|
||||
("en-us", "English"),
|
||||
("ca-es", "Català (Catalan)"),
|
||||
("de-de", "Deutsch (German)"),
|
||||
("es-es", "Español (Spanish)"),
|
||||
("gl-es", "Galego (Galician)"),
|
||||
("it-it", "Italiano (Italian)"),
|
||||
("fi-fi", "Suomi (Finnish)"),
|
||||
("fr-fr", "Français (French)"),
|
||||
("lt-lt", "Lietuvių (Lithuanian)"),
|
||||
("no-no", "Norsk (Norwegian)"),
|
||||
("pl-pl", "Polski (Polish)"),
|
||||
("pt-br", "Português do Brasil (Brazilian Portuguese)"),
|
||||
("pt-pt", "Português Europeu (European Portuguese)"),
|
||||
("ro-ro", "Română (Romanian)"),
|
||||
("sv-se", "Svenska (Swedish)"),
|
||||
("zh-hans", "简体中文 (Simplified Chinese)"),
|
||||
("zh-hant", "繁體中文 (Traditional Chinese)"),
|
||||
],
|
||||
max_length=255,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
]
|
647
bookwyrm/migrations/0157_auto_20220909_2338.py
Normal file
|
@ -0,0 +1,647 @@
|
|||
# Generated by Django 3.2.15 on 2022-09-09 23:38
|
||||
|
||||
import bookwyrm.models.fields
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0156_alter_user_preferred_language"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="review",
|
||||
name="rating",
|
||||
field=bookwyrm.models.fields.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=2,
|
||||
default=None,
|
||||
max_digits=3,
|
||||
null=True,
|
||||
validators=[
|
||||
django.core.validators.MinValueValidator(0.5),
|
||||
django.core.validators.MaxValueValidator(5),
|
||||
],
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="user",
|
||||
name="preferred_timezone",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("Africa/Abidjan", "Africa/Abidjan"),
|
||||
("Africa/Accra", "Africa/Accra"),
|
||||
("Africa/Addis_Ababa", "Africa/Addis_Ababa"),
|
||||
("Africa/Algiers", "Africa/Algiers"),
|
||||
("Africa/Asmara", "Africa/Asmara"),
|
||||
("Africa/Asmera", "Africa/Asmera"),
|
||||
("Africa/Bamako", "Africa/Bamako"),
|
||||
("Africa/Bangui", "Africa/Bangui"),
|
||||
("Africa/Banjul", "Africa/Banjul"),
|
||||
("Africa/Bissau", "Africa/Bissau"),
|
||||
("Africa/Blantyre", "Africa/Blantyre"),
|
||||
("Africa/Brazzaville", "Africa/Brazzaville"),
|
||||
("Africa/Bujumbura", "Africa/Bujumbura"),
|
||||
("Africa/Cairo", "Africa/Cairo"),
|
||||
("Africa/Casablanca", "Africa/Casablanca"),
|
||||
("Africa/Ceuta", "Africa/Ceuta"),
|
||||
("Africa/Conakry", "Africa/Conakry"),
|
||||
("Africa/Dakar", "Africa/Dakar"),
|
||||
("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"),
|
||||
("Africa/Djibouti", "Africa/Djibouti"),
|
||||
("Africa/Douala", "Africa/Douala"),
|
||||
("Africa/El_Aaiun", "Africa/El_Aaiun"),
|
||||
("Africa/Freetown", "Africa/Freetown"),
|
||||
("Africa/Gaborone", "Africa/Gaborone"),
|
||||
("Africa/Harare", "Africa/Harare"),
|
||||
("Africa/Johannesburg", "Africa/Johannesburg"),
|
||||
("Africa/Juba", "Africa/Juba"),
|
||||
("Africa/Kampala", "Africa/Kampala"),
|
||||
("Africa/Khartoum", "Africa/Khartoum"),
|
||||
("Africa/Kigali", "Africa/Kigali"),
|
||||
("Africa/Kinshasa", "Africa/Kinshasa"),
|
||||
("Africa/Lagos", "Africa/Lagos"),
|
||||
("Africa/Libreville", "Africa/Libreville"),
|
||||
("Africa/Lome", "Africa/Lome"),
|
||||
("Africa/Luanda", "Africa/Luanda"),
|
||||
("Africa/Lubumbashi", "Africa/Lubumbashi"),
|
||||
("Africa/Lusaka", "Africa/Lusaka"),
|
||||
("Africa/Malabo", "Africa/Malabo"),
|
||||
("Africa/Maputo", "Africa/Maputo"),
|
||||
("Africa/Maseru", "Africa/Maseru"),
|
||||
("Africa/Mbabane", "Africa/Mbabane"),
|
||||
("Africa/Mogadishu", "Africa/Mogadishu"),
|
||||
("Africa/Monrovia", "Africa/Monrovia"),
|
||||
("Africa/Nairobi", "Africa/Nairobi"),
|
||||
("Africa/Ndjamena", "Africa/Ndjamena"),
|
||||
("Africa/Niamey", "Africa/Niamey"),
|
||||
("Africa/Nouakchott", "Africa/Nouakchott"),
|
||||
("Africa/Ouagadougou", "Africa/Ouagadougou"),
|
||||
("Africa/Porto-Novo", "Africa/Porto-Novo"),
|
||||
("Africa/Sao_Tome", "Africa/Sao_Tome"),
|
||||
("Africa/Timbuktu", "Africa/Timbuktu"),
|
||||
("Africa/Tripoli", "Africa/Tripoli"),
|
||||
("Africa/Tunis", "Africa/Tunis"),
|
||||
("Africa/Windhoek", "Africa/Windhoek"),
|
||||
("America/Adak", "America/Adak"),
|
||||
("America/Anchorage", "America/Anchorage"),
|
||||
("America/Anguilla", "America/Anguilla"),
|
||||
("America/Antigua", "America/Antigua"),
|
||||
("America/Araguaina", "America/Araguaina"),
|
||||
(
|
||||
"America/Argentina/Buenos_Aires",
|
||||
"America/Argentina/Buenos_Aires",
|
||||
),
|
||||
("America/Argentina/Catamarca", "America/Argentina/Catamarca"),
|
||||
(
|
||||
"America/Argentina/ComodRivadavia",
|
||||
"America/Argentina/ComodRivadavia",
|
||||
),
|
||||
("America/Argentina/Cordoba", "America/Argentina/Cordoba"),
|
||||
("America/Argentina/Jujuy", "America/Argentina/Jujuy"),
|
||||
("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"),
|
||||
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
|
||||
(
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
),
|
||||
("America/Argentina/Salta", "America/Argentina/Salta"),
|
||||
("America/Argentina/San_Juan", "America/Argentina/San_Juan"),
|
||||
("America/Argentina/San_Luis", "America/Argentina/San_Luis"),
|
||||
("America/Argentina/Tucuman", "America/Argentina/Tucuman"),
|
||||
("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"),
|
||||
("America/Aruba", "America/Aruba"),
|
||||
("America/Asuncion", "America/Asuncion"),
|
||||
("America/Atikokan", "America/Atikokan"),
|
||||
("America/Atka", "America/Atka"),
|
||||
("America/Bahia", "America/Bahia"),
|
||||
("America/Bahia_Banderas", "America/Bahia_Banderas"),
|
||||
("America/Barbados", "America/Barbados"),
|
||||
("America/Belem", "America/Belem"),
|
||||
("America/Belize", "America/Belize"),
|
||||
("America/Blanc-Sablon", "America/Blanc-Sablon"),
|
||||
("America/Boa_Vista", "America/Boa_Vista"),
|
||||
("America/Bogota", "America/Bogota"),
|
||||
("America/Boise", "America/Boise"),
|
||||
("America/Buenos_Aires", "America/Buenos_Aires"),
|
||||
("America/Cambridge_Bay", "America/Cambridge_Bay"),
|
||||
("America/Campo_Grande", "America/Campo_Grande"),
|
||||
("America/Cancun", "America/Cancun"),
|
||||
("America/Caracas", "America/Caracas"),
|
||||
("America/Catamarca", "America/Catamarca"),
|
||||
("America/Cayenne", "America/Cayenne"),
|
||||
("America/Cayman", "America/Cayman"),
|
||||
("America/Chicago", "America/Chicago"),
|
||||
("America/Chihuahua", "America/Chihuahua"),
|
||||
("America/Coral_Harbour", "America/Coral_Harbour"),
|
||||
("America/Cordoba", "America/Cordoba"),
|
||||
("America/Costa_Rica", "America/Costa_Rica"),
|
||||
("America/Creston", "America/Creston"),
|
||||
("America/Cuiaba", "America/Cuiaba"),
|
||||
("America/Curacao", "America/Curacao"),
|
||||
("America/Danmarkshavn", "America/Danmarkshavn"),
|
||||
("America/Dawson", "America/Dawson"),
|
||||
("America/Dawson_Creek", "America/Dawson_Creek"),
|
||||
("America/Denver", "America/Denver"),
|
||||
("America/Detroit", "America/Detroit"),
|
||||
("America/Dominica", "America/Dominica"),
|
||||
("America/Edmonton", "America/Edmonton"),
|
||||
("America/Eirunepe", "America/Eirunepe"),
|
||||
("America/El_Salvador", "America/El_Salvador"),
|
||||
("America/Ensenada", "America/Ensenada"),
|
||||
("America/Fort_Nelson", "America/Fort_Nelson"),
|
||||
("America/Fort_Wayne", "America/Fort_Wayne"),
|
||||
("America/Fortaleza", "America/Fortaleza"),
|
||||
("America/Glace_Bay", "America/Glace_Bay"),
|
||||
("America/Godthab", "America/Godthab"),
|
||||
("America/Goose_Bay", "America/Goose_Bay"),
|
||||
("America/Grand_Turk", "America/Grand_Turk"),
|
||||
("America/Grenada", "America/Grenada"),
|
||||
("America/Guadeloupe", "America/Guadeloupe"),
|
||||
("America/Guatemala", "America/Guatemala"),
|
||||
("America/Guayaquil", "America/Guayaquil"),
|
||||
("America/Guyana", "America/Guyana"),
|
||||
("America/Halifax", "America/Halifax"),
|
||||
("America/Havana", "America/Havana"),
|
||||
("America/Hermosillo", "America/Hermosillo"),
|
||||
("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"),
|
||||
("America/Indiana/Knox", "America/Indiana/Knox"),
|
||||
("America/Indiana/Marengo", "America/Indiana/Marengo"),
|
||||
("America/Indiana/Petersburg", "America/Indiana/Petersburg"),
|
||||
("America/Indiana/Tell_City", "America/Indiana/Tell_City"),
|
||||
("America/Indiana/Vevay", "America/Indiana/Vevay"),
|
||||
("America/Indiana/Vincennes", "America/Indiana/Vincennes"),
|
||||
("America/Indiana/Winamac", "America/Indiana/Winamac"),
|
||||
("America/Indianapolis", "America/Indianapolis"),
|
||||
("America/Inuvik", "America/Inuvik"),
|
||||
("America/Iqaluit", "America/Iqaluit"),
|
||||
("America/Jamaica", "America/Jamaica"),
|
||||
("America/Jujuy", "America/Jujuy"),
|
||||
("America/Juneau", "America/Juneau"),
|
||||
("America/Kentucky/Louisville", "America/Kentucky/Louisville"),
|
||||
("America/Kentucky/Monticello", "America/Kentucky/Monticello"),
|
||||
("America/Knox_IN", "America/Knox_IN"),
|
||||
("America/Kralendijk", "America/Kralendijk"),
|
||||
("America/La_Paz", "America/La_Paz"),
|
||||
("America/Lima", "America/Lima"),
|
||||
("America/Los_Angeles", "America/Los_Angeles"),
|
||||
("America/Louisville", "America/Louisville"),
|
||||
("America/Lower_Princes", "America/Lower_Princes"),
|
||||
("America/Maceio", "America/Maceio"),
|
||||
("America/Managua", "America/Managua"),
|
||||
("America/Manaus", "America/Manaus"),
|
||||
("America/Marigot", "America/Marigot"),
|
||||
("America/Martinique", "America/Martinique"),
|
||||
("America/Matamoros", "America/Matamoros"),
|
||||
("America/Mazatlan", "America/Mazatlan"),
|
||||
("America/Mendoza", "America/Mendoza"),
|
||||
("America/Menominee", "America/Menominee"),
|
||||
("America/Merida", "America/Merida"),
|
||||
("America/Metlakatla", "America/Metlakatla"),
|
||||
("America/Mexico_City", "America/Mexico_City"),
|
||||
("America/Miquelon", "America/Miquelon"),
|
||||
("America/Moncton", "America/Moncton"),
|
||||
("America/Monterrey", "America/Monterrey"),
|
||||
("America/Montevideo", "America/Montevideo"),
|
||||
("America/Montreal", "America/Montreal"),
|
||||
("America/Montserrat", "America/Montserrat"),
|
||||
("America/Nassau", "America/Nassau"),
|
||||
("America/New_York", "America/New_York"),
|
||||
("America/Nipigon", "America/Nipigon"),
|
||||
("America/Nome", "America/Nome"),
|
||||
("America/Noronha", "America/Noronha"),
|
||||
("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"),
|
||||
("America/North_Dakota/Center", "America/North_Dakota/Center"),
|
||||
(
|
||||
"America/North_Dakota/New_Salem",
|
||||
"America/North_Dakota/New_Salem",
|
||||
),
|
||||
("America/Nuuk", "America/Nuuk"),
|
||||
("America/Ojinaga", "America/Ojinaga"),
|
||||
("America/Panama", "America/Panama"),
|
||||
("America/Pangnirtung", "America/Pangnirtung"),
|
||||
("America/Paramaribo", "America/Paramaribo"),
|
||||
("America/Phoenix", "America/Phoenix"),
|
||||
("America/Port-au-Prince", "America/Port-au-Prince"),
|
||||
("America/Port_of_Spain", "America/Port_of_Spain"),
|
||||
("America/Porto_Acre", "America/Porto_Acre"),
|
||||
("America/Porto_Velho", "America/Porto_Velho"),
|
||||
("America/Puerto_Rico", "America/Puerto_Rico"),
|
||||
("America/Punta_Arenas", "America/Punta_Arenas"),
|
||||
("America/Rainy_River", "America/Rainy_River"),
|
||||
("America/Rankin_Inlet", "America/Rankin_Inlet"),
|
||||
("America/Recife", "America/Recife"),
|
||||
("America/Regina", "America/Regina"),
|
||||
("America/Resolute", "America/Resolute"),
|
||||
("America/Rio_Branco", "America/Rio_Branco"),
|
||||
("America/Rosario", "America/Rosario"),
|
||||
("America/Santa_Isabel", "America/Santa_Isabel"),
|
||||
("America/Santarem", "America/Santarem"),
|
||||
("America/Santiago", "America/Santiago"),
|
||||
("America/Santo_Domingo", "America/Santo_Domingo"),
|
||||
("America/Sao_Paulo", "America/Sao_Paulo"),
|
||||
("America/Scoresbysund", "America/Scoresbysund"),
|
||||
("America/Shiprock", "America/Shiprock"),
|
||||
("America/Sitka", "America/Sitka"),
|
||||
("America/St_Barthelemy", "America/St_Barthelemy"),
|
||||
("America/St_Johns", "America/St_Johns"),
|
||||
("America/St_Kitts", "America/St_Kitts"),
|
||||
("America/St_Lucia", "America/St_Lucia"),
|
||||
("America/St_Thomas", "America/St_Thomas"),
|
||||
("America/St_Vincent", "America/St_Vincent"),
|
||||
("America/Swift_Current", "America/Swift_Current"),
|
||||
("America/Tegucigalpa", "America/Tegucigalpa"),
|
||||
("America/Thule", "America/Thule"),
|
||||
("America/Thunder_Bay", "America/Thunder_Bay"),
|
||||
("America/Tijuana", "America/Tijuana"),
|
||||
("America/Toronto", "America/Toronto"),
|
||||
("America/Tortola", "America/Tortola"),
|
||||
("America/Vancouver", "America/Vancouver"),
|
||||
("America/Virgin", "America/Virgin"),
|
||||
("America/Whitehorse", "America/Whitehorse"),
|
||||
("America/Winnipeg", "America/Winnipeg"),
|
||||
("America/Yakutat", "America/Yakutat"),
|
||||
("America/Yellowknife", "America/Yellowknife"),
|
||||
("Antarctica/Casey", "Antarctica/Casey"),
|
||||
("Antarctica/Davis", "Antarctica/Davis"),
|
||||
("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"),
|
||||
("Antarctica/Macquarie", "Antarctica/Macquarie"),
|
||||
("Antarctica/Mawson", "Antarctica/Mawson"),
|
||||
("Antarctica/McMurdo", "Antarctica/McMurdo"),
|
||||
("Antarctica/Palmer", "Antarctica/Palmer"),
|
||||
("Antarctica/Rothera", "Antarctica/Rothera"),
|
||||
("Antarctica/South_Pole", "Antarctica/South_Pole"),
|
||||
("Antarctica/Syowa", "Antarctica/Syowa"),
|
||||
("Antarctica/Troll", "Antarctica/Troll"),
|
||||
("Antarctica/Vostok", "Antarctica/Vostok"),
|
||||
("Arctic/Longyearbyen", "Arctic/Longyearbyen"),
|
||||
("Asia/Aden", "Asia/Aden"),
|
||||
("Asia/Almaty", "Asia/Almaty"),
|
||||
("Asia/Amman", "Asia/Amman"),
|
||||
("Asia/Anadyr", "Asia/Anadyr"),
|
||||
("Asia/Aqtau", "Asia/Aqtau"),
|
||||
("Asia/Aqtobe", "Asia/Aqtobe"),
|
||||
("Asia/Ashgabat", "Asia/Ashgabat"),
|
||||
("Asia/Ashkhabad", "Asia/Ashkhabad"),
|
||||
("Asia/Atyrau", "Asia/Atyrau"),
|
||||
("Asia/Baghdad", "Asia/Baghdad"),
|
||||
("Asia/Bahrain", "Asia/Bahrain"),
|
||||
("Asia/Baku", "Asia/Baku"),
|
||||
("Asia/Bangkok", "Asia/Bangkok"),
|
||||
("Asia/Barnaul", "Asia/Barnaul"),
|
||||
("Asia/Beirut", "Asia/Beirut"),
|
||||
("Asia/Bishkek", "Asia/Bishkek"),
|
||||
("Asia/Brunei", "Asia/Brunei"),
|
||||
("Asia/Calcutta", "Asia/Calcutta"),
|
||||
("Asia/Chita", "Asia/Chita"),
|
||||
("Asia/Choibalsan", "Asia/Choibalsan"),
|
||||
("Asia/Chongqing", "Asia/Chongqing"),
|
||||
("Asia/Chungking", "Asia/Chungking"),
|
||||
("Asia/Colombo", "Asia/Colombo"),
|
||||
("Asia/Dacca", "Asia/Dacca"),
|
||||
("Asia/Damascus", "Asia/Damascus"),
|
||||
("Asia/Dhaka", "Asia/Dhaka"),
|
||||
("Asia/Dili", "Asia/Dili"),
|
||||
("Asia/Dubai", "Asia/Dubai"),
|
||||
("Asia/Dushanbe", "Asia/Dushanbe"),
|
||||
("Asia/Famagusta", "Asia/Famagusta"),
|
||||
("Asia/Gaza", "Asia/Gaza"),
|
||||
("Asia/Harbin", "Asia/Harbin"),
|
||||
("Asia/Hebron", "Asia/Hebron"),
|
||||
("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"),
|
||||
("Asia/Hong_Kong", "Asia/Hong_Kong"),
|
||||
("Asia/Hovd", "Asia/Hovd"),
|
||||
("Asia/Irkutsk", "Asia/Irkutsk"),
|
||||
("Asia/Istanbul", "Asia/Istanbul"),
|
||||
("Asia/Jakarta", "Asia/Jakarta"),
|
||||
("Asia/Jayapura", "Asia/Jayapura"),
|
||||
("Asia/Jerusalem", "Asia/Jerusalem"),
|
||||
("Asia/Kabul", "Asia/Kabul"),
|
||||
("Asia/Kamchatka", "Asia/Kamchatka"),
|
||||
("Asia/Karachi", "Asia/Karachi"),
|
||||
("Asia/Kashgar", "Asia/Kashgar"),
|
||||
("Asia/Kathmandu", "Asia/Kathmandu"),
|
||||
("Asia/Katmandu", "Asia/Katmandu"),
|
||||
("Asia/Khandyga", "Asia/Khandyga"),
|
||||
("Asia/Kolkata", "Asia/Kolkata"),
|
||||
("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"),
|
||||
("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"),
|
||||
("Asia/Kuching", "Asia/Kuching"),
|
||||
("Asia/Kuwait", "Asia/Kuwait"),
|
||||
("Asia/Macao", "Asia/Macao"),
|
||||
("Asia/Macau", "Asia/Macau"),
|
||||
("Asia/Magadan", "Asia/Magadan"),
|
||||
("Asia/Makassar", "Asia/Makassar"),
|
||||
("Asia/Manila", "Asia/Manila"),
|
||||
("Asia/Muscat", "Asia/Muscat"),
|
||||
("Asia/Nicosia", "Asia/Nicosia"),
|
||||
("Asia/Novokuznetsk", "Asia/Novokuznetsk"),
|
||||
("Asia/Novosibirsk", "Asia/Novosibirsk"),
|
||||
("Asia/Omsk", "Asia/Omsk"),
|
||||
("Asia/Oral", "Asia/Oral"),
|
||||
("Asia/Phnom_Penh", "Asia/Phnom_Penh"),
|
||||
("Asia/Pontianak", "Asia/Pontianak"),
|
||||
("Asia/Pyongyang", "Asia/Pyongyang"),
|
||||
("Asia/Qatar", "Asia/Qatar"),
|
||||
("Asia/Qostanay", "Asia/Qostanay"),
|
||||
("Asia/Qyzylorda", "Asia/Qyzylorda"),
|
||||
("Asia/Rangoon", "Asia/Rangoon"),
|
||||
("Asia/Riyadh", "Asia/Riyadh"),
|
||||
("Asia/Saigon", "Asia/Saigon"),
|
||||
("Asia/Sakhalin", "Asia/Sakhalin"),
|
||||
("Asia/Samarkand", "Asia/Samarkand"),
|
||||
("Asia/Seoul", "Asia/Seoul"),
|
||||
("Asia/Shanghai", "Asia/Shanghai"),
|
||||
("Asia/Singapore", "Asia/Singapore"),
|
||||
("Asia/Srednekolymsk", "Asia/Srednekolymsk"),
|
||||
("Asia/Taipei", "Asia/Taipei"),
|
||||
("Asia/Tashkent", "Asia/Tashkent"),
|
||||
("Asia/Tbilisi", "Asia/Tbilisi"),
|
||||
("Asia/Tehran", "Asia/Tehran"),
|
||||
("Asia/Tel_Aviv", "Asia/Tel_Aviv"),
|
||||
("Asia/Thimbu", "Asia/Thimbu"),
|
||||
("Asia/Thimphu", "Asia/Thimphu"),
|
||||
("Asia/Tokyo", "Asia/Tokyo"),
|
||||
("Asia/Tomsk", "Asia/Tomsk"),
|
||||
("Asia/Ujung_Pandang", "Asia/Ujung_Pandang"),
|
||||
("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"),
|
||||
("Asia/Ulan_Bator", "Asia/Ulan_Bator"),
|
||||
("Asia/Urumqi", "Asia/Urumqi"),
|
||||
("Asia/Ust-Nera", "Asia/Ust-Nera"),
|
||||
("Asia/Vientiane", "Asia/Vientiane"),
|
||||
("Asia/Vladivostok", "Asia/Vladivostok"),
|
||||
("Asia/Yakutsk", "Asia/Yakutsk"),
|
||||
("Asia/Yangon", "Asia/Yangon"),
|
||||
("Asia/Yekaterinburg", "Asia/Yekaterinburg"),
|
||||
("Asia/Yerevan", "Asia/Yerevan"),
|
||||
("Atlantic/Azores", "Atlantic/Azores"),
|
||||
("Atlantic/Bermuda", "Atlantic/Bermuda"),
|
||||
("Atlantic/Canary", "Atlantic/Canary"),
|
||||
("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"),
|
||||
("Atlantic/Faeroe", "Atlantic/Faeroe"),
|
||||
("Atlantic/Faroe", "Atlantic/Faroe"),
|
||||
("Atlantic/Jan_Mayen", "Atlantic/Jan_Mayen"),
|
||||
("Atlantic/Madeira", "Atlantic/Madeira"),
|
||||
("Atlantic/Reykjavik", "Atlantic/Reykjavik"),
|
||||
("Atlantic/South_Georgia", "Atlantic/South_Georgia"),
|
||||
("Atlantic/St_Helena", "Atlantic/St_Helena"),
|
||||
("Atlantic/Stanley", "Atlantic/Stanley"),
|
||||
("Australia/ACT", "Australia/ACT"),
|
||||
("Australia/Adelaide", "Australia/Adelaide"),
|
||||
("Australia/Brisbane", "Australia/Brisbane"),
|
||||
("Australia/Broken_Hill", "Australia/Broken_Hill"),
|
||||
("Australia/Canberra", "Australia/Canberra"),
|
||||
("Australia/Currie", "Australia/Currie"),
|
||||
("Australia/Darwin", "Australia/Darwin"),
|
||||
("Australia/Eucla", "Australia/Eucla"),
|
||||
("Australia/Hobart", "Australia/Hobart"),
|
||||
("Australia/LHI", "Australia/LHI"),
|
||||
("Australia/Lindeman", "Australia/Lindeman"),
|
||||
("Australia/Lord_Howe", "Australia/Lord_Howe"),
|
||||
("Australia/Melbourne", "Australia/Melbourne"),
|
||||
("Australia/NSW", "Australia/NSW"),
|
||||
("Australia/North", "Australia/North"),
|
||||
("Australia/Perth", "Australia/Perth"),
|
||||
("Australia/Queensland", "Australia/Queensland"),
|
||||
("Australia/South", "Australia/South"),
|
||||
("Australia/Sydney", "Australia/Sydney"),
|
||||
("Australia/Tasmania", "Australia/Tasmania"),
|
||||
("Australia/Victoria", "Australia/Victoria"),
|
||||
("Australia/West", "Australia/West"),
|
||||
("Australia/Yancowinna", "Australia/Yancowinna"),
|
||||
("Brazil/Acre", "Brazil/Acre"),
|
||||
("Brazil/DeNoronha", "Brazil/DeNoronha"),
|
||||
("Brazil/East", "Brazil/East"),
|
||||
("Brazil/West", "Brazil/West"),
|
||||
("CET", "CET"),
|
||||
("CST6CDT", "CST6CDT"),
|
||||
("Canada/Atlantic", "Canada/Atlantic"),
|
||||
("Canada/Central", "Canada/Central"),
|
||||
("Canada/Eastern", "Canada/Eastern"),
|
||||
("Canada/Mountain", "Canada/Mountain"),
|
||||
("Canada/Newfoundland", "Canada/Newfoundland"),
|
||||
("Canada/Pacific", "Canada/Pacific"),
|
||||
("Canada/Saskatchewan", "Canada/Saskatchewan"),
|
||||
("Canada/Yukon", "Canada/Yukon"),
|
||||
("Chile/Continental", "Chile/Continental"),
|
||||
("Chile/EasterIsland", "Chile/EasterIsland"),
|
||||
("Cuba", "Cuba"),
|
||||
("EET", "EET"),
|
||||
("EST", "EST"),
|
||||
("EST5EDT", "EST5EDT"),
|
||||
("Egypt", "Egypt"),
|
||||
("Eire", "Eire"),
|
||||
("Etc/GMT", "Etc/GMT"),
|
||||
("Etc/GMT+0", "Etc/GMT+0"),
|
||||
("Etc/GMT+1", "Etc/GMT+1"),
|
||||
("Etc/GMT+10", "Etc/GMT+10"),
|
||||
("Etc/GMT+11", "Etc/GMT+11"),
|
||||
("Etc/GMT+12", "Etc/GMT+12"),
|
||||
("Etc/GMT+2", "Etc/GMT+2"),
|
||||
("Etc/GMT+3", "Etc/GMT+3"),
|
||||
("Etc/GMT+4", "Etc/GMT+4"),
|
||||
("Etc/GMT+5", "Etc/GMT+5"),
|
||||
("Etc/GMT+6", "Etc/GMT+6"),
|
||||
("Etc/GMT+7", "Etc/GMT+7"),
|
||||
("Etc/GMT+8", "Etc/GMT+8"),
|
||||
("Etc/GMT+9", "Etc/GMT+9"),
|
||||
("Etc/GMT-0", "Etc/GMT-0"),
|
||||
("Etc/GMT-1", "Etc/GMT-1"),
|
||||
("Etc/GMT-10", "Etc/GMT-10"),
|
||||
("Etc/GMT-11", "Etc/GMT-11"),
|
||||
("Etc/GMT-12", "Etc/GMT-12"),
|
||||
("Etc/GMT-13", "Etc/GMT-13"),
|
||||
("Etc/GMT-14", "Etc/GMT-14"),
|
||||
("Etc/GMT-2", "Etc/GMT-2"),
|
||||
("Etc/GMT-3", "Etc/GMT-3"),
|
||||
("Etc/GMT-4", "Etc/GMT-4"),
|
||||
("Etc/GMT-5", "Etc/GMT-5"),
|
||||
("Etc/GMT-6", "Etc/GMT-6"),
|
||||
("Etc/GMT-7", "Etc/GMT-7"),
|
||||
("Etc/GMT-8", "Etc/GMT-8"),
|
||||
("Etc/GMT-9", "Etc/GMT-9"),
|
||||
("Etc/GMT0", "Etc/GMT0"),
|
||||
("Etc/Greenwich", "Etc/Greenwich"),
|
||||
("Etc/UCT", "Etc/UCT"),
|
||||
("Etc/UTC", "Etc/UTC"),
|
||||
("Etc/Universal", "Etc/Universal"),
|
||||
("Etc/Zulu", "Etc/Zulu"),
|
||||
("Europe/Amsterdam", "Europe/Amsterdam"),
|
||||
("Europe/Andorra", "Europe/Andorra"),
|
||||
("Europe/Astrakhan", "Europe/Astrakhan"),
|
||||
("Europe/Athens", "Europe/Athens"),
|
||||
("Europe/Belfast", "Europe/Belfast"),
|
||||
("Europe/Belgrade", "Europe/Belgrade"),
|
||||
("Europe/Berlin", "Europe/Berlin"),
|
||||
("Europe/Bratislava", "Europe/Bratislava"),
|
||||
("Europe/Brussels", "Europe/Brussels"),
|
||||
("Europe/Bucharest", "Europe/Bucharest"),
|
||||
("Europe/Budapest", "Europe/Budapest"),
|
||||
("Europe/Busingen", "Europe/Busingen"),
|
||||
("Europe/Chisinau", "Europe/Chisinau"),
|
||||
("Europe/Copenhagen", "Europe/Copenhagen"),
|
||||
("Europe/Dublin", "Europe/Dublin"),
|
||||
("Europe/Gibraltar", "Europe/Gibraltar"),
|
||||
("Europe/Guernsey", "Europe/Guernsey"),
|
||||
("Europe/Helsinki", "Europe/Helsinki"),
|
||||
("Europe/Isle_of_Man", "Europe/Isle_of_Man"),
|
||||
("Europe/Istanbul", "Europe/Istanbul"),
|
||||
("Europe/Jersey", "Europe/Jersey"),
|
||||
("Europe/Kaliningrad", "Europe/Kaliningrad"),
|
||||
("Europe/Kiev", "Europe/Kiev"),
|
||||
("Europe/Kirov", "Europe/Kirov"),
|
||||
("Europe/Kyiv", "Europe/Kyiv"),
|
||||
("Europe/Lisbon", "Europe/Lisbon"),
|
||||
("Europe/Ljubljana", "Europe/Ljubljana"),
|
||||
("Europe/London", "Europe/London"),
|
||||
("Europe/Luxembourg", "Europe/Luxembourg"),
|
||||
("Europe/Madrid", "Europe/Madrid"),
|
||||
("Europe/Malta", "Europe/Malta"),
|
||||
("Europe/Mariehamn", "Europe/Mariehamn"),
|
||||
("Europe/Minsk", "Europe/Minsk"),
|
||||
("Europe/Monaco", "Europe/Monaco"),
|
||||
("Europe/Moscow", "Europe/Moscow"),
|
||||
("Europe/Nicosia", "Europe/Nicosia"),
|
||||
("Europe/Oslo", "Europe/Oslo"),
|
||||
("Europe/Paris", "Europe/Paris"),
|
||||
("Europe/Podgorica", "Europe/Podgorica"),
|
||||
("Europe/Prague", "Europe/Prague"),
|
||||
("Europe/Riga", "Europe/Riga"),
|
||||
("Europe/Rome", "Europe/Rome"),
|
||||
("Europe/Samara", "Europe/Samara"),
|
||||
("Europe/San_Marino", "Europe/San_Marino"),
|
||||
("Europe/Sarajevo", "Europe/Sarajevo"),
|
||||
("Europe/Saratov", "Europe/Saratov"),
|
||||
("Europe/Simferopol", "Europe/Simferopol"),
|
||||
("Europe/Skopje", "Europe/Skopje"),
|
||||
("Europe/Sofia", "Europe/Sofia"),
|
||||
("Europe/Stockholm", "Europe/Stockholm"),
|
||||
("Europe/Tallinn", "Europe/Tallinn"),
|
||||
("Europe/Tirane", "Europe/Tirane"),
|
||||
("Europe/Tiraspol", "Europe/Tiraspol"),
|
||||
("Europe/Ulyanovsk", "Europe/Ulyanovsk"),
|
||||
("Europe/Uzhgorod", "Europe/Uzhgorod"),
|
||||
("Europe/Vaduz", "Europe/Vaduz"),
|
||||
("Europe/Vatican", "Europe/Vatican"),
|
||||
("Europe/Vienna", "Europe/Vienna"),
|
||||
("Europe/Vilnius", "Europe/Vilnius"),
|
||||
("Europe/Volgograd", "Europe/Volgograd"),
|
||||
("Europe/Warsaw", "Europe/Warsaw"),
|
||||
("Europe/Zagreb", "Europe/Zagreb"),
|
||||
("Europe/Zaporozhye", "Europe/Zaporozhye"),
|
||||
("Europe/Zurich", "Europe/Zurich"),
|
||||
("GB", "GB"),
|
||||
("GB-Eire", "GB-Eire"),
|
||||
("GMT", "GMT"),
|
||||
("GMT+0", "GMT+0"),
|
||||
("GMT-0", "GMT-0"),
|
||||
("GMT0", "GMT0"),
|
||||
("Greenwich", "Greenwich"),
|
||||
("HST", "HST"),
|
||||
("Hongkong", "Hongkong"),
|
||||
("Iceland", "Iceland"),
|
||||
("Indian/Antananarivo", "Indian/Antananarivo"),
|
||||
("Indian/Chagos", "Indian/Chagos"),
|
||||
("Indian/Christmas", "Indian/Christmas"),
|
||||
("Indian/Cocos", "Indian/Cocos"),
|
||||
("Indian/Comoro", "Indian/Comoro"),
|
||||
("Indian/Kerguelen", "Indian/Kerguelen"),
|
||||
("Indian/Mahe", "Indian/Mahe"),
|
||||
("Indian/Maldives", "Indian/Maldives"),
|
||||
("Indian/Mauritius", "Indian/Mauritius"),
|
||||
("Indian/Mayotte", "Indian/Mayotte"),
|
||||
("Indian/Reunion", "Indian/Reunion"),
|
||||
("Iran", "Iran"),
|
||||
("Israel", "Israel"),
|
||||
("Jamaica", "Jamaica"),
|
||||
("Japan", "Japan"),
|
||||
("Kwajalein", "Kwajalein"),
|
||||
("Libya", "Libya"),
|
||||
("MET", "MET"),
|
||||
("MST", "MST"),
|
||||
("MST7MDT", "MST7MDT"),
|
||||
("Mexico/BajaNorte", "Mexico/BajaNorte"),
|
||||
("Mexico/BajaSur", "Mexico/BajaSur"),
|
||||
("Mexico/General", "Mexico/General"),
|
||||
("NZ", "NZ"),
|
||||
("NZ-CHAT", "NZ-CHAT"),
|
||||
("Navajo", "Navajo"),
|
||||
("PRC", "PRC"),
|
||||
("PST8PDT", "PST8PDT"),
|
||||
("Pacific/Apia", "Pacific/Apia"),
|
||||
("Pacific/Auckland", "Pacific/Auckland"),
|
||||
("Pacific/Bougainville", "Pacific/Bougainville"),
|
||||
("Pacific/Chatham", "Pacific/Chatham"),
|
||||
("Pacific/Chuuk", "Pacific/Chuuk"),
|
||||
("Pacific/Easter", "Pacific/Easter"),
|
||||
("Pacific/Efate", "Pacific/Efate"),
|
||||
("Pacific/Enderbury", "Pacific/Enderbury"),
|
||||
("Pacific/Fakaofo", "Pacific/Fakaofo"),
|
||||
("Pacific/Fiji", "Pacific/Fiji"),
|
||||
("Pacific/Funafuti", "Pacific/Funafuti"),
|
||||
("Pacific/Galapagos", "Pacific/Galapagos"),
|
||||
("Pacific/Gambier", "Pacific/Gambier"),
|
||||
("Pacific/Guadalcanal", "Pacific/Guadalcanal"),
|
||||
("Pacific/Guam", "Pacific/Guam"),
|
||||
("Pacific/Honolulu", "Pacific/Honolulu"),
|
||||
("Pacific/Johnston", "Pacific/Johnston"),
|
||||
("Pacific/Kanton", "Pacific/Kanton"),
|
||||
("Pacific/Kiritimati", "Pacific/Kiritimati"),
|
||||
("Pacific/Kosrae", "Pacific/Kosrae"),
|
||||
("Pacific/Kwajalein", "Pacific/Kwajalein"),
|
||||
("Pacific/Majuro", "Pacific/Majuro"),
|
||||
("Pacific/Marquesas", "Pacific/Marquesas"),
|
||||
("Pacific/Midway", "Pacific/Midway"),
|
||||
("Pacific/Nauru", "Pacific/Nauru"),
|
||||
("Pacific/Niue", "Pacific/Niue"),
|
||||
("Pacific/Norfolk", "Pacific/Norfolk"),
|
||||
("Pacific/Noumea", "Pacific/Noumea"),
|
||||
("Pacific/Pago_Pago", "Pacific/Pago_Pago"),
|
||||
("Pacific/Palau", "Pacific/Palau"),
|
||||
("Pacific/Pitcairn", "Pacific/Pitcairn"),
|
||||
("Pacific/Pohnpei", "Pacific/Pohnpei"),
|
||||
("Pacific/Ponape", "Pacific/Ponape"),
|
||||
("Pacific/Port_Moresby", "Pacific/Port_Moresby"),
|
||||
("Pacific/Rarotonga", "Pacific/Rarotonga"),
|
||||
("Pacific/Saipan", "Pacific/Saipan"),
|
||||
("Pacific/Samoa", "Pacific/Samoa"),
|
||||
("Pacific/Tahiti", "Pacific/Tahiti"),
|
||||
("Pacific/Tarawa", "Pacific/Tarawa"),
|
||||
("Pacific/Tongatapu", "Pacific/Tongatapu"),
|
||||
("Pacific/Truk", "Pacific/Truk"),
|
||||
("Pacific/Wake", "Pacific/Wake"),
|
||||
("Pacific/Wallis", "Pacific/Wallis"),
|
||||
("Pacific/Yap", "Pacific/Yap"),
|
||||
("Poland", "Poland"),
|
||||
("Portugal", "Portugal"),
|
||||
("ROC", "ROC"),
|
||||
("ROK", "ROK"),
|
||||
("Singapore", "Singapore"),
|
||||
("Turkey", "Turkey"),
|
||||
("UCT", "UCT"),
|
||||
("US/Alaska", "US/Alaska"),
|
||||
("US/Aleutian", "US/Aleutian"),
|
||||
("US/Arizona", "US/Arizona"),
|
||||
("US/Central", "US/Central"),
|
||||
("US/East-Indiana", "US/East-Indiana"),
|
||||
("US/Eastern", "US/Eastern"),
|
||||
("US/Hawaii", "US/Hawaii"),
|
||||
("US/Indiana-Starke", "US/Indiana-Starke"),
|
||||
("US/Michigan", "US/Michigan"),
|
||||
("US/Mountain", "US/Mountain"),
|
||||
("US/Pacific", "US/Pacific"),
|
||||
("US/Samoa", "US/Samoa"),
|
||||
("UTC", "UTC"),
|
||||
("Universal", "Universal"),
|
||||
("W-SU", "W-SU"),
|
||||
("WET", "WET"),
|
||||
("Zulu", "Zulu"),
|
||||
                ],
                default="UTC",
                max_length=255,
            ),
        ),
    ]
bookwyrm/migrations/0158_auto_20220919_1634.py (new file, 65 lines)
@@ -0,0 +1,65 @@
# Generated by Django 3.2.15 on 2022-09-19 16:34

import bookwyrm.models.fields
from django.db import migrations, models
import django.utils.timezone


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0157_auto_20220909_2338"),
    ]

    operations = [
        migrations.AddField(
            model_name="automod",
            name="created_date",
            field=models.DateTimeField(
                auto_now_add=True, default=django.utils.timezone.now
            ),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name="automod",
            name="remote_id",
            field=bookwyrm.models.fields.RemoteIdField(
                max_length=255,
                null=True,
                validators=[bookwyrm.models.fields.validate_remote_id],
            ),
        ),
        migrations.AddField(
            model_name="automod",
            name="updated_date",
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AddField(
            model_name="emailblocklist",
            name="remote_id",
            field=bookwyrm.models.fields.RemoteIdField(
                max_length=255,
                null=True,
                validators=[bookwyrm.models.fields.validate_remote_id],
            ),
        ),
        migrations.AddField(
            model_name="emailblocklist",
            name="updated_date",
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AddField(
            model_name="ipblocklist",
            name="remote_id",
            field=bookwyrm.models.fields.RemoteIdField(
                max_length=255,
                null=True,
                validators=[bookwyrm.models.fields.validate_remote_id],
            ),
        ),
        migrations.AddField(
            model_name="ipblocklist",
            name="updated_date",
            field=models.DateTimeField(auto_now=True),
        ),
    ]
bookwyrm/migrations/0159_auto_20220924_0634.py (new file, 33 lines)
@@ -0,0 +1,33 @@
# Generated by Django 3.2.15 on 2022-09-24 06:34

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0158_auto_20220919_1634"),
    ]

    operations = [
        migrations.AddField(
            model_name="user",
            name="hotp_count",
            field=models.IntegerField(blank=True, default=0, null=True),
        ),
        migrations.AddField(
            model_name="user",
            name="hotp_secret",
            field=models.CharField(blank=True, default=None, max_length=32, null=True),
        ),
        migrations.AddField(
            model_name="user",
            name="otp_secret",
            field=models.CharField(blank=True, default=None, max_length=32, null=True),
        ),
        migrations.AddField(
            model_name="user",
            name="two_factor_auth",
            field=models.BooleanField(blank=True, default=None, null=True),
        ),
    ]
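
These four columns are only the storage half of two-factor authentication: a TOTP secret, an HOTP secret plus counter for fallback codes, and an enable flag. The verification logic is not part of this diff; a sketch of how such fields are typically consumed, assuming the pyotp library:

# Sketch only: checking codes against the fields added above (assumes pyotp).
import pyotp


def verify_totp(user, code):
    """Validate a time-based code against the stored otp_secret."""
    if not user.otp_secret:
        return False
    return pyotp.TOTP(user.otp_secret).verify(code, valid_window=1)


def verify_hotp(user, code):
    """Validate a counter-based code and advance hotp_count on success."""
    if not user.hotp_secret:
        return False
    if pyotp.HOTP(user.hotp_secret).verify(code, user.hotp_count):
        user.hotp_count += 1
        user.save(update_fields=["hotp_count"])
        return True
    return False
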
bookwyrm/migrations/0160_auto_20221101_2251.py (new file, 52 lines)
@@ -0,0 +1,52 @@
# Generated by Django 3.2.15 on 2022-11-01 22:51

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0159_auto_20220924_0634"),
    ]

    operations = [
        migrations.AddField(
            model_name="user",
            name="allow_reactivation",
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name="connector",
            name="deactivation_reason",
            field=models.CharField(
                blank=True,
                choices=[
                    ("pending", "Pending"),
                    ("self_deletion", "Self deletion"),
                    ("self_deactivation", "Self deactivation"),
                    ("moderator_suspension", "Moderator suspension"),
                    ("moderator_deletion", "Moderator deletion"),
                    ("domain_block", "Domain block"),
                ],
                max_length=255,
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name="user",
            name="deactivation_reason",
            field=models.CharField(
                blank=True,
                choices=[
                    ("pending", "Pending"),
                    ("self_deletion", "Self deletion"),
                    ("self_deactivation", "Self deactivation"),
                    ("moderator_suspension", "Moderator suspension"),
                    ("moderator_deletion", "Moderator deletion"),
                    ("domain_block", "Domain block"),
                ],
                max_length=255,
                null=True,
            ),
        ),
    ]
bookwyrm/migrations/0160_auto_20221105_2030.py (new file, 32 lines)
@@ -0,0 +1,32 @@
# Generated by Django 3.2.15 on 2022-11-05 20:30

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0159_auto_20220924_0634"),
    ]

    operations = [
        migrations.AddField(
            model_name="importitem",
            name="task_id",
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AddField(
            model_name="importjob",
            name="status",
            field=models.CharField(
                choices=[
                    ("pending", "Pending"),
                    ("active", "Active"),
                    ("complete", "Complete"),
                    ("stopped", "Stopped"),
                ],
                max_length=50,
                null=True,
            ),
        ),
    ]
bookwyrm/migrations/0161_alter_importjob_status.py (new file, 28 lines)
@@ -0,0 +1,28 @@
# Generated by Django 3.2.15 on 2022-11-05 20:40

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0160_auto_20221105_2030"),
    ]

    operations = [
        migrations.AlterField(
            model_name="importjob",
            name="status",
            field=models.CharField(
                choices=[
                    ("pending", "Pending"),
                    ("active", "Active"),
                    ("complete", "Complete"),
                    ("stopped", "Stopped"),
                ],
                default="pending",
                max_length=50,
                null=True,
            ),
        ),
    ]
bookwyrm/migrations/0162_importjob_task_id.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 3.2.15 on 2022-11-05 22:28

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0161_alter_importjob_status"),
    ]

    operations = [
        migrations.AddField(
            model_name="importjob",
            name="task_id",
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
    ]
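
Taken together, the 0160–0162 migrations above give import jobs a lifecycle: a status choice field plus the Celery task id that is processing them. The columns alone do not stop anything; a sketch of how a stored task id could be used to cancel a queued import, assuming Celery's standard control API and a hypothetical app instance:

# Sketch only: cancelling an import job via its recorded Celery task id.
from celery import Celery

app = Celery("bookwyrm")  # hypothetical instance; broker config omitted


def stop_import(job):
    """Mark the job stopped and revoke its task if one was recorded."""
    job.status = "stopped"
    job.save(update_fields=["status"])
    if job.task_id:
        # terminate=True also interrupts a task that has already started
        app.control.revoke(job.task_id, terminate=True)
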
bookwyrm/migrations/0163_merge_0160_auto_20221101_2251_0162_importjob_task_id.py (new file, 13 lines)
@@ -0,0 +1,13 @@
# Generated by Django 3.2.16 on 2022-11-10 20:34

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0160_auto_20221101_2251"),
        ("bookwyrm", "0162_importjob_task_id"),
    ]

    operations = []
bookwyrm/migrations/0164_status_ready.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 3.2.16 on 2022-11-15 21:40

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0163_merge_0160_auto_20221101_2251_0162_importjob_task_id"),
    ]

    operations = [
        migrations.AddField(
            model_name="status",
            name="ready",
            field=models.BooleanField(default=True),
        ),
    ]
bookwyrm/migrations/0165_alter_inviterequest_answer.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 3.2.16 on 2022-11-15 22:43

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0164_status_ready"),
    ]

    operations = [
        migrations.AlterField(
            model_name="inviterequest",
            name="answer",
            field=models.TextField(blank=True, max_length=255, null=True),
        ),
    ]
bookwyrm/migrations/0166_sitesettings_imports_enabled.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 3.2.16 on 2022-11-17 21:50

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0165_alter_inviterequest_answer"),
    ]

    operations = [
        migrations.AddField(
            model_name="sitesettings",
            name="imports_enabled",
            field=models.BooleanField(default=True),
        ),
    ]
bookwyrm/migrations/0167_auto_20221125_1900.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# Generated by Django 3.2.16 on 2022-11-25 19:00

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0166_sitesettings_imports_enabled"),
    ]

    operations = [
        migrations.AddField(
            model_name="sitesettings",
            name="impressum",
            field=models.TextField(default="Add a impressum here."),
        ),
        migrations.AddField(
            model_name="sitesettings",
            name="show_impressum",
            field=models.BooleanField(default=False),
        ),
    ]
bookwyrm/migrations/0167_sitesettings_import_size_limit.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# Generated by Django 3.2.16 on 2022-12-05 13:53

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0166_sitesettings_imports_enabled"),
    ]

    operations = [
        migrations.AddField(
            model_name="sitesettings",
            name="import_size_limit",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="sitesettings",
            name="import_limit_reset",
            field=models.IntegerField(default=0),
        ),
    ]
bookwyrm/migrations/0168_auto_20221205_1701.py (new file, 28 lines)
@@ -0,0 +1,28 @@
# Generated by Django 3.2.16 on 2022-12-05 17:01

import bookwyrm.models.fields
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0167_auto_20221125_1900"),
    ]

    operations = [
        migrations.AddField(
            model_name="author",
            name="aasin",
            field=bookwyrm.models.fields.CharField(
                blank=True, max_length=255, null=True
            ),
        ),
        migrations.AddField(
            model_name="book",
            name="aasin",
            field=bookwyrm.models.fields.CharField(
                blank=True, max_length=255, null=True
            ),
        ),
    ]
bookwyrm/migrations/0168_auto_20221205_2331.py (new file, 63 lines)
@@ -0,0 +1,63 @@
""" I added two new permission types and a new group to the management command that
creates the database on install, this creates them for existing instances """
# Generated by Django 3.2.16 on 2022-12-05 23:31

from django.db import migrations


def create_groups_and_perms(apps, schema_editor):
    """create the new "owner" group and "system admin" permission"""
    db_alias = schema_editor.connection.alias
    group_model = apps.get_model("auth", "Group")
    # Add the "owner" group, if needed
    owner_group, group_created = group_model.objects.using(db_alias).get_or_create(
        name="owner"
    )

    # Create perms, if needed
    user_model = apps.get_model("bookwyrm", "User")
    content_type_model = apps.get_model("contenttypes", "ContentType")
    content_type = content_type_model.objects.get_for_model(user_model)
    perms_model = apps.get_model("auth", "Permission")
    reg_perm, perm_created = perms_model.objects.using(db_alias).get_or_create(
        codename="manage_registration",
        name="allow or prevent user registration",
        content_type=content_type,
    )
    admin_perm, admin_perm_created = perms_model.objects.using(db_alias).get_or_create(
        codename="system_administration",
        name="technical controls",
        content_type=content_type,
    )

    # Add perms to the group if anything was created
    if group_created or perm_created or admin_perm_created:
        perms = [
            "edit_instance_settings",
            "set_user_group",
            "control_federation",
            "create_invites",
            "moderate_user",
            "moderate_post",
            "edit_book",
        ]
        owner_group.permissions.set(
            perms_model.objects.using(db_alias).filter(codename__in=perms).all()
        )

    # also extend these perms to admins
    # This is get or create so the tests don't fail -- it should already exist
    admin_group, _ = group_model.objects.using(db_alias).get_or_create(name="admin")
    admin_group.permissions.add(reg_perm)
    admin_group.permissions.add(admin_perm)


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0167_auto_20221125_1900"),
    ]

    operations = [
        migrations.RunPython(create_groups_and_perms, migrations.RunPython.noop)
    ]
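
The data migration above only creates the "owner" group and the two permissions; enforcing them is left to Django's regular permission checks elsewhere in the codebase. An illustration (not code from this commit) of how the new codenames would be tested, assuming they carry the bookwyrm app label:

# Illustrative permission checks for the codenames created above.
from django.contrib.auth.decorators import permission_required


def can_manage_registration(user):
    """App-label-qualified check for the new registration permission."""
    return user.has_perm("bookwyrm.manage_registration")


@permission_required("bookwyrm.system_administration", raise_exception=True)
def system_dashboard(request):
    ...  # hypothetical view body elided; only the permission guard matters here
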
bookwyrm/migrations/0169_auto_20221206_0902.py (new file, 28 lines)
@@ -0,0 +1,28 @@
# Generated by Django 3.2.16 on 2022-12-06 09:02

import bookwyrm.models.fields
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0168_auto_20221205_1701"),
    ]

    operations = [
        migrations.AddField(
            model_name="author",
            name="isfdb",
            field=bookwyrm.models.fields.CharField(
                blank=True, max_length=255, null=True
            ),
        ),
        migrations.AddField(
            model_name="book",
            name="isfdb",
            field=bookwyrm.models.fields.CharField(
                blank=True, max_length=255, null=True
            ),
        ),
    ]
bookwyrm/migrations/0170_merge_0168_auto_20221205_2331_0169_auto_20221206_0902.py (new file, 13 lines)
@@ -0,0 +1,13 @@
# Generated by Django 3.2.16 on 2022-12-11 20:00

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0168_auto_20221205_2331"),
        ("bookwyrm", "0169_auto_20221206_0902"),
    ]

    operations = []
bookwyrm/migrations/0171_alter_user_preferred_timezone.py (new file, 631 lines)
@@ -0,0 +1,631 @@
# Generated by Django 3.2.16 on 2022-12-19 15:30

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0170_merge_0168_auto_20221205_2331_0169_auto_20221206_0902"),
    ]

    operations = [
        migrations.AlterField(
            model_name="user",
            name="preferred_timezone",
            field=models.CharField(
                choices=[
("Africa/Abidjan", "Africa/Abidjan"),
|
||||
("Africa/Accra", "Africa/Accra"),
|
||||
("Africa/Addis_Ababa", "Africa/Addis_Ababa"),
|
||||
("Africa/Algiers", "Africa/Algiers"),
|
||||
("Africa/Asmara", "Africa/Asmara"),
|
||||
("Africa/Asmera", "Africa/Asmera"),
|
||||
("Africa/Bamako", "Africa/Bamako"),
|
||||
("Africa/Bangui", "Africa/Bangui"),
|
||||
("Africa/Banjul", "Africa/Banjul"),
|
||||
("Africa/Bissau", "Africa/Bissau"),
|
||||
("Africa/Blantyre", "Africa/Blantyre"),
|
||||
("Africa/Brazzaville", "Africa/Brazzaville"),
|
||||
("Africa/Bujumbura", "Africa/Bujumbura"),
|
||||
("Africa/Cairo", "Africa/Cairo"),
|
||||
("Africa/Casablanca", "Africa/Casablanca"),
|
||||
("Africa/Ceuta", "Africa/Ceuta"),
|
||||
("Africa/Conakry", "Africa/Conakry"),
|
||||
("Africa/Dakar", "Africa/Dakar"),
|
||||
("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"),
|
||||
("Africa/Djibouti", "Africa/Djibouti"),
|
||||
("Africa/Douala", "Africa/Douala"),
|
||||
("Africa/El_Aaiun", "Africa/El_Aaiun"),
|
||||
("Africa/Freetown", "Africa/Freetown"),
|
||||
("Africa/Gaborone", "Africa/Gaborone"),
|
||||
("Africa/Harare", "Africa/Harare"),
|
||||
("Africa/Johannesburg", "Africa/Johannesburg"),
|
||||
("Africa/Juba", "Africa/Juba"),
|
||||
("Africa/Kampala", "Africa/Kampala"),
|
||||
("Africa/Khartoum", "Africa/Khartoum"),
|
||||
("Africa/Kigali", "Africa/Kigali"),
|
||||
("Africa/Kinshasa", "Africa/Kinshasa"),
|
||||
("Africa/Lagos", "Africa/Lagos"),
|
||||
("Africa/Libreville", "Africa/Libreville"),
|
||||
("Africa/Lome", "Africa/Lome"),
|
||||
("Africa/Luanda", "Africa/Luanda"),
|
||||
("Africa/Lubumbashi", "Africa/Lubumbashi"),
|
||||
("Africa/Lusaka", "Africa/Lusaka"),
|
||||
("Africa/Malabo", "Africa/Malabo"),
|
||||
("Africa/Maputo", "Africa/Maputo"),
|
||||
("Africa/Maseru", "Africa/Maseru"),
|
||||
("Africa/Mbabane", "Africa/Mbabane"),
|
||||
("Africa/Mogadishu", "Africa/Mogadishu"),
|
||||
("Africa/Monrovia", "Africa/Monrovia"),
|
||||
("Africa/Nairobi", "Africa/Nairobi"),
|
||||
("Africa/Ndjamena", "Africa/Ndjamena"),
|
||||
("Africa/Niamey", "Africa/Niamey"),
|
||||
("Africa/Nouakchott", "Africa/Nouakchott"),
|
||||
("Africa/Ouagadougou", "Africa/Ouagadougou"),
|
||||
("Africa/Porto-Novo", "Africa/Porto-Novo"),
|
||||
("Africa/Sao_Tome", "Africa/Sao_Tome"),
|
||||
("Africa/Timbuktu", "Africa/Timbuktu"),
|
||||
("Africa/Tripoli", "Africa/Tripoli"),
|
||||
("Africa/Tunis", "Africa/Tunis"),
|
||||
("Africa/Windhoek", "Africa/Windhoek"),
|
||||
("America/Adak", "America/Adak"),
|
||||
("America/Anchorage", "America/Anchorage"),
|
||||
("America/Anguilla", "America/Anguilla"),
|
||||
("America/Antigua", "America/Antigua"),
|
||||
("America/Araguaina", "America/Araguaina"),
|
||||
(
|
||||
"America/Argentina/Buenos_Aires",
|
||||
"America/Argentina/Buenos_Aires",
|
||||
),
|
||||
("America/Argentina/Catamarca", "America/Argentina/Catamarca"),
|
||||
(
|
||||
"America/Argentina/ComodRivadavia",
|
||||
"America/Argentina/ComodRivadavia",
|
||||
),
|
||||
("America/Argentina/Cordoba", "America/Argentina/Cordoba"),
|
||||
("America/Argentina/Jujuy", "America/Argentina/Jujuy"),
|
||||
("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"),
|
||||
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
|
||||
(
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
),
|
||||
("America/Argentina/Salta", "America/Argentina/Salta"),
|
||||
("America/Argentina/San_Juan", "America/Argentina/San_Juan"),
|
||||
("America/Argentina/San_Luis", "America/Argentina/San_Luis"),
|
||||
("America/Argentina/Tucuman", "America/Argentina/Tucuman"),
|
||||
("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"),
|
||||
("America/Aruba", "America/Aruba"),
|
||||
("America/Asuncion", "America/Asuncion"),
|
||||
("America/Atikokan", "America/Atikokan"),
|
||||
("America/Atka", "America/Atka"),
|
||||
("America/Bahia", "America/Bahia"),
|
||||
("America/Bahia_Banderas", "America/Bahia_Banderas"),
|
||||
("America/Barbados", "America/Barbados"),
|
||||
("America/Belem", "America/Belem"),
|
||||
("America/Belize", "America/Belize"),
|
||||
("America/Blanc-Sablon", "America/Blanc-Sablon"),
|
||||
("America/Boa_Vista", "America/Boa_Vista"),
|
||||
("America/Bogota", "America/Bogota"),
|
||||
("America/Boise", "America/Boise"),
|
||||
("America/Buenos_Aires", "America/Buenos_Aires"),
|
||||
("America/Cambridge_Bay", "America/Cambridge_Bay"),
|
||||
("America/Campo_Grande", "America/Campo_Grande"),
|
||||
("America/Cancun", "America/Cancun"),
|
||||
("America/Caracas", "America/Caracas"),
|
||||
("America/Catamarca", "America/Catamarca"),
|
||||
("America/Cayenne", "America/Cayenne"),
|
||||
("America/Cayman", "America/Cayman"),
|
||||
("America/Chicago", "America/Chicago"),
|
||||
("America/Chihuahua", "America/Chihuahua"),
|
||||
("America/Ciudad_Juarez", "America/Ciudad_Juarez"),
|
||||
("America/Coral_Harbour", "America/Coral_Harbour"),
|
||||
("America/Cordoba", "America/Cordoba"),
|
||||
("America/Costa_Rica", "America/Costa_Rica"),
|
||||
("America/Creston", "America/Creston"),
|
||||
("America/Cuiaba", "America/Cuiaba"),
|
||||
("America/Curacao", "America/Curacao"),
|
||||
("America/Danmarkshavn", "America/Danmarkshavn"),
|
||||
("America/Dawson", "America/Dawson"),
|
||||
("America/Dawson_Creek", "America/Dawson_Creek"),
|
||||
("America/Denver", "America/Denver"),
|
||||
("America/Detroit", "America/Detroit"),
|
||||
("America/Dominica", "America/Dominica"),
|
||||
("America/Edmonton", "America/Edmonton"),
|
||||
("America/Eirunepe", "America/Eirunepe"),
|
||||
("America/El_Salvador", "America/El_Salvador"),
|
||||
("America/Ensenada", "America/Ensenada"),
|
||||
("America/Fort_Nelson", "America/Fort_Nelson"),
|
||||
("America/Fort_Wayne", "America/Fort_Wayne"),
|
||||
("America/Fortaleza", "America/Fortaleza"),
|
||||
("America/Glace_Bay", "America/Glace_Bay"),
|
||||
("America/Godthab", "America/Godthab"),
|
||||
("America/Goose_Bay", "America/Goose_Bay"),
|
||||
("America/Grand_Turk", "America/Grand_Turk"),
|
||||
("America/Grenada", "America/Grenada"),
|
||||
("America/Guadeloupe", "America/Guadeloupe"),
|
||||
("America/Guatemala", "America/Guatemala"),
|
||||
("America/Guayaquil", "America/Guayaquil"),
|
||||
("America/Guyana", "America/Guyana"),
|
||||
("America/Halifax", "America/Halifax"),
|
||||
("America/Havana", "America/Havana"),
|
||||
("America/Hermosillo", "America/Hermosillo"),
|
||||
("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"),
|
||||
("America/Indiana/Knox", "America/Indiana/Knox"),
|
||||
("America/Indiana/Marengo", "America/Indiana/Marengo"),
|
||||
("America/Indiana/Petersburg", "America/Indiana/Petersburg"),
|
||||
("America/Indiana/Tell_City", "America/Indiana/Tell_City"),
|
||||
("America/Indiana/Vevay", "America/Indiana/Vevay"),
|
||||
("America/Indiana/Vincennes", "America/Indiana/Vincennes"),
|
||||
("America/Indiana/Winamac", "America/Indiana/Winamac"),
|
||||
("America/Indianapolis", "America/Indianapolis"),
|
||||
("America/Inuvik", "America/Inuvik"),
|
||||
("America/Iqaluit", "America/Iqaluit"),
|
||||
("America/Jamaica", "America/Jamaica"),
|
||||
("America/Jujuy", "America/Jujuy"),
|
||||
("America/Juneau", "America/Juneau"),
|
||||
("America/Kentucky/Louisville", "America/Kentucky/Louisville"),
|
||||
("America/Kentucky/Monticello", "America/Kentucky/Monticello"),
|
||||
("America/Knox_IN", "America/Knox_IN"),
|
||||
("America/Kralendijk", "America/Kralendijk"),
|
||||
("America/La_Paz", "America/La_Paz"),
|
||||
("America/Lima", "America/Lima"),
|
||||
("America/Los_Angeles", "America/Los_Angeles"),
|
||||
("America/Louisville", "America/Louisville"),
|
||||
("America/Lower_Princes", "America/Lower_Princes"),
|
||||
("America/Maceio", "America/Maceio"),
|
||||
("America/Managua", "America/Managua"),
|
||||
("America/Manaus", "America/Manaus"),
|
||||
("America/Marigot", "America/Marigot"),
|
||||
("America/Martinique", "America/Martinique"),
|
||||
("America/Matamoros", "America/Matamoros"),
|
||||
("America/Mazatlan", "America/Mazatlan"),
|
||||
("America/Mendoza", "America/Mendoza"),
|
||||
("America/Menominee", "America/Menominee"),
|
||||
("America/Merida", "America/Merida"),
|
||||
("America/Metlakatla", "America/Metlakatla"),
|
||||
("America/Mexico_City", "America/Mexico_City"),
|
||||
("America/Miquelon", "America/Miquelon"),
|
||||
("America/Moncton", "America/Moncton"),
|
||||
("America/Monterrey", "America/Monterrey"),
|
||||
("America/Montevideo", "America/Montevideo"),
|
||||
("America/Montreal", "America/Montreal"),
|
||||
("America/Montserrat", "America/Montserrat"),
|
||||
("America/Nassau", "America/Nassau"),
|
||||
("America/New_York", "America/New_York"),
|
||||
("America/Nipigon", "America/Nipigon"),
|
||||
("America/Nome", "America/Nome"),
|
||||
("America/Noronha", "America/Noronha"),
|
||||
("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"),
|
||||
("America/North_Dakota/Center", "America/North_Dakota/Center"),
|
||||
(
|
||||
"America/North_Dakota/New_Salem",
|
||||
"America/North_Dakota/New_Salem",
|
||||
),
|
||||
("America/Nuuk", "America/Nuuk"),
|
||||
("America/Ojinaga", "America/Ojinaga"),
|
||||
("America/Panama", "America/Panama"),
|
||||
("America/Pangnirtung", "America/Pangnirtung"),
|
||||
("America/Paramaribo", "America/Paramaribo"),
|
||||
("America/Phoenix", "America/Phoenix"),
|
||||
("America/Port-au-Prince", "America/Port-au-Prince"),
|
||||
("America/Port_of_Spain", "America/Port_of_Spain"),
|
||||
("America/Porto_Acre", "America/Porto_Acre"),
|
||||
("America/Porto_Velho", "America/Porto_Velho"),
|
||||
("America/Puerto_Rico", "America/Puerto_Rico"),
|
||||
("America/Punta_Arenas", "America/Punta_Arenas"),
|
||||
("America/Rainy_River", "America/Rainy_River"),
|
||||
("America/Rankin_Inlet", "America/Rankin_Inlet"),
|
||||
("America/Recife", "America/Recife"),
|
||||
("America/Regina", "America/Regina"),
|
||||
("America/Resolute", "America/Resolute"),
|
||||
("America/Rio_Branco", "America/Rio_Branco"),
|
||||
("America/Rosario", "America/Rosario"),
|
||||
("America/Santa_Isabel", "America/Santa_Isabel"),
|
||||
("America/Santarem", "America/Santarem"),
|
||||
("America/Santiago", "America/Santiago"),
|
||||
("America/Santo_Domingo", "America/Santo_Domingo"),
|
||||
("America/Sao_Paulo", "America/Sao_Paulo"),
|
||||
("America/Scoresbysund", "America/Scoresbysund"),
|
||||
("America/Shiprock", "America/Shiprock"),
|
||||
("America/Sitka", "America/Sitka"),
|
||||
("America/St_Barthelemy", "America/St_Barthelemy"),
|
||||
("America/St_Johns", "America/St_Johns"),
|
||||
("America/St_Kitts", "America/St_Kitts"),
|
||||
("America/St_Lucia", "America/St_Lucia"),
|
||||
("America/St_Thomas", "America/St_Thomas"),
|
||||
("America/St_Vincent", "America/St_Vincent"),
|
||||
("America/Swift_Current", "America/Swift_Current"),
|
||||
("America/Tegucigalpa", "America/Tegucigalpa"),
|
||||
("America/Thule", "America/Thule"),
|
||||
("America/Thunder_Bay", "America/Thunder_Bay"),
|
||||
("America/Tijuana", "America/Tijuana"),
|
||||
("America/Toronto", "America/Toronto"),
|
||||
("America/Tortola", "America/Tortola"),
|
||||
("America/Vancouver", "America/Vancouver"),
|
||||
("America/Virgin", "America/Virgin"),
|
||||
("America/Whitehorse", "America/Whitehorse"),
|
||||
("America/Winnipeg", "America/Winnipeg"),
|
||||
("America/Yakutat", "America/Yakutat"),
|
||||
("America/Yellowknife", "America/Yellowknife"),
|
||||
("Antarctica/Casey", "Antarctica/Casey"),
|
||||
("Antarctica/Davis", "Antarctica/Davis"),
|
||||
("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"),
|
||||
("Antarctica/Macquarie", "Antarctica/Macquarie"),
|
||||
("Antarctica/Mawson", "Antarctica/Mawson"),
|
||||
("Antarctica/McMurdo", "Antarctica/McMurdo"),
|
||||
("Antarctica/Palmer", "Antarctica/Palmer"),
|
||||
("Antarctica/Rothera", "Antarctica/Rothera"),
|
||||
("Antarctica/South_Pole", "Antarctica/South_Pole"),
|
||||
("Antarctica/Syowa", "Antarctica/Syowa"),
|
||||
("Antarctica/Troll", "Antarctica/Troll"),
|
||||
("Antarctica/Vostok", "Antarctica/Vostok"),
|
||||
("Arctic/Longyearbyen", "Arctic/Longyearbyen"),
|
||||
("Asia/Aden", "Asia/Aden"),
|
||||
("Asia/Almaty", "Asia/Almaty"),
|
||||
("Asia/Amman", "Asia/Amman"),
|
||||
("Asia/Anadyr", "Asia/Anadyr"),
|
||||
("Asia/Aqtau", "Asia/Aqtau"),
|
||||
("Asia/Aqtobe", "Asia/Aqtobe"),
|
||||
("Asia/Ashgabat", "Asia/Ashgabat"),
|
||||
("Asia/Ashkhabad", "Asia/Ashkhabad"),
|
||||
("Asia/Atyrau", "Asia/Atyrau"),
|
||||
("Asia/Baghdad", "Asia/Baghdad"),
|
||||
("Asia/Bahrain", "Asia/Bahrain"),
|
||||
("Asia/Baku", "Asia/Baku"),
|
||||
("Asia/Bangkok", "Asia/Bangkok"),
|
||||
("Asia/Barnaul", "Asia/Barnaul"),
|
||||
("Asia/Beirut", "Asia/Beirut"),
|
||||
("Asia/Bishkek", "Asia/Bishkek"),
|
||||
("Asia/Brunei", "Asia/Brunei"),
|
||||
("Asia/Calcutta", "Asia/Calcutta"),
|
||||
("Asia/Chita", "Asia/Chita"),
|
||||
("Asia/Choibalsan", "Asia/Choibalsan"),
|
||||
("Asia/Chongqing", "Asia/Chongqing"),
|
||||
("Asia/Chungking", "Asia/Chungking"),
|
||||
("Asia/Colombo", "Asia/Colombo"),
|
||||
("Asia/Dacca", "Asia/Dacca"),
|
||||
("Asia/Damascus", "Asia/Damascus"),
|
||||
("Asia/Dhaka", "Asia/Dhaka"),
|
||||
("Asia/Dili", "Asia/Dili"),
|
||||
("Asia/Dubai", "Asia/Dubai"),
|
||||
("Asia/Dushanbe", "Asia/Dushanbe"),
|
||||
("Asia/Famagusta", "Asia/Famagusta"),
|
||||
("Asia/Gaza", "Asia/Gaza"),
|
||||
("Asia/Harbin", "Asia/Harbin"),
|
||||
("Asia/Hebron", "Asia/Hebron"),
|
||||
("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"),
|
||||
("Asia/Hong_Kong", "Asia/Hong_Kong"),
|
||||
("Asia/Hovd", "Asia/Hovd"),
|
||||
("Asia/Irkutsk", "Asia/Irkutsk"),
|
||||
("Asia/Istanbul", "Asia/Istanbul"),
|
||||
("Asia/Jakarta", "Asia/Jakarta"),
|
||||
("Asia/Jayapura", "Asia/Jayapura"),
|
||||
("Asia/Jerusalem", "Asia/Jerusalem"),
|
||||
("Asia/Kabul", "Asia/Kabul"),
|
||||
("Asia/Kamchatka", "Asia/Kamchatka"),
|
||||
("Asia/Karachi", "Asia/Karachi"),
|
||||
("Asia/Kashgar", "Asia/Kashgar"),
|
||||
("Asia/Kathmandu", "Asia/Kathmandu"),
|
||||
("Asia/Katmandu", "Asia/Katmandu"),
|
||||
("Asia/Khandyga", "Asia/Khandyga"),
|
||||
("Asia/Kolkata", "Asia/Kolkata"),
|
||||
("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"),
|
||||
("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"),
|
||||
("Asia/Kuching", "Asia/Kuching"),
|
||||
("Asia/Kuwait", "Asia/Kuwait"),
|
||||
("Asia/Macao", "Asia/Macao"),
|
||||
("Asia/Macau", "Asia/Macau"),
|
||||
("Asia/Magadan", "Asia/Magadan"),
|
||||
("Asia/Makassar", "Asia/Makassar"),
|
||||
("Asia/Manila", "Asia/Manila"),
|
||||
("Asia/Muscat", "Asia/Muscat"),
|
||||
("Asia/Nicosia", "Asia/Nicosia"),
|
||||
("Asia/Novokuznetsk", "Asia/Novokuznetsk"),
|
||||
("Asia/Novosibirsk", "Asia/Novosibirsk"),
|
||||
("Asia/Omsk", "Asia/Omsk"),
|
||||
("Asia/Oral", "Asia/Oral"),
|
||||
("Asia/Phnom_Penh", "Asia/Phnom_Penh"),
|
||||
("Asia/Pontianak", "Asia/Pontianak"),
|
||||
("Asia/Pyongyang", "Asia/Pyongyang"),
|
||||
("Asia/Qatar", "Asia/Qatar"),
|
||||
("Asia/Qostanay", "Asia/Qostanay"),
|
||||
("Asia/Qyzylorda", "Asia/Qyzylorda"),
|
||||
("Asia/Rangoon", "Asia/Rangoon"),
|
||||
("Asia/Riyadh", "Asia/Riyadh"),
|
||||
("Asia/Saigon", "Asia/Saigon"),
|
||||
("Asia/Sakhalin", "Asia/Sakhalin"),
|
||||
("Asia/Samarkand", "Asia/Samarkand"),
|
||||
("Asia/Seoul", "Asia/Seoul"),
|
||||
("Asia/Shanghai", "Asia/Shanghai"),
|
||||
("Asia/Singapore", "Asia/Singapore"),
|
||||
("Asia/Srednekolymsk", "Asia/Srednekolymsk"),
|
||||
("Asia/Taipei", "Asia/Taipei"),
|
||||
("Asia/Tashkent", "Asia/Tashkent"),
|
||||
("Asia/Tbilisi", "Asia/Tbilisi"),
|
||||
("Asia/Tehran", "Asia/Tehran"),
|
||||
("Asia/Tel_Aviv", "Asia/Tel_Aviv"),
|
||||
("Asia/Thimbu", "Asia/Thimbu"),
|
||||
("Asia/Thimphu", "Asia/Thimphu"),
|
||||
("Asia/Tokyo", "Asia/Tokyo"),
|
||||
("Asia/Tomsk", "Asia/Tomsk"),
|
||||
("Asia/Ujung_Pandang", "Asia/Ujung_Pandang"),
|
||||
("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"),
|
||||
("Asia/Ulan_Bator", "Asia/Ulan_Bator"),
|
||||
("Asia/Urumqi", "Asia/Urumqi"),
|
||||
("Asia/Ust-Nera", "Asia/Ust-Nera"),
|
||||
("Asia/Vientiane", "Asia/Vientiane"),
|
||||
("Asia/Vladivostok", "Asia/Vladivostok"),
|
||||
("Asia/Yakutsk", "Asia/Yakutsk"),
|
||||
("Asia/Yangon", "Asia/Yangon"),
|
||||
("Asia/Yekaterinburg", "Asia/Yekaterinburg"),
|
||||
("Asia/Yerevan", "Asia/Yerevan"),
|
||||
("Atlantic/Azores", "Atlantic/Azores"),
|
||||
("Atlantic/Bermuda", "Atlantic/Bermuda"),
|
||||
("Atlantic/Canary", "Atlantic/Canary"),
|
||||
("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"),
|
||||
("Atlantic/Faeroe", "Atlantic/Faeroe"),
|
||||
("Atlantic/Faroe", "Atlantic/Faroe"),
|
||||
("Atlantic/Jan_Mayen", "Atlantic/Jan_Mayen"),
|
||||
("Atlantic/Madeira", "Atlantic/Madeira"),
|
||||
("Atlantic/Reykjavik", "Atlantic/Reykjavik"),
|
||||
("Atlantic/South_Georgia", "Atlantic/South_Georgia"),
|
||||
("Atlantic/St_Helena", "Atlantic/St_Helena"),
|
||||
("Atlantic/Stanley", "Atlantic/Stanley"),
|
||||
("Australia/ACT", "Australia/ACT"),
|
||||
("Australia/Adelaide", "Australia/Adelaide"),
|
||||
("Australia/Brisbane", "Australia/Brisbane"),
|
||||
("Australia/Broken_Hill", "Australia/Broken_Hill"),
|
||||
("Australia/Canberra", "Australia/Canberra"),
|
||||
("Australia/Currie", "Australia/Currie"),
|
||||
("Australia/Darwin", "Australia/Darwin"),
|
||||
("Australia/Eucla", "Australia/Eucla"),
|
||||
("Australia/Hobart", "Australia/Hobart"),
|
||||
("Australia/LHI", "Australia/LHI"),
|
||||
("Australia/Lindeman", "Australia/Lindeman"),
|
||||
("Australia/Lord_Howe", "Australia/Lord_Howe"),
|
||||
("Australia/Melbourne", "Australia/Melbourne"),
|
||||
("Australia/NSW", "Australia/NSW"),
|
||||
("Australia/North", "Australia/North"),
|
||||
("Australia/Perth", "Australia/Perth"),
|
||||
("Australia/Queensland", "Australia/Queensland"),
|
||||
("Australia/South", "Australia/South"),
|
||||
("Australia/Sydney", "Australia/Sydney"),
|
||||
("Australia/Tasmania", "Australia/Tasmania"),
|
||||
("Australia/Victoria", "Australia/Victoria"),
|
||||
("Australia/West", "Australia/West"),
|
||||
("Australia/Yancowinna", "Australia/Yancowinna"),
|
||||
("Brazil/Acre", "Brazil/Acre"),
|
||||
("Brazil/DeNoronha", "Brazil/DeNoronha"),
|
||||
("Brazil/East", "Brazil/East"),
|
||||
("Brazil/West", "Brazil/West"),
|
||||
("CET", "CET"),
|
||||
("CST6CDT", "CST6CDT"),
|
||||
("Canada/Atlantic", "Canada/Atlantic"),
|
||||
("Canada/Central", "Canada/Central"),
|
||||
("Canada/Eastern", "Canada/Eastern"),
|
||||
("Canada/Mountain", "Canada/Mountain"),
|
||||
("Canada/Newfoundland", "Canada/Newfoundland"),
|
||||
("Canada/Pacific", "Canada/Pacific"),
|
||||
("Canada/Saskatchewan", "Canada/Saskatchewan"),
|
||||
("Canada/Yukon", "Canada/Yukon"),
|
||||
("Chile/Continental", "Chile/Continental"),
|
||||
("Chile/EasterIsland", "Chile/EasterIsland"),
|
||||
("Cuba", "Cuba"),
|
||||
("EET", "EET"),
|
||||
("EST", "EST"),
|
||||
("EST5EDT", "EST5EDT"),
|
||||
("Egypt", "Egypt"),
|
||||
("Eire", "Eire"),
|
||||
("Etc/GMT", "Etc/GMT"),
|
||||
("Etc/GMT+0", "Etc/GMT+0"),
|
||||
("Etc/GMT+1", "Etc/GMT+1"),
|
||||
("Etc/GMT+10", "Etc/GMT+10"),
|
||||
("Etc/GMT+11", "Etc/GMT+11"),
|
||||
("Etc/GMT+12", "Etc/GMT+12"),
|
||||
("Etc/GMT+2", "Etc/GMT+2"),
|
||||
("Etc/GMT+3", "Etc/GMT+3"),
|
||||
("Etc/GMT+4", "Etc/GMT+4"),
|
||||
("Etc/GMT+5", "Etc/GMT+5"),
|
||||
("Etc/GMT+6", "Etc/GMT+6"),
|
||||
("Etc/GMT+7", "Etc/GMT+7"),
|
||||
("Etc/GMT+8", "Etc/GMT+8"),
|
||||
("Etc/GMT+9", "Etc/GMT+9"),
|
||||
("Etc/GMT-0", "Etc/GMT-0"),
|
||||
("Etc/GMT-1", "Etc/GMT-1"),
|
||||
("Etc/GMT-10", "Etc/GMT-10"),
|
||||
("Etc/GMT-11", "Etc/GMT-11"),
|
||||
("Etc/GMT-12", "Etc/GMT-12"),
|
||||
("Etc/GMT-13", "Etc/GMT-13"),
|
||||
("Etc/GMT-14", "Etc/GMT-14"),
|
||||
("Etc/GMT-2", "Etc/GMT-2"),
|
||||
("Etc/GMT-3", "Etc/GMT-3"),
|
||||
("Etc/GMT-4", "Etc/GMT-4"),
|
||||
("Etc/GMT-5", "Etc/GMT-5"),
|
||||
("Etc/GMT-6", "Etc/GMT-6"),
|
||||
("Etc/GMT-7", "Etc/GMT-7"),
|
||||
("Etc/GMT-8", "Etc/GMT-8"),
|
||||
("Etc/GMT-9", "Etc/GMT-9"),
|
||||
("Etc/GMT0", "Etc/GMT0"),
|
||||
("Etc/Greenwich", "Etc/Greenwich"),
|
||||
("Etc/UCT", "Etc/UCT"),
|
||||
("Etc/UTC", "Etc/UTC"),
|
||||
("Etc/Universal", "Etc/Universal"),
|
||||
("Etc/Zulu", "Etc/Zulu"),
|
||||
("Europe/Amsterdam", "Europe/Amsterdam"),
|
||||
("Europe/Andorra", "Europe/Andorra"),
|
||||
("Europe/Astrakhan", "Europe/Astrakhan"),
|
||||
("Europe/Athens", "Europe/Athens"),
|
||||
("Europe/Belfast", "Europe/Belfast"),
|
||||
("Europe/Belgrade", "Europe/Belgrade"),
|
||||
("Europe/Berlin", "Europe/Berlin"),
|
||||
("Europe/Bratislava", "Europe/Bratislava"),
|
||||
("Europe/Brussels", "Europe/Brussels"),
|
||||
("Europe/Bucharest", "Europe/Bucharest"),
|
||||
("Europe/Budapest", "Europe/Budapest"),
|
||||
("Europe/Busingen", "Europe/Busingen"),
|
||||
("Europe/Chisinau", "Europe/Chisinau"),
|
||||
("Europe/Copenhagen", "Europe/Copenhagen"),
|
||||
("Europe/Dublin", "Europe/Dublin"),
|
||||
("Europe/Gibraltar", "Europe/Gibraltar"),
|
||||
("Europe/Guernsey", "Europe/Guernsey"),
|
||||
("Europe/Helsinki", "Europe/Helsinki"),
|
||||
("Europe/Isle_of_Man", "Europe/Isle_of_Man"),
|
||||
("Europe/Istanbul", "Europe/Istanbul"),
|
||||
("Europe/Jersey", "Europe/Jersey"),
|
||||
("Europe/Kaliningrad", "Europe/Kaliningrad"),
|
||||
("Europe/Kiev", "Europe/Kiev"),
|
||||
("Europe/Kirov", "Europe/Kirov"),
|
||||
("Europe/Kyiv", "Europe/Kyiv"),
|
||||
("Europe/Lisbon", "Europe/Lisbon"),
|
||||
("Europe/Ljubljana", "Europe/Ljubljana"),
|
||||
("Europe/London", "Europe/London"),
|
||||
("Europe/Luxembourg", "Europe/Luxembourg"),
|
||||
("Europe/Madrid", "Europe/Madrid"),
|
||||
("Europe/Malta", "Europe/Malta"),
|
||||
("Europe/Mariehamn", "Europe/Mariehamn"),
|
||||
("Europe/Minsk", "Europe/Minsk"),
|
||||
("Europe/Monaco", "Europe/Monaco"),
|
||||
("Europe/Moscow", "Europe/Moscow"),
|
||||
("Europe/Nicosia", "Europe/Nicosia"),
|
||||
("Europe/Oslo", "Europe/Oslo"),
|
||||
("Europe/Paris", "Europe/Paris"),
|
||||
("Europe/Podgorica", "Europe/Podgorica"),
|
||||
("Europe/Prague", "Europe/Prague"),
|
||||
("Europe/Riga", "Europe/Riga"),
|
||||
("Europe/Rome", "Europe/Rome"),
|
||||
("Europe/Samara", "Europe/Samara"),
|
||||
("Europe/San_Marino", "Europe/San_Marino"),
|
||||
("Europe/Sarajevo", "Europe/Sarajevo"),
|
||||
("Europe/Saratov", "Europe/Saratov"),
|
||||
("Europe/Simferopol", "Europe/Simferopol"),
|
||||
("Europe/Skopje", "Europe/Skopje"),
|
||||
("Europe/Sofia", "Europe/Sofia"),
|
||||
("Europe/Stockholm", "Europe/Stockholm"),
|
||||
("Europe/Tallinn", "Europe/Tallinn"),
|
||||
("Europe/Tirane", "Europe/Tirane"),
|
||||
("Europe/Tiraspol", "Europe/Tiraspol"),
|
||||
("Europe/Ulyanovsk", "Europe/Ulyanovsk"),
|
||||
("Europe/Uzhgorod", "Europe/Uzhgorod"),
|
||||
("Europe/Vaduz", "Europe/Vaduz"),
|
||||
("Europe/Vatican", "Europe/Vatican"),
|
||||
("Europe/Vienna", "Europe/Vienna"),
|
||||
("Europe/Vilnius", "Europe/Vilnius"),
|
||||
("Europe/Volgograd", "Europe/Volgograd"),
|
||||
("Europe/Warsaw", "Europe/Warsaw"),
|
||||
("Europe/Zagreb", "Europe/Zagreb"),
|
||||
("Europe/Zaporozhye", "Europe/Zaporozhye"),
|
||||
("Europe/Zurich", "Europe/Zurich"),
|
||||
("GB", "GB"),
|
||||
("GB-Eire", "GB-Eire"),
|
||||
("GMT", "GMT"),
|
||||
("GMT+0", "GMT+0"),
|
||||
("GMT-0", "GMT-0"),
|
||||
("GMT0", "GMT0"),
|
||||
("Greenwich", "Greenwich"),
|
||||
("HST", "HST"),
|
||||
("Hongkong", "Hongkong"),
|
||||
("Iceland", "Iceland"),
|
||||
("Indian/Antananarivo", "Indian/Antananarivo"),
|
||||
("Indian/Chagos", "Indian/Chagos"),
|
||||
("Indian/Christmas", "Indian/Christmas"),
|
||||
("Indian/Cocos", "Indian/Cocos"),
|
||||
("Indian/Comoro", "Indian/Comoro"),
|
||||
("Indian/Kerguelen", "Indian/Kerguelen"),
|
||||
("Indian/Mahe", "Indian/Mahe"),
|
||||
("Indian/Maldives", "Indian/Maldives"),
|
||||
("Indian/Mauritius", "Indian/Mauritius"),
|
||||
("Indian/Mayotte", "Indian/Mayotte"),
|
||||
("Indian/Reunion", "Indian/Reunion"),
|
||||
("Iran", "Iran"),
|
||||
("Israel", "Israel"),
|
||||
("Jamaica", "Jamaica"),
|
||||
("Japan", "Japan"),
|
||||
("Kwajalein", "Kwajalein"),
|
||||
("Libya", "Libya"),
|
||||
("MET", "MET"),
|
||||
("MST", "MST"),
|
||||
("MST7MDT", "MST7MDT"),
|
||||
("Mexico/BajaNorte", "Mexico/BajaNorte"),
|
||||
("Mexico/BajaSur", "Mexico/BajaSur"),
|
||||
("Mexico/General", "Mexico/General"),
|
||||
("NZ", "NZ"),
|
||||
("NZ-CHAT", "NZ-CHAT"),
|
||||
("Navajo", "Navajo"),
|
||||
("PRC", "PRC"),
|
||||
("PST8PDT", "PST8PDT"),
|
||||
("Pacific/Apia", "Pacific/Apia"),
|
||||
("Pacific/Auckland", "Pacific/Auckland"),
|
||||
("Pacific/Bougainville", "Pacific/Bougainville"),
|
||||
("Pacific/Chatham", "Pacific/Chatham"),
|
||||
("Pacific/Chuuk", "Pacific/Chuuk"),
|
||||
("Pacific/Easter", "Pacific/Easter"),
|
||||
("Pacific/Efate", "Pacific/Efate"),
|
||||
("Pacific/Enderbury", "Pacific/Enderbury"),
|
||||
("Pacific/Fakaofo", "Pacific/Fakaofo"),
|
||||
("Pacific/Fiji", "Pacific/Fiji"),
|
||||
("Pacific/Funafuti", "Pacific/Funafuti"),
|
||||
("Pacific/Galapagos", "Pacific/Galapagos"),
|
||||
("Pacific/Gambier", "Pacific/Gambier"),
|
||||
("Pacific/Guadalcanal", "Pacific/Guadalcanal"),
|
||||
("Pacific/Guam", "Pacific/Guam"),
|
||||
("Pacific/Honolulu", "Pacific/Honolulu"),
|
||||
("Pacific/Johnston", "Pacific/Johnston"),
|
||||
("Pacific/Kanton", "Pacific/Kanton"),
|
||||
("Pacific/Kiritimati", "Pacific/Kiritimati"),
|
||||
("Pacific/Kosrae", "Pacific/Kosrae"),
|
||||
("Pacific/Kwajalein", "Pacific/Kwajalein"),
|
||||
("Pacific/Majuro", "Pacific/Majuro"),
|
||||
("Pacific/Marquesas", "Pacific/Marquesas"),
|
||||
("Pacific/Midway", "Pacific/Midway"),
|
||||
("Pacific/Nauru", "Pacific/Nauru"),
|
||||
("Pacific/Niue", "Pacific/Niue"),
|
||||
("Pacific/Norfolk", "Pacific/Norfolk"),
|
||||
("Pacific/Noumea", "Pacific/Noumea"),
|
||||
("Pacific/Pago_Pago", "Pacific/Pago_Pago"),
|
||||
("Pacific/Palau", "Pacific/Palau"),
|
||||
("Pacific/Pitcairn", "Pacific/Pitcairn"),
|
||||
("Pacific/Pohnpei", "Pacific/Pohnpei"),
|
||||
("Pacific/Ponape", "Pacific/Ponape"),
|
||||
("Pacific/Port_Moresby", "Pacific/Port_Moresby"),
|
||||
("Pacific/Rarotonga", "Pacific/Rarotonga"),
|
||||
("Pacific/Saipan", "Pacific/Saipan"),
|
||||
("Pacific/Samoa", "Pacific/Samoa"),
|
||||
("Pacific/Tahiti", "Pacific/Tahiti"),
|
||||
("Pacific/Tarawa", "Pacific/Tarawa"),
|
||||
("Pacific/Tongatapu", "Pacific/Tongatapu"),
|
||||
("Pacific/Truk", "Pacific/Truk"),
|
||||
("Pacific/Wake", "Pacific/Wake"),
|
||||
("Pacific/Wallis", "Pacific/Wallis"),
|
||||
("Pacific/Yap", "Pacific/Yap"),
|
||||
("Poland", "Poland"),
|
||||
("Portugal", "Portugal"),
|
||||
("ROC", "ROC"),
|
||||
("ROK", "ROK"),
|
||||
("Singapore", "Singapore"),
|
||||
("Turkey", "Turkey"),
|
||||
("UCT", "UCT"),
|
||||
("US/Alaska", "US/Alaska"),
|
||||
("US/Aleutian", "US/Aleutian"),
|
||||
("US/Arizona", "US/Arizona"),
|
||||
("US/Central", "US/Central"),
|
||||
("US/East-Indiana", "US/East-Indiana"),
|
||||
("US/Eastern", "US/Eastern"),
|
||||
("US/Hawaii", "US/Hawaii"),
|
||||
("US/Indiana-Starke", "US/Indiana-Starke"),
|
||||
("US/Michigan", "US/Michigan"),
|
||||
("US/Mountain", "US/Mountain"),
|
||||
("US/Pacific", "US/Pacific"),
|
||||
("US/Samoa", "US/Samoa"),
|
||||
("UTC", "UTC"),
|
||||
("Universal", "Universal"),
|
||||
("W-SU", "W-SU"),
|
||||
("WET", "WET"),
|
||||
("Zulu", "Zulu"),
|
||||
],
|
||||
default="UTC",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
]
|
13
bookwyrm/migrations/0171_merge_20221219_2020.py
Normal file
|
@ -0,0 +1,13 @@
|
|||
# Generated by Django 3.2.16 on 2022-12-19 20:20
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0167_sitesettings_import_size_limit"),
|
||||
("bookwyrm", "0170_merge_0168_auto_20221205_2331_0169_auto_20221206_0902"),
|
||||
]
|
||||
|
||||
operations = []
|
42
bookwyrm/migrations/0172_alter_user_preferred_language.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
# Generated by Django 3.2.16 on 2022-12-21 18:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0171_alter_user_preferred_timezone"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="user",
|
||||
name="preferred_language",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
choices=[
|
||||
("en-us", "English"),
|
||||
("ca-es", "Català (Catalan)"),
|
||||
("de-de", "Deutsch (German)"),
|
||||
("es-es", "Español (Spanish)"),
|
||||
("eu-es", "Euskara (Basque)"),
|
||||
("gl-es", "Galego (Galician)"),
|
||||
("it-it", "Italiano (Italian)"),
|
||||
("fi-fi", "Suomi (Finnish)"),
|
||||
("fr-fr", "Français (French)"),
|
||||
("lt-lt", "Lietuvių (Lithuanian)"),
|
||||
("no-no", "Norsk (Norwegian)"),
|
||||
("pl-pl", "Polski (Polish)"),
|
||||
("pt-br", "Português do Brasil (Brazilian Portuguese)"),
|
||||
("pt-pt", "Português Europeu (European Portuguese)"),
|
||||
("ro-ro", "Română (Romanian)"),
|
||||
("sv-se", "Svenska (Swedish)"),
|
||||
("zh-hans", "简体中文 (Simplified Chinese)"),
|
||||
("zh-hant", "繁體中文 (Traditional Chinese)"),
|
||||
],
|
||||
max_length=255,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
]
|
21
bookwyrm/migrations/0173_author_website.py
Normal file
|
@ -0,0 +1,21 @@
|
|||
# Generated by Django 3.2.16 on 2023-01-15 08:38
|
||||
|
||||
import bookwyrm.models.fields
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0172_alter_user_preferred_language"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="author",
|
||||
name="website",
|
||||
field=bookwyrm.models.fields.CharField(
|
||||
blank=True, max_length=255, null=True
|
||||
),
|
||||
),
|
||||
]
|
34
bookwyrm/migrations/0173_default_user_auth_group_setting.py
Normal file
|
@ -0,0 +1,34 @@
|
|||
# Generated by Django 3.2.16 on 2022-12-27 21:34
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
def backfill_sitesettings(apps, schema_editor):
|
||||
db_alias = schema_editor.connection.alias
|
||||
group_model = apps.get_model("auth", "Group")
|
||||
editor_group = group_model.objects.using(db_alias).filter(name="editor").first()
|
||||
|
||||
sitesettings_model = apps.get_model("bookwyrm", "SiteSettings")
|
||||
sitesettings_model.objects.update(default_user_auth_group=editor_group)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0175_merge_0173_author_website_0174_merge_20230111_1523"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="sitesettings",
|
||||
name="default_user_auth_group",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.RESTRICT,
|
||||
to="auth.group",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(backfill_sitesettings, migrations.RunPython.noop),
|
||||
]
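
For context on the RunPython operation above: its second argument is the reverse callable, and passing migrations.RunPython.noop keeps the migration reversible without trying to undo the backfill. A minimal sketch of the same shape with an explicit (hypothetical) reverse; "unset_default_group" is illustrative and not part of the actual migration:

def unset_default_group(apps, schema_editor):
    # reverse of the backfill: clear the field again (sketch only)
    apps.get_model("bookwyrm", "SiteSettings").objects.update(default_user_auth_group=None)

operations = [
    migrations.RunPython(backfill_sitesettings, reverse_code=unset_default_group),
]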
|
13
bookwyrm/migrations/0173_merge_20230102_1444.py
Normal file
|
@ -0,0 +1,13 @@
|
|||
# Generated by Django 3.2.16 on 2023-01-02 14:44
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0171_merge_20221219_2020"),
|
||||
("bookwyrm", "0172_alter_user_preferred_language"),
|
||||
]
|
||||
|
||||
operations = []
|
35
bookwyrm/migrations/0174_auto_20230130_1240.py
Normal file
|
@ -0,0 +1,35 @@
|
|||
# Generated by Django 3.2.16 on 2023-01-30 12:40
|
||||
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("auth", "0012_alter_user_first_name_max_length"),
|
||||
("bookwyrm", "0173_default_user_auth_group_setting"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="quotation",
|
||||
name="endposition",
|
||||
field=models.IntegerField(
|
||||
blank=True,
|
||||
null=True,
|
||||
validators=[django.core.validators.MinValueValidator(0)],
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="sitesettings",
|
||||
name="default_user_auth_group",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.PROTECT,
|
||||
to="auth.group",
|
||||
),
|
||||
),
|
||||
]
|
46
bookwyrm/migrations/0174_auto_20230222_1742.py
Normal file
|
@ -0,0 +1,46 @@
|
|||
# Generated by Django 3.2.18 on 2023-02-22 17:42
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0174_auto_20230130_1240"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="notification",
|
||||
name="related_link_domains",
|
||||
field=models.ManyToManyField(to="bookwyrm.LinkDomain"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="notification",
|
||||
name="notification_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("FAVORITE", "Favorite"),
|
||||
("REPLY", "Reply"),
|
||||
("MENTION", "Mention"),
|
||||
("TAG", "Tag"),
|
||||
("FOLLOW", "Follow"),
|
||||
("FOLLOW_REQUEST", "Follow Request"),
|
||||
("BOOST", "Boost"),
|
||||
("IMPORT", "Import"),
|
||||
("ADD", "Add"),
|
||||
("REPORT", "Report"),
|
||||
("LINK_DOMAIN", "Link Domain"),
|
||||
("INVITE", "Invite"),
|
||||
("ACCEPT", "Accept"),
|
||||
("JOIN", "Join"),
|
||||
("LEAVE", "Leave"),
|
||||
("REMOVE", "Remove"),
|
||||
("GROUP_PRIVACY", "Group Privacy"),
|
||||
("GROUP_NAME", "Group Name"),
|
||||
("GROUP_DESCRIPTION", "Group Description"),
|
||||
],
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
]
|
12
bookwyrm/migrations/0174_merge_20230111_1523.py
Normal file
|
@ -0,0 +1,12 @@
|
|||
# Generated by Django 3.2.16 on 2023-01-11 15:23
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0173_merge_20230102_1444"),
|
||||
]
|
||||
|
||||
operations = []
|
|
@ -0,0 +1,13 @@
|
|||
# Generated by Django 3.2.16 on 2023-01-19 20:17
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0173_author_website"),
|
||||
("bookwyrm", "0174_merge_20230111_1523"),
|
||||
]
|
||||
|
||||
operations = []
|
53
bookwyrm/migrations/0176_hashtag_support.py
Normal file
|
@ -0,0 +1,53 @@
|
|||
# Generated by Django 3.2.16 on 2022-12-17 19:28
|
||||
|
||||
import bookwyrm.models.fields
|
||||
import django.contrib.postgres.fields.citext
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0174_auto_20230130_1240"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Hashtag",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("created_date", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_date", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"remote_id",
|
||||
bookwyrm.models.fields.RemoteIdField(
|
||||
max_length=255,
|
||||
null=True,
|
||||
validators=[bookwyrm.models.fields.validate_remote_id],
|
||||
),
|
||||
),
|
||||
(
|
||||
"name",
|
||||
django.contrib.postgres.fields.citext.CICharField(max_length=256),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="status",
|
||||
name="mention_hashtags",
|
||||
field=bookwyrm.models.fields.TagField(
|
||||
related_name="mention_hashtag", to="bookwyrm.Hashtag"
|
||||
),
|
||||
),
|
||||
]
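
The Hashtag name column above uses django.contrib.postgres CICharField, which is backed by PostgreSQL's citext extension. If an instance's database did not already have that extension enabled, a migration would normally activate it before the column is created; a hedged sketch of that prerequisite (BookWyrm may already enable citext in an earlier migration):

from django.contrib.postgres.operations import CITextExtension
from django.db import migrations


class Migration(migrations.Migration):
    dependencies = []  # would point at the previous migration in practice
    operations = [CITextExtension()]  # enables the citext extension CICharField relies on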
|
|
@ -0,0 +1,13 @@
|
|||
# Generated by Django 3.2.18 on 2023-03-12 23:41
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookwyrm", "0174_auto_20230222_1742"),
|
||||
("bookwyrm", "0176_hashtag_support"),
|
||||
]
|
||||
|
||||
operations = []
|
61
bookwyrm/migrations/0178_auto_20230328_2132.py
Normal file
|
@ -0,0 +1,61 @@
|
|||
# Generated by Django 3.2.18 on 2023-03-28 21:32
|
||||
|
||||
import bookwyrm.models.fields
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("auth", "0012_alter_user_first_name_max_length"),
|
||||
("bookwyrm", "0177_merge_0174_auto_20230222_1742_0176_hashtag_support"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="hashtag",
|
||||
name="name",
|
||||
field=bookwyrm.models.fields.CICharField(max_length=256),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="sitesettings",
|
||||
name="default_user_auth_group",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.RESTRICT,
|
||||
to="auth.group",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="user",
|
||||
name="preferred_language",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
choices=[
|
||||
("en-us", "English"),
|
||||
("ca-es", "Català (Catalan)"),
|
||||
("de-de", "Deutsch (German)"),
|
||||
("eo-uy", "Esperanto (Esperanto)"),
|
||||
("es-es", "Español (Spanish)"),
|
||||
("eu-es", "Euskara (Basque)"),
|
||||
("gl-es", "Galego (Galician)"),
|
||||
("it-it", "Italiano (Italian)"),
|
||||
("fi-fi", "Suomi (Finnish)"),
|
||||
("fr-fr", "Français (French)"),
|
||||
("lt-lt", "Lietuvių (Lithuanian)"),
|
||||
("no-no", "Norsk (Norwegian)"),
|
||||
("pl-pl", "Polski (Polish)"),
|
||||
("pt-br", "Português do Brasil (Brazilian Portuguese)"),
|
||||
("pt-pt", "Português Europeu (European Portuguese)"),
|
||||
("ro-ro", "Română (Romanian)"),
|
||||
("sv-se", "Svenska (Swedish)"),
|
||||
("zh-hans", "简体中文 (Simplified Chinese)"),
|
||||
("zh-hant", "繁體中文 (Traditional Chinese)"),
|
||||
],
|
||||
max_length=255,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
]
|
|
@ -17,7 +17,8 @@ from .attachment import Image
|
|||
from .favorite import Favorite
|
||||
from .readthrough import ReadThrough, ProgressUpdate, ProgressMode
|
||||
|
||||
from .user import User, KeyPair, AnnualGoal
|
||||
from .user import User, KeyPair
|
||||
from .annual_goal import AnnualGoal
|
||||
from .relationship import UserFollows, UserFollowRequest, UserBlocks
|
||||
from .report import Report, ReportComment
|
||||
from .federated_server import FederatedServer
|
||||
|
@ -33,6 +34,8 @@ from .antispam import EmailBlocklist, IPBlocklist, AutoMod, automod_task
|
|||
|
||||
from .notification import Notification
|
||||
|
||||
from .hashtag import Hashtag
|
||||
|
||||
cls_members = inspect.getmembers(sys.modules[__name__], inspect.isclass)
|
||||
activity_models = {
|
||||
c[1].activity_serializer.__name__: c[1]
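
The tail of this hunk builds a lookup of model classes keyed by the name of their ActivityPub serializer. A self-contained sketch of the same registry pattern, using illustrative classes rather than BookWyrm's real models:

import inspect
import sys


class NoteActivity:
    pass


class Status:
    activity_serializer = NoteActivity


# collect every class visible in this module that declares a serializer
cls_members = inspect.getmembers(sys.modules[__name__], inspect.isclass)
activity_models = {
    cls.activity_serializer.__name__: cls
    for _, cls in cls_members
    if hasattr(cls, "activity_serializer")
}
# activity_models == {"NoteActivity": Status}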
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
""" activitypub model functionality """
|
||||
import asyncio
|
||||
from base64 import b64encode
|
||||
from collections import namedtuple
|
||||
from functools import reduce
|
||||
import json
|
||||
import operator
|
||||
import logging
|
||||
from typing import List
|
||||
from uuid import uuid4
|
||||
import requests
|
||||
from requests.exceptions import RequestException
|
||||
|
||||
import aiohttp
|
||||
from Crypto.PublicKey import RSA
|
||||
from Crypto.Signature import pkcs1_15
|
||||
from Crypto.Hash import SHA256
|
||||
|
@ -20,11 +21,11 @@ from django.utils.http import http_date
|
|||
from bookwyrm import activitypub
|
||||
from bookwyrm.settings import USER_AGENT, PAGE_LENGTH
|
||||
from bookwyrm.signatures import make_signature, make_digest
|
||||
from bookwyrm.tasks import app, MEDIUM
|
||||
from bookwyrm.tasks import app, MEDIUM, BROADCAST
|
||||
from bookwyrm.models.fields import ImageField, ManyToManyField
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
# I tried to separate these classes into mutliple files but I kept getting
|
||||
# I tried to separate these classes into multiple files but I kept getting
|
||||
# circular import errors so I gave up. I'm sure it could be done though!
|
||||
|
||||
PropertyField = namedtuple("PropertyField", ("set_activity_from_field"))
|
||||
|
@ -90,7 +91,7 @@ class ActivitypubMixin:
|
|||
|
||||
@classmethod
|
||||
def find_existing(cls, data):
|
||||
"""compare data to fields that can be used for deduplation.
|
||||
"""compare data to fields that can be used for deduplication.
|
||||
This always includes remote_id, but can also be unique identifiers
|
||||
like an isbn for an edition"""
|
||||
filters = []
|
||||
|
@ -125,7 +126,7 @@ class ActivitypubMixin:
|
|||
# there OUGHT to be only one match
|
||||
return match.first()
|
||||
|
||||
def broadcast(self, activity, sender, software=None, queue=MEDIUM):
|
||||
def broadcast(self, activity, sender, software=None, queue=BROADCAST):
|
||||
"""send out an activity"""
|
||||
broadcast_task.apply_async(
|
||||
args=(
|
||||
|
@ -136,7 +137,7 @@ class ActivitypubMixin:
|
|||
queue=queue,
|
||||
)
|
||||
|
||||
def get_recipients(self, software=None):
|
||||
def get_recipients(self, software=None) -> List[str]:
|
||||
"""figure out which inbox urls to post to"""
|
||||
# first we have to figure out who should receive this activity
|
||||
privacy = self.privacy if hasattr(self, "privacy") else "public"
|
||||
|
@ -197,7 +198,7 @@ class ActivitypubMixin:
|
|||
class ObjectMixin(ActivitypubMixin):
|
||||
"""add this mixin for object models that are AP serializable"""
|
||||
|
||||
def save(self, *args, created=None, software=None, priority=MEDIUM, **kwargs):
|
||||
def save(self, *args, created=None, software=None, priority=BROADCAST, **kwargs):
|
||||
"""broadcast created/updated/deleted objects as appropriate"""
|
||||
broadcast = kwargs.get("broadcast", True)
|
||||
# this bonus kwarg would cause an error in the base save method
|
||||
|
@ -233,8 +234,8 @@ class ObjectMixin(ActivitypubMixin):
|
|||
activity = self.to_create_activity(user)
|
||||
self.broadcast(activity, user, software=software, queue=priority)
|
||||
except AttributeError:
|
||||
# janky as heck, this catches the mutliple inheritence chain
|
||||
# for boosts and ignores this auxilliary broadcast
|
||||
# janky as heck, this catches the multiple inheritance chain
|
||||
# for boosts and ignores this auxiliary broadcast
|
||||
return
|
||||
return
|
||||
|
||||
|
@ -310,7 +311,7 @@ class OrderedCollectionPageMixin(ObjectMixin):
|
|||
|
||||
@property
|
||||
def collection_remote_id(self):
|
||||
"""this can be overriden if there's a special remote id, ie outbox"""
|
||||
"""this can be overridden if there's a special remote id, ie outbox"""
|
||||
return self.remote_id
|
||||
|
||||
def to_ordered_collection(
|
||||
|
@ -338,7 +339,7 @@ class OrderedCollectionPageMixin(ObjectMixin):
|
|||
activity["id"] = remote_id
|
||||
|
||||
paginated = Paginator(queryset, PAGE_LENGTH)
|
||||
# add computed fields specific to orderd collections
|
||||
# add computed fields specific to ordered collections
|
||||
activity["totalItems"] = paginated.count
|
||||
activity["first"] = f"{remote_id}?page=1"
|
||||
activity["last"] = f"{remote_id}?page={paginated.num_pages}"
|
||||
|
@ -404,7 +405,7 @@ class CollectionItemMixin(ActivitypubMixin):
|
|||
# first off, we want to save normally no matter what
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
# list items can be updateda, normally you would only broadcast on created
|
||||
# list items can be updated, normally you would only broadcast on created
|
||||
if not broadcast or not self.user.local:
|
||||
return
|
||||
|
||||
|
@ -505,20 +506,32 @@ def unfurl_related_field(related_field, sort_field=None):
|
|||
return related_field.remote_id
|
||||
|
||||
|
||||
@app.task(queue=MEDIUM)
|
||||
def broadcast_task(sender_id, activity, recipients):
|
||||
@app.task(queue=BROADCAST)
|
||||
def broadcast_task(sender_id: int, activity: str, recipients: List[str]):
|
||||
"""the celery task for broadcast"""
|
||||
user_model = apps.get_model("bookwyrm.User", require_ready=True)
|
||||
sender = user_model.objects.get(id=sender_id)
|
||||
sender = user_model.objects.select_related("key_pair").get(id=sender_id)
|
||||
asyncio.run(async_broadcast(recipients, sender, activity))
|
||||
|
||||
|
||||
async def async_broadcast(recipients: List[str], sender, data: str):
|
||||
"""Send all the broadcasts simultaneously"""
|
||||
timeout = aiohttp.ClientTimeout(total=10)
|
||||
async with aiohttp.ClientSession(timeout=timeout) as session:
|
||||
tasks = []
|
||||
for recipient in recipients:
|
||||
try:
|
||||
sign_and_send(sender, activity, recipient)
|
||||
except RequestException:
|
||||
pass
|
||||
tasks.append(
|
||||
asyncio.ensure_future(sign_and_send(session, sender, data, recipient))
|
||||
)
|
||||
|
||||
results = await asyncio.gather(*tasks)
|
||||
return results
|
||||
|
||||
|
||||
def sign_and_send(sender, data, destination):
|
||||
"""crpyto whatever and http junk"""
|
||||
async def sign_and_send(
|
||||
session: aiohttp.ClientSession, sender, data: str, destination: str
|
||||
):
|
||||
"""Sign the messages and send them in an asynchronous bundle"""
|
||||
now = http_date()
|
||||
|
||||
if not sender.key_pair.private_key:
|
||||
|
@ -527,27 +540,32 @@ def sign_and_send(sender, data, destination):
|
|||
|
||||
digest = make_digest(data)
|
||||
|
||||
response = requests.post(
|
||||
destination,
|
||||
data=data,
|
||||
headers={
|
||||
headers = {
|
||||
"Date": now,
|
||||
"Digest": digest,
|
||||
"Signature": make_signature(sender, destination, now, digest),
|
||||
"Signature": make_signature("post", sender, destination, now, digest),
|
||||
"Content-Type": "application/activity+json; charset=utf-8",
|
||||
"User-Agent": USER_AGENT,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
try:
|
||||
async with session.post(destination, data=data, headers=headers) as response:
|
||||
if not response.ok:
|
||||
response.raise_for_status()
|
||||
logger.exception(
|
||||
"Failed to send broadcast to %s: %s", destination, response.reason
|
||||
)
|
||||
return response
|
||||
except asyncio.TimeoutError:
|
||||
logger.info("Connection timed out for url: %s", destination)
|
||||
except aiohttp.ClientError as err:
|
||||
logger.exception(err)
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def to_ordered_collection_page(
|
||||
queryset, remote_id, id_only=False, page=1, pure=False, **kwargs
|
||||
):
|
||||
"""serialize and pagiante a queryset"""
|
||||
"""serialize and paginate a queryset"""
|
||||
paginated = Paginator(queryset, PAGE_LENGTH)
|
||||
|
||||
activity_page = paginated.get_page(page)
|
||||
|
|
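
The broadcast rewrite in this file replaces per-recipient synchronous requests.post calls with one aiohttp session shared across concurrent tasks. A minimal, self-contained sketch of that fan-out shape; the URLs and payload are placeholders and the error handling is simplified compared to the diff:

import asyncio
import aiohttp


async def deliver(session, url, payload):
    """POST one signed activity; swallow network errors, as the celery task does."""
    try:
        async with session.post(url, data=payload) as response:
            return response.status
    except (asyncio.TimeoutError, aiohttp.ClientError):
        return None


async def fan_out(urls, payload):
    """Send the same payload to every inbox concurrently over one session."""
    timeout = aiohttp.ClientTimeout(total=10)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        tasks = [asyncio.ensure_future(deliver(session, url, payload)) for url in urls]
        return await asyncio.gather(*tasks)


# Example: asyncio.run(fan_out(["https://example.net/inbox"], '{"type": "Note"}'))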
67
bookwyrm/models/annual_goal.py
Normal file
|
@ -0,0 +1,67 @@
|
|||
""" How many books do you want to read this year """
|
||||
from django.core.validators import MinValueValidator
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
|
||||
from bookwyrm.models.status import Review
|
||||
from .base_model import BookWyrmModel
|
||||
from . import fields, Review
|
||||
|
||||
|
||||
def get_current_year():
|
||||
"""sets default year for annual goal to this year"""
|
||||
return timezone.now().year
|
||||
|
||||
|
||||
class AnnualGoal(BookWyrmModel):
|
||||
"""set a goal for how many books you read in a year"""
|
||||
|
||||
user = models.ForeignKey("User", on_delete=models.PROTECT)
|
||||
goal = models.IntegerField(validators=[MinValueValidator(1)])
|
||||
year = models.IntegerField(default=get_current_year)
|
||||
privacy = models.CharField(
|
||||
max_length=255, default="public", choices=fields.PrivacyLevels
|
||||
)
|
||||
|
||||
class Meta:
|
||||
"""uniqueness constraint"""
|
||||
|
||||
unique_together = ("user", "year")
|
||||
|
||||
def get_remote_id(self):
|
||||
"""put the year in the path"""
|
||||
return f"{self.user.remote_id}/goal/{self.year}"
|
||||
|
||||
@property
|
||||
def books(self):
|
||||
"""the books you've read this year"""
|
||||
return (
|
||||
self.user.readthrough_set.filter(
|
||||
finish_date__year__gte=self.year,
|
||||
finish_date__year__lt=self.year + 1,
|
||||
)
|
||||
.order_by("-finish_date")
|
||||
.all()
|
||||
)
|
||||
|
||||
@property
|
||||
def ratings(self):
|
||||
"""ratings for books read this year"""
|
||||
book_ids = [r.book.id for r in self.books]
|
||||
reviews = Review.objects.filter(
|
||||
user=self.user,
|
||||
book__in=book_ids,
|
||||
)
|
||||
return {r.book_id: r.rating for r in reviews}
|
||||
|
||||
@property
|
||||
def progress(self):
|
||||
"""how many books you've read this year"""
|
||||
count = self.user.readthrough_set.filter(
|
||||
finish_date__year__gte=self.year,
|
||||
finish_date__year__lt=self.year + 1,
|
||||
).count()
|
||||
return {
|
||||
"count": count,
|
||||
"percent": int(float(count / self.goal) * 100),
|
||||
}
|
|
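
Hypothetical shell usage of the AnnualGoal model above; the values are made up, only the property names come from the diff:

from bookwyrm.models import AnnualGoal

goal = AnnualGoal.objects.get(user=some_user, year=2023)  # some_user is a placeholder
goal.books     # readthroughs finished in 2023, newest first
goal.ratings   # {book_id: rating} taken from the user's reviews of those books
goal.progress  # e.g. {"count": 12, "percent": 40} for a goal of 30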
@ -3,18 +3,33 @@ from functools import reduce
|
|||
import operator
|
||||
|
||||
from django.apps import apps
|
||||
from django.core.exceptions import PermissionDenied
|
||||
from django.db import models, transaction
|
||||
from django.db.models import Q
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from bookwyrm.tasks import app
|
||||
from bookwyrm.tasks import app, LOW
|
||||
from .base_model import BookWyrmModel
|
||||
from .user import User
|
||||
|
||||
|
||||
class EmailBlocklist(models.Model):
|
||||
class AdminModel(BookWyrmModel):
|
||||
"""Overrides the permissions methods"""
|
||||
|
||||
class Meta:
|
||||
"""this is just here to provide default fields for other models"""
|
||||
|
||||
abstract = True
|
||||
|
||||
def raise_not_editable(self, viewer):
|
||||
if viewer.has_perm("bookwyrm.moderate_user"):
|
||||
return
|
||||
raise PermissionDenied()
|
||||
|
||||
|
||||
class EmailBlocklist(AdminModel):
|
||||
"""blocked email addresses"""
|
||||
|
||||
created_date = models.DateTimeField(auto_now_add=True)
|
||||
domain = models.CharField(max_length=255, unique=True)
|
||||
is_active = models.BooleanField(default=True)
|
||||
|
||||
|
@ -29,10 +44,9 @@ class EmailBlocklist(models.Model):
|
|||
return User.objects.filter(email__endswith=f"@{self.domain}")
|
||||
|
||||
|
||||
class IPBlocklist(models.Model):
|
||||
class IPBlocklist(AdminModel):
|
||||
"""blocked ip addresses"""
|
||||
|
||||
created_date = models.DateTimeField(auto_now_add=True)
|
||||
address = models.CharField(max_length=255, unique=True)
|
||||
is_active = models.BooleanField(default=True)
|
||||
|
||||
|
@ -42,7 +56,7 @@ class IPBlocklist(models.Model):
|
|||
ordering = ("-created_date",)
|
||||
|
||||
|
||||
class AutoMod(models.Model):
|
||||
class AutoMod(AdminModel):
|
||||
"""rules to automatically flag suspicious activity"""
|
||||
|
||||
string_match = models.CharField(max_length=200, unique=True)
|
||||
|
@ -51,7 +65,7 @@ class AutoMod(models.Model):
|
|||
created_by = models.ForeignKey("User", on_delete=models.PROTECT)
|
||||
|
||||
|
||||
@app.task(queue="low_priority")
|
||||
@app.task(queue=LOW)
|
||||
def automod_task():
|
||||
"""Create reports"""
|
||||
if not AutoMod.objects.exists():
|
||||
|
@ -61,17 +75,14 @@ def automod_task():
|
|||
if not reports:
|
||||
return
|
||||
|
||||
admins = User.objects.filter(
|
||||
models.Q(user_permissions__name__in=["moderate_user", "moderate_post"])
|
||||
| models.Q(is_superuser=True)
|
||||
).all()
|
||||
admins = User.admins()
|
||||
notification_model = apps.get_model("bookwyrm", "Notification", require_ready=True)
|
||||
with transaction.atomic():
|
||||
for admin in admins:
|
||||
notification, _ = notification_model.objects.get_or_create(
|
||||
user=admin, notification_type=notification_model.REPORT, read=False
|
||||
)
|
||||
notification.related_repors.add(reports)
|
||||
notification.related_reports.set(reports)
|
||||
|
||||
|
||||
def automod_users(reporter):
|
||||
|
|
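
Two details of the automod change above are easy to miss: the admin query is delegated to User.admins(), and the notification update switches from related_repors.add(reports) (a typo'd field handed a whole queryset) to related_reports.set(reports). On a many-to-many manager, set() accepts an iterable such as a queryset and replaces the current relations, while add() expects individual objects. Illustrative calls, assuming a notification and a reports queryset as in the task:

notification.related_reports.set(reports)    # replace the related set in one call
notification.related_reports.add(*reports)   # append-style alternative, unpacked per object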
|
@ -1,8 +1,6 @@
|
|||
""" database schema for info about authors """
|
||||
import re
|
||||
from django.contrib.postgres.indexes import GinIndex
|
||||
from django.core.cache import cache
|
||||
from django.core.cache.utils import make_template_fragment_key
|
||||
from django.db import models
|
||||
|
||||
from bookwyrm import activitypub
|
||||
|
@ -24,6 +22,13 @@ class Author(BookDataModel):
|
|||
gutenberg_id = fields.CharField(
|
||||
max_length=255, blank=True, null=True, deduplication_field=True
|
||||
)
|
||||
isfdb = fields.CharField(
|
||||
max_length=255, blank=True, null=True, deduplication_field=True
|
||||
)
|
||||
|
||||
website = fields.CharField(
|
||||
max_length=255, blank=True, null=True, deduplication_field=True
|
||||
)
|
||||
# idk probably other keys would be useful here?
|
||||
born = fields.DateTimeField(blank=True, null=True)
|
||||
died = fields.DateTimeField(blank=True, null=True)
|
||||
|
@ -34,14 +39,10 @@ class Author(BookDataModel):
|
|||
bio = fields.HtmlField(null=True, blank=True)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
"""clear related template caches"""
|
||||
# clear template caches
|
||||
if self.id:
|
||||
cache_keys = [
|
||||
make_template_fragment_key("titleby", [book])
|
||||
for book in self.book_set.values_list("id", flat=True)
|
||||
]
|
||||
cache.delete_many(cache_keys)
|
||||
"""normalize isni format"""
|
||||
if self.isni:
|
||||
self.isni = re.sub(r"\s", "", self.isni)
|
||||
|
||||
return super().save(*args, **kwargs)
|
||||
|
||||
@property
|
||||
|
@ -55,6 +56,11 @@ class Author(BookDataModel):
|
|||
"""generate the url from the openlibrary id"""
|
||||
return f"https://openlibrary.org/authors/{self.openlibrary_key}"
|
||||
|
||||
@property
|
||||
def isfdb_link(self):
|
||||
"""generate the url from the isni id"""
|
||||
return f"https://www.isfdb.org/cgi-bin/ea.cgi?{self.isfdb}"
|
||||
|
||||
def get_remote_id(self):
|
||||
"""editions and works both use "book" instead of model_name"""
|
||||
return f"https://{DOMAIN}/author/{self.id}"
|
||||
|
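
The reworked Author.save() in this file drops the template-cache clearing and now only normalizes the ISNI by stripping whitespace. A worked example with an illustrative identifier:

import re

isni = "0000 0001 2103 2683"        # illustrative 16-digit ISNI with spacing
re.sub(r"\s", "", isni)             # -> "0000000121032683"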
|
|
@ -17,6 +17,7 @@ from .fields import RemoteIdField
|
|||
DeactivationReason = [
|
||||
("pending", _("Pending")),
|
||||
("self_deletion", _("Self deletion")),
|
||||
("self_deactivation", _("Self deactivation")),
|
||||
("moderator_suspension", _("Moderator suspension")),
|
||||
("moderator_deletion", _("Moderator deletion")),
|
||||
("domain_block", _("Domain block")),
|
||||
|
|
|
@ -4,7 +4,6 @@ import re
|
|||
from django.contrib.postgres.search import SearchVectorField
|
||||
from django.contrib.postgres.indexes import GinIndex
|
||||
from django.core.cache import cache
|
||||
from django.core.cache.utils import make_template_fragment_key
|
||||
from django.db import models, transaction
|
||||
from django.db.models import Prefetch
|
||||
from django.dispatch import receiver
|
||||
|
@ -55,6 +54,12 @@ class BookDataModel(ObjectMixin, BookWyrmModel):
|
|||
asin = fields.CharField(
|
||||
max_length=255, blank=True, null=True, deduplication_field=True
|
||||
)
|
||||
aasin = fields.CharField(
|
||||
max_length=255, blank=True, null=True, deduplication_field=True
|
||||
)
|
||||
isfdb = fields.CharField(
|
||||
max_length=255, blank=True, null=True, deduplication_field=True
|
||||
)
|
||||
search_vector = SearchVectorField(null=True)
|
||||
|
||||
last_edited_by = fields.ForeignKey(
|
||||
|
@ -73,6 +78,11 @@ class BookDataModel(ObjectMixin, BookWyrmModel):
|
|||
"""generate the url from the inventaire id"""
|
||||
return f"https://inventaire.io/entity/{self.inventaire_id}"
|
||||
|
||||
@property
|
||||
def isfdb_link(self):
|
||||
"""generate the url from the isfdb id"""
|
||||
return f"https://www.isfdb.org/cgi-bin/title.cgi?{self.isfdb}"
|
||||
|
||||
class Meta:
|
||||
"""can't initialize this model, that wouldn't make sense"""
|
||||
|
||||
|
@ -199,10 +209,6 @@ class Book(BookDataModel):
|
|||
if not isinstance(self, Edition) and not isinstance(self, Work):
|
||||
raise ValueError("Books should be added as Editions or Works")
|
||||
|
||||
# clear template caches
|
||||
cache_key = make_template_fragment_key("titleby", [self.id])
|
||||
cache.delete(cache_key)
|
||||
|
||||
return super().save(*args, **kwargs)
|
||||
|
||||
def get_remote_id(self):
|
||||
|
@ -243,6 +249,10 @@ class Work(OrderedCollectionPageMixin, Book):
|
|||
"""in case the default edition is not set"""
|
||||
return self.editions.order_by("-edition_rank").first()
|
||||
|
||||
def author_edition(self, author):
|
||||
"""in case the default edition doesn't have the required author"""
|
||||
return self.editions.filter(authors=author).order_by("-edition_rank").first()
|
||||
|
||||
def to_edition_list(self, **kwargs):
|
||||
"""an ordered collection of editions"""
|
||||
return self.to_ordered_collection(
|
||||
|
@ -313,7 +323,7 @@ class Edition(Book):
|
|||
def get_rank(self):
|
||||
"""calculate how complete the data is on this edition"""
|
||||
rank = 0
|
||||
# big ups for havinga cover
|
||||
# big ups for having a cover
|
||||
rank += int(bool(self.cover)) * 3
|
||||
# is it in the instance's preferred language?
|
||||
rank += int(bool(DEFAULT_LANGUAGE in self.languages))
|
||||
|
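
The new Work.author_edition() shown above complements default_edition: it returns the highest-ranked edition that actually credits a given author, and may return None when no edition lists them. Hypothetical usage with placeholder objects:

work = Work.objects.get(id=work_id)           # work_id and author are placeholders
edition = work.author_edition(author)         # best-ranked edition crediting the author
edition = edition or work.default_edition     # fall back when no edition credits them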
|
|
@ -20,8 +20,9 @@ class Favorite(ActivityMixin, BookWyrmModel):
|
|||
|
||||
activity_serializer = activitypub.Like
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
@classmethod
|
||||
def ignore_activity(cls, activity):
|
||||
def ignore_activity(cls, activity, allow_external_connections=True):
|
||||
"""don't bother with incoming favs of unknown statuses"""
|
||||
return not Status.objects.filter(remote_id=activity.object).exists()
|
||||
|
||||
|
|
|
@ -7,12 +7,14 @@ from urllib.parse import urljoin
|
|||
import dateutil.parser
|
||||
from dateutil.parser import ParserError
|
||||
from django.contrib.postgres.fields import ArrayField as DjangoArrayField
|
||||
from django.contrib.postgres.fields import CICharField as DjangoCICharField
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import models
|
||||
from django.forms import ClearableFileInput, ImageField as DjangoImageField
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.encoding import filepath_to_uri
|
||||
from markdown import markdown
|
||||
|
||||
from bookwyrm import activitypub
|
||||
from bookwyrm.connectors import get_image
|
||||
|
@ -66,16 +68,20 @@ class ActivitypubFieldMixin:
|
|||
self.activitypub_field = activitypub_field
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def set_field_from_activity(self, instance, data, overwrite=True):
|
||||
"""helper function for assinging a value to the field. Returns if changed"""
|
||||
def set_field_from_activity(
|
||||
self, instance, data, overwrite=True, allow_external_connections=True
|
||||
):
|
||||
"""helper function for assigning a value to the field. Returns if changed"""
|
||||
try:
|
||||
value = getattr(data, self.get_activitypub_field())
|
||||
except AttributeError:
|
||||
# masssively hack-y workaround for boosts
|
||||
# massively hack-y workaround for boosts
|
||||
if self.get_activitypub_field() != "attributedTo":
|
||||
raise
|
||||
value = getattr(data, "actor")
|
||||
formatted = self.field_from_activity(value)
|
||||
formatted = self.field_from_activity(
|
||||
value, allow_external_connections=allow_external_connections
|
||||
)
|
||||
if formatted is None or formatted is MISSING or formatted == {}:
|
||||
return False
|
||||
|
||||
|
@ -115,7 +121,8 @@ class ActivitypubFieldMixin:
|
|||
return {self.activitypub_wrapper: value}
|
||||
return value
|
||||
|
||||
def field_from_activity(self, value):
|
||||
# pylint: disable=unused-argument
|
||||
def field_from_activity(self, value, allow_external_connections=True):
|
||||
"""formatter to convert activitypub into a model value"""
|
||||
if value and hasattr(self, "activitypub_wrapper"):
|
||||
value = value.get(self.activitypub_wrapper)
|
||||
|
@ -137,7 +144,7 @@ class ActivitypubRelatedFieldMixin(ActivitypubFieldMixin):
|
|||
self.load_remote = load_remote
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def field_from_activity(self, value):
|
||||
def field_from_activity(self, value, allow_external_connections=True):
|
||||
if not value:
|
||||
return None
|
||||
|
||||
|
@ -158,7 +165,11 @@ class ActivitypubRelatedFieldMixin(ActivitypubFieldMixin):
|
|||
if not self.load_remote:
|
||||
# only look in the local database
|
||||
return related_model.find_existing_by_remote_id(value)
|
||||
return activitypub.resolve_remote_id(value, model=related_model)
|
||||
return activitypub.resolve_remote_id(
|
||||
value,
|
||||
model=related_model,
|
||||
allow_external_connections=allow_external_connections,
|
||||
)
|
||||
|
||||
|
||||
class RemoteIdField(ActivitypubFieldMixin, models.CharField):
|
||||
|
@ -210,7 +221,7 @@ PrivacyLevels = [
|
|||
|
||||
|
||||
class PrivacyField(ActivitypubFieldMixin, models.CharField):
|
||||
"""this maps to two differente activitypub fields"""
|
||||
"""this maps to two different activitypub fields"""
|
||||
|
||||
public = "https://www.w3.org/ns/activitystreams#Public"
|
||||
|
||||
|
@ -218,7 +229,9 @@ class PrivacyField(ActivitypubFieldMixin, models.CharField):
|
|||
super().__init__(*args, max_length=255, choices=PrivacyLevels, default="public")
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
def set_field_from_activity(self, instance, data, overwrite=True):
|
||||
def set_field_from_activity(
|
||||
self, instance, data, overwrite=True, allow_external_connections=True
|
||||
):
|
||||
if not overwrite:
|
||||
return False
|
||||
|
||||
|
@ -233,7 +246,11 @@ class PrivacyField(ActivitypubFieldMixin, models.CharField):
|
|||
break
|
||||
if not user_field:
|
||||
raise ValidationError("No user field found for privacy", data)
|
||||
user = activitypub.resolve_remote_id(getattr(data, user_field), model="User")
|
||||
user = activitypub.resolve_remote_id(
|
||||
getattr(data, user_field),
|
||||
model="User",
|
||||
allow_external_connections=allow_external_connections,
|
||||
)
|
||||
|
||||
if to == [self.public]:
|
||||
setattr(instance, self.name, "public")
|
||||
|
@ -294,13 +311,17 @@ class ManyToManyField(ActivitypubFieldMixin, models.ManyToManyField):
|
|||
self.link_only = link_only
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def set_field_from_activity(self, instance, data, overwrite=True):
|
||||
def set_field_from_activity(
|
||||
self, instance, data, overwrite=True, allow_external_connections=True
|
||||
):
|
||||
"""helper function for assigning a value to the field"""
|
||||
if not overwrite and getattr(instance, self.name).exists():
|
||||
return False
|
||||
|
||||
value = getattr(data, self.get_activitypub_field())
|
||||
formatted = self.field_from_activity(value)
|
||||
formatted = self.field_from_activity(
|
||||
value, allow_external_connections=allow_external_connections
|
||||
)
|
||||
if formatted is None or formatted is MISSING:
|
||||
return False
|
||||
getattr(instance, self.name).set(formatted)
|
||||
|
@ -312,7 +333,7 @@ class ManyToManyField(ActivitypubFieldMixin, models.ManyToManyField):
|
|||
return f"{value.instance.remote_id}/{self.name}"
|
||||
return [i.remote_id for i in value.all()]
|
||||
|
||||
def field_from_activity(self, value):
|
||||
def field_from_activity(self, value, allow_external_connections=True):
|
||||
if value is None or value is MISSING:
|
||||
return None
|
||||
if not isinstance(value, list):
|
||||
|
@ -325,7 +346,11 @@ class ManyToManyField(ActivitypubFieldMixin, models.ManyToManyField):
|
|||
except ValidationError:
|
||||
continue
|
||||
items.append(
|
||||
activitypub.resolve_remote_id(remote_id, model=self.related_model)
|
||||
activitypub.resolve_remote_id(
|
||||
remote_id,
|
||||
model=self.related_model,
|
||||
allow_external_connections=allow_external_connections,
|
||||
)
|
||||
)
|
||||
return items
|
||||
|
||||
|
@ -352,7 +377,7 @@ class TagField(ManyToManyField):
|
|||
)
|
||||
return tags
|
||||
|
||||
def field_from_activity(self, value):
|
||||
def field_from_activity(self, value, allow_external_connections=True):
|
||||
if not isinstance(value, list):
|
||||
return None
|
||||
items = []
|
||||
|
@ -364,8 +389,21 @@ class TagField(ManyToManyField):
|
|||
if tag_type != self.related_model.activity_serializer.type:
|
||||
# tags can contain multiple types
|
||||
continue
|
||||
|
||||
if tag_type == "Hashtag":
|
||||
# we already have all data to create hashtags,
|
||||
# no need to fetch from remote
|
||||
item = self.related_model.activity_serializer(**link_json)
|
||||
hashtag = item.to_model(model=self.related_model, save=True)
|
||||
items.append(hashtag)
|
||||
else:
|
||||
# for other tag types we fetch them remotely
|
||||
items.append(
|
||||
activitypub.resolve_remote_id(link.href, model=self.related_model)
|
||||
activitypub.resolve_remote_id(
|
||||
link.href,
|
||||
model=self.related_model,
|
||||
allow_external_connections=allow_external_connections,
|
||||
)
|
||||
)
|
||||
return items
|
||||
|
||||
|
@ -389,11 +427,15 @@ class ImageField(ActivitypubFieldMixin, models.ImageField):
|
|||
self.alt_field = alt_field
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
# pylint: disable=arguments-differ,arguments-renamed
|
||||
def set_field_from_activity(self, instance, data, save=True, overwrite=True):
|
||||
"""helper function for assinging a value to the field"""
|
||||
# pylint: disable=arguments-differ,arguments-renamed,too-many-arguments
|
||||
def set_field_from_activity(
|
||||
self, instance, data, save=True, overwrite=True, allow_external_connections=True
|
||||
):
|
||||
"""helper function for assigning a value to the field"""
|
||||
value = getattr(data, self.get_activitypub_field())
|
||||
formatted = self.field_from_activity(value)
|
||||
formatted = self.field_from_activity(
|
||||
value, allow_external_connections=allow_external_connections
|
||||
)
|
||||
if formatted is None or formatted is MISSING:
|
||||
return False
|
||||
|
||||
|
@ -425,7 +467,7 @@ class ImageField(ActivitypubFieldMixin, models.ImageField):
|
|||
|
||||
return activitypub.Document(url=url, name=alt)
|
||||
|
||||
def field_from_activity(self, value):
|
||||
def field_from_activity(self, value, allow_external_connections=True):
|
||||
image_slug = value
|
||||
# when it's an inline image (User avatar/icon, Book cover), it's a json
|
||||
# blob, but when it's an attached image, it's just a url
|
||||
|
@ -480,7 +522,7 @@ class DateTimeField(ActivitypubFieldMixin, models.DateTimeField):
|
|||
return None
|
||||
return value.isoformat()
|
||||
|
||||
def field_from_activity(self, value):
|
||||
def field_from_activity(self, value, allow_external_connections=True):
|
||||
try:
|
||||
date_value = dateutil.parser.parse(value)
|
||||
try:
|
||||
|
@ -494,11 +536,14 @@ class DateTimeField(ActivitypubFieldMixin, models.DateTimeField):
|
|||
class HtmlField(ActivitypubFieldMixin, models.TextField):
|
||||
"""a text field for storing html"""
|
||||
|
||||
def field_from_activity(self, value):
|
||||
def field_from_activity(self, value, allow_external_connections=True):
|
||||
if not value or value == MISSING:
|
||||
return None
|
||||
return clean(value)
|
||||
|
||||
def field_to_activity(self, value):
|
||||
return markdown(value) if value else value
|
||||
|
||||
|
||||
class ArrayField(ActivitypubFieldMixin, DjangoArrayField):
|
||||
"""activitypub-aware array field"""
|
||||
|
@ -511,6 +556,10 @@ class CharField(ActivitypubFieldMixin, models.CharField):
|
|||
"""activitypub-aware char field"""
|
||||
|
||||
|
||||
class CICharField(ActivitypubFieldMixin, DjangoCICharField):
|
||||
"""activitypub-aware cichar field"""
|
||||
|
||||
|
||||
class URLField(ActivitypubFieldMixin, models.URLField):
|
||||
"""activitypub-aware url field"""
|
||||
|
||||
|
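
The recurring change in this fields module threads an allow_external_connections flag from set_field_from_activity down into activitypub.resolve_remote_id, so deserialization can be limited to objects already known locally. An illustrative call using the keyword as it appears in the diff; the precise behavior of the flag is assumed rather than shown here:

from bookwyrm import activitypub

edition = activitypub.resolve_remote_id(
    "https://example.net/book/1",        # placeholder remote id
    model="Edition",
    allow_external_connections=False,    # assumed: only match local records, no outbound fetch
)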
|
23
bookwyrm/models/hashtag.py
Normal file
|
@ -0,0 +1,23 @@
|
|||
""" model for tags """
|
||||
from bookwyrm import activitypub
|
||||
from .activitypub_mixin import ActivitypubMixin
|
||||
from .base_model import BookWyrmModel
|
||||
from .fields import CICharField
|
||||
|
||||
|
||||
class Hashtag(ActivitypubMixin, BookWyrmModel):
|
||||
"a hashtag which can be used in statuses"
|
||||
|
||||
name = CICharField(
|
||||
max_length=256,
|
||||
blank=False,
|
||||
null=False,
|
||||
activitypub_field="name",
|
||||
deduplication_field=True,
|
||||
)
|
||||
|
||||
name_field = "name"
|
||||
activity_serializer = activitypub.Hashtag
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{self.__class__} id={self.id} name={self.name}>"
|
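
Because the name column above is a case-insensitive CICharField and marked as a deduplication field, lookups ignore case. A hypothetical shell session:

from bookwyrm.models import Hashtag

tag, _ = Hashtag.objects.get_or_create(name="SciFi")
Hashtag.objects.filter(name="scifi").exists()  # True: citext comparison is case-insensitive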
|
@ -1,12 +1,25 @@
|
|||
""" track progress of goodreads imports """
|
||||
import math
|
||||
import re
|
||||
import dateutil.parser
|
||||
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from bookwyrm.connectors import connector_manager
|
||||
from bookwyrm.models import ReadThrough, User, Book, Edition
|
||||
from bookwyrm.models import (
|
||||
User,
|
||||
Book,
|
||||
Edition,
|
||||
Work,
|
||||
ShelfBook,
|
||||
Shelf,
|
||||
ReadThrough,
|
||||
Review,
|
||||
ReviewRating,
|
||||
)
|
||||
from bookwyrm.tasks import app, LOW, IMPORTS
|
||||
from .fields import PrivacyLevels
|
||||
|
||||
|
||||
|
@@ -30,6 +43,14 @@ def construct_search_term(title, author):
    return " ".join([title, author])


ImportStatuses = [
    ("pending", _("Pending")),
    ("active", _("Active")),
    ("complete", _("Complete")),
    ("stopped", _("Stopped")),
]


class ImportJob(models.Model):
    """entry for a specific request for book data import"""
@@ -38,16 +59,77 @@ class ImportJob(models.Model):
    updated_date = models.DateTimeField(default=timezone.now)
    include_reviews = models.BooleanField(default=True)
    mappings = models.JSONField()
    complete = models.BooleanField(default=False)
    source = models.CharField(max_length=100)
    privacy = models.CharField(max_length=255, default="public", choices=PrivacyLevels)
    retry = models.BooleanField(default=False)
    task_id = models.CharField(max_length=200, null=True, blank=True)

    complete = models.BooleanField(default=False)
    status = models.CharField(
        max_length=50, choices=ImportStatuses, default="pending", null=True
    )

    def start_job(self):
        """Report that the job has started"""
        task = start_import_task.delay(self.id)
        self.task_id = task.id

        self.save(update_fields=["task_id"])

    def complete_job(self):
        """Report that the job has completed"""
        self.status = "complete"
        self.complete = True
        self.pending_items.update(fail_reason=_("Import stopped"))
        self.save(update_fields=["status", "complete"])

    def stop_job(self):
        """Stop the job"""
        self.status = "stopped"
        self.complete = True
        self.save(update_fields=["status", "complete"])
        self.pending_items.update(fail_reason=_("Import stopped"))

        # stop starting
        app.control.revoke(self.task_id, terminate=True)
        tasks = self.pending_items.filter(task_id__isnull=False).values_list(
            "task_id", flat=True
        )
        app.control.revoke(list(tasks))

    @property
    def pending_items(self):
        """items that haven't been processed yet"""
        return self.items.filter(fail_reason__isnull=True, book__isnull=True)

    @property
    def item_count(self):
        """How many books do you want to import???"""
        return self.items.count()

    @property
    def percent_complete(self):
        """How far along?"""
        item_count = self.item_count
        if not item_count:
            return 0
        return math.floor((item_count - self.pending_item_count) / item_count * 100)

    @property
    def pending_item_count(self):
        """And how many pending items??"""
        return self.pending_items.count()

    @property
    def successful_item_count(self):
        """How many found a book?"""
        return self.items.filter(book__isnull=False).count()

    @property
    def failed_item_count(self):
        """How many found a book?"""
        return self.items.filter(fail_reason__isnull=False).count()


class ImportItem(models.Model):
    """a single line of a csv being imported"""
@@ -68,15 +150,18 @@ class ImportItem(models.Model):
    linked_review = models.ForeignKey(
        "Review", on_delete=models.SET_NULL, null=True, blank=True
    )
    task_id = models.CharField(max_length=200, null=True, blank=True)

    def update_job(self):
        """let the job know when the items get work done"""
        job = self.job
        if job.complete:
            return

        job.updated_date = timezone.now()
        job.save()
        if not job.pending_items.exists() and not job.complete:
            job.complete = True
            job.save(update_fields=["complete"])
            job.complete_job()

    def resolve(self):
        """try various ways to lookup a book"""
@@ -167,8 +252,11 @@ class ImportItem(models.Model):
    @property
    def rating(self):
        """x/5 star rating for a book"""
        if self.normalized_data.get("rating"):
        if not self.normalized_data.get("rating"):
            return None
        try:
            return float(self.normalized_data.get("rating"))
        except ValueError:
            return None

    @property
@@ -240,3 +328,138 @@ class ImportItem(models.Model):
        return "{} by {}".format(
            self.normalized_data.get("title"), self.normalized_data.get("authors")
        )


@app.task(queue=IMPORTS)
def start_import_task(job_id):
    """trigger the child tasks for each row"""
    job = ImportJob.objects.get(id=job_id)
    job.status = "active"
    job.save(update_fields=["status"])
    # don't start the job if it was stopped from the UI
    if job.complete:
        return

    # these are sub-tasks so that one big task doesn't use up all the memory in celery
    for item in job.items.all():
        task = import_item_task.delay(item.id)
        item.task_id = task.id
        item.save()
    job.status = "active"
    job.save()


@app.task(queue=IMPORTS)
def import_item_task(item_id):
    """resolve a row into a book"""
    item = ImportItem.objects.get(id=item_id)
    # make sure the job has not been stopped
    if item.job.complete:
        return

    try:
        item.resolve()
    except Exception as err:  # pylint: disable=broad-except
        item.fail_reason = _("Error loading book")
        item.save()
        item.update_job()
        raise err

    if item.book:
        # shelves book and handles reviews
        handle_imported_book(item)
    else:
        item.fail_reason = _("Could not find a match for book")

    item.save()
    item.update_job()


def handle_imported_book(item):
    """process a csv and then post about it"""
    job = item.job
    if job.complete:
        return

    user = job.user
    if isinstance(item.book, Work):
        item.book = item.book.default_edition
    if not item.book:
        item.fail_reason = _("Error loading book")
        item.save()
        return
    if not isinstance(item.book, Edition):
        item.book = item.book.edition

    existing_shelf = ShelfBook.objects.filter(book=item.book, user=user).exists()

    # shelve the book if it hasn't been shelved already
    if item.shelf and not existing_shelf:
        desired_shelf = Shelf.objects.get(identifier=item.shelf, user=user)
        shelved_date = item.date_added or timezone.now()
        ShelfBook(
            book=item.book, shelf=desired_shelf, user=user, shelved_date=shelved_date
        ).save(priority=LOW)

    for read in item.reads:
        # check for an existing readthrough with the same dates
        if ReadThrough.objects.filter(
            user=user,
            book=item.book,
            start_date=read.start_date,
            finish_date=read.finish_date,
        ).exists():
            continue
        read.book = item.book
        read.user = user
        read.save()

    if job.include_reviews and (item.rating or item.review) and not item.linked_review:
        # we don't know the publication date of the review,
        # but "now" is a bad guess
        published_date_guess = item.date_read or item.date_added
        if item.review:
            # pylint: disable=consider-using-f-string
            review_title = "Review of {!r} on {!r}".format(
                item.book.title,
                job.source,
            )
            review = Review.objects.filter(
                user=user,
                book=item.book,
                name=review_title,
                rating=item.rating,
                published_date=published_date_guess,
            ).first()
            if not review:
                review = Review(
                    user=user,
                    book=item.book,
                    name=review_title,
                    content=item.review,
                    rating=item.rating,
                    published_date=published_date_guess,
                    privacy=job.privacy,
                )
                review.save(software="bookwyrm", priority=LOW)
        else:
            # just a rating
            review = ReviewRating.objects.filter(
                user=user,
                book=item.book,
                published_date=published_date_guess,
                rating=item.rating,
            ).first()
            if not review:
                review = ReviewRating(
                    user=user,
                    book=item.book,
                    rating=item.rating,
                    published_date=published_date_guess,
                    privacy=job.privacy,
                )
                review.save(software="bookwyrm", priority=LOW)

        # only broadcast this review to other bookwyrm instances
        item.linked_review = review
    item.save()
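An illustrative sketch, not part of the diff, of the job lifecycle these hunks introduce. The field values and the single driver helper are assumptions; start_job, stop_job and the celery task names come from the code above.

# hypothetical example -- not part of the commit; field values are made up
from bookwyrm.models import ImportJob


def run_then_cancel_import(user, mappings):
    """create an import, start it, then stop it from the UI side"""
    job = ImportJob.objects.create(
        user=user,
        mappings=mappings,
        source="GoodReads",
        privacy="public",
    )
    # queues start_import_task, which fans out one import_item_task per row
    # on the IMPORTS queue and flips the job status to "active"
    job.start_job()

    # later: mark the job "stopped", revoke the parent task and any queued
    # per-item tasks, and fail the still-pending items
    job.stop_job()
    return job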
bookwyrm/models/link.py

@@ -31,7 +31,7 @@ class Link(ActivitypubMixin, BookWyrmModel):

    @property
    def name(self):
        """link name via the assocaited domain"""
        """link name via the associated domain"""
        return self.domain.name

    def save(self, *args, **kwargs):
bookwyrm/models/notification.py

@@ -2,8 +2,8 @@
from django.db import models, transaction
from django.dispatch import receiver
from .base_model import BookWyrmModel
from . import Boost, Favorite, GroupMemberInvitation, ImportJob, ListItem, Report
from . import Status, User, UserFollowRequest
from . import Boost, Favorite, GroupMemberInvitation, ImportJob, LinkDomain
from . import ListItem, Report, Status, User, UserFollowRequest


class Notification(BookWyrmModel):

@@ -28,6 +28,7 @@ class Notification(BookWyrmModel):

    # Admin
    REPORT = "REPORT"
    LINK_DOMAIN = "LINK_DOMAIN"

    # Groups
    INVITE = "INVITE"

@@ -43,7 +44,7 @@ class Notification(BookWyrmModel):
    NotificationType = models.TextChoices(
        # there has got be a better way to do this
        "NotificationType",
        f"{FAVORITE} {REPLY} {MENTION} {TAG} {FOLLOW} {FOLLOW_REQUEST} {BOOST} {IMPORT} {ADD} {REPORT} {INVITE} {ACCEPT} {JOIN} {LEAVE} {REMOVE} {GROUP_PRIVACY} {GROUP_NAME} {GROUP_DESCRIPTION}",
        f"{FAVORITE} {REPLY} {MENTION} {TAG} {FOLLOW} {FOLLOW_REQUEST} {BOOST} {IMPORT} {ADD} {REPORT} {LINK_DOMAIN} {INVITE} {ACCEPT} {JOIN} {LEAVE} {REMOVE} {GROUP_PRIVACY} {GROUP_NAME} {GROUP_DESCRIPTION}",
    )

    user = models.ForeignKey("User", on_delete=models.CASCADE)

@@ -64,6 +65,7 @@ class Notification(BookWyrmModel):
        "ListItem", symmetrical=False, related_name="notifications"
    )
    related_reports = models.ManyToManyField("Report", symmetrical=False)
    related_link_domains = models.ManyToManyField("LinkDomain", symmetrical=False)

    @classmethod
    @transaction.atomic

@@ -214,7 +216,7 @@ def notify_user_on_import_complete(
    update_fields = update_fields or []
    if not instance.complete or "complete" not in update_fields:
        return
    Notification.objects.create(
    Notification.objects.get_or_create(
        user=instance.user,
        notification_type=Notification.IMPORT,
        related_import=instance,

@@ -231,10 +233,7 @@ def notify_admins_on_report(sender, instance, created, *args, **kwargs):
        return

    # moderators and superusers should be notified
    admins = User.objects.filter(
        models.Q(user_permissions__name__in=["moderate_user", "moderate_post"])
        | models.Q(is_superuser=True)
    ).all()
    admins = User.admins()
    for admin in admins:
        notification, _ = Notification.objects.get_or_create(
            user=admin,

@@ -244,6 +243,26 @@ def notify_admins_on_report(sender, instance, created, *args, **kwargs):
        notification.related_reports.add(instance)


@receiver(models.signals.post_save, sender=LinkDomain)
@transaction.atomic
# pylint: disable=unused-argument
def notify_admins_on_link_domain(sender, instance, created, *args, **kwargs):
    """a new link domain needs to be verified"""
    if not created:
        # otherwise you'll get a notification when you approve a domain
        return

    # moderators and superusers should be notified
    admins = User.admins()
    for admin in admins:
        notification, _ = Notification.objects.get_or_create(
            user=admin,
            notification_type=Notification.LINK_DOMAIN,
            read=False,
        )
        notification.related_link_domains.add(instance)


@receiver(models.signals.post_save, sender=GroupMemberInvitation)
# pylint: disable=unused-argument
def notify_user_on_group_invite(sender, instance, *args, **kwargs):

@@ -265,7 +284,7 @@ def notify_user_on_list_item_add(sender, instance, created, *args, **kwargs):
        return

    list_owner = instance.book_list.user
    # create a notification if somoene ELSE added to a local user's list
    # create a notification if someone ELSE added to a local user's list
    if list_owner.local and list_owner != instance.user:
        # keep the related_user singular, group the items
        Notification.notify_list_item(list_owner, instance)

@@ -300,8 +319,10 @@ def notify_user_on_follow(sender, instance, created, *args, **kwargs):
        notification.read = False
        notification.save()
    else:
        # Only group unread follows
        Notification.notify(
            instance.user_object,
            instance.user_subject,
            notification_type=Notification.FOLLOW,
            read=False,
        )
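An illustrative sketch, not part of the diff, of why the import-complete handler switched from create to get_or_create: repeated saves of the same finished import now reuse the existing notification instead of stacking duplicates. The helper function is hypothetical; the field names are taken from the hunk above.

# hypothetical example -- not part of the commit
from bookwyrm.models import Notification


def notify_import_complete(import_job):
    """idempotent: calling this twice for the same job creates one notification"""
    notification, created = Notification.objects.get_or_create(
        user=import_job.user,
        notification_type=Notification.IMPORT,
        related_import=import_job,
    )
    return notification, created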
bookwyrm/models/readthrough.py

@@ -8,7 +8,7 @@ from .base_model import BookWyrmModel


class ProgressMode(models.TextChoices):
    """types of prgress available"""
    """types of progress available"""

    PAGE = "PG", "page"
    PERCENT = "PCT", "percent"

@@ -32,7 +32,7 @@ class ReadThrough(BookWyrmModel):

    def save(self, *args, **kwargs):
        """update user active time"""
        cache.delete(f"latest_read_through-{self.user.id}-{self.book.id}")
        cache.delete(f"latest_read_through-{self.user_id}-{self.book_id}")
        self.user.update_active_date()
        # an active readthrough must have an unset finish date
        if self.finish_date or self.stopped_date:
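An illustrative sketch, not part of the diff, of the Django detail behind the new cache key: user_id and book_id read the raw foreign-key columns already loaded on the instance, whereas user.id and book.id may trigger extra queries to fetch the related rows. The helper below is hypothetical.

# hypothetical example -- not part of the commit
def latest_read_through_cache_key(readthrough):
    """build the cache key without touching the related User or Book rows"""
    # user_id / book_id are the raw FK columns, so no extra queries are issued
    return f"latest_read_through-{readthrough.user_id}-{readthrough.book_id}"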
bookwyrm/models/relationship.py

@@ -4,6 +4,7 @@ from django.db import models, transaction, IntegrityError
from django.db.models import Q

from bookwyrm import activitypub
from bookwyrm.tasks import HIGH
from .activitypub_mixin import ActivitypubMixin, ActivityMixin
from .activitypub_mixin import generate_activity
from .base_model import BookWyrmModel

@@ -33,7 +34,7 @@ class UserRelationship(BookWyrmModel):

    @property
    def recipients(self):
        """the remote user needs to recieve direct broadcasts"""
        """the remote user needs to receive direct broadcasts"""
        return [u for u in [self.user_subject, self.user_object] if not u.local]

    def save(self, *args, **kwargs):

@@ -139,8 +140,9 @@ class UserFollowRequest(ActivitypubMixin, UserRelationship):
            )
        super().save(*args, **kwargs)

        # a local user is following a remote user
        if broadcast and self.user_subject.local and not self.user_object.local:
            self.broadcast(self.to_activity(), self.user_subject)
            self.broadcast(self.to_activity(), self.user_subject, queue=HIGH)

        if self.user_object.local:
            manually_approves = self.user_object.manually_approves_followers

@@ -157,18 +159,23 @@ class UserFollowRequest(ActivitypubMixin, UserRelationship):
    def accept(self, broadcast_only=False):
        """turn this request into the real deal"""
        user = self.user_object
        # broadcast when accepting a remote request
        if not self.user_subject.local:
            activity = activitypub.Accept(
                id=self.get_accept_reject_id(status="accepts"),
                actor=self.user_object.remote_id,
                object=self.to_activity(),
            ).serialize()
            self.broadcast(activity, user)
            self.broadcast(activity, user, queue=HIGH)
        if broadcast_only:
            return

        with transaction.atomic():
            try:
                UserFollows.from_request(self)
            except IntegrityError:
                # this just means we already saved this relationship
                pass
            if self.id:
                self.delete()

@@ -180,7 +187,7 @@ class UserFollowRequest(ActivitypubMixin, UserRelationship):
                actor=self.user_object.remote_id,
                object=self.to_activity(),
            ).serialize()
            self.broadcast(activity, self.user_object)
            self.broadcast(activity, self.user_object, queue=HIGH)

        self.delete()
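An illustrative sketch, not part of the diff, of the flow these hunks touch: follow, accept, and reject activities are now broadcast on the high-priority celery queue. The lookup-by-id helper is an assumption; accept() and its behaviour come from the code above.

# hypothetical example -- not part of the commit
from bookwyrm.models import UserFollowRequest


def approve_follow(request_id):
    """approve a pending follow request"""
    follow_request = UserFollowRequest.objects.get(id=request_id)
    # accept() serializes an ActivityPub Accept, broadcasts it with queue=HIGH
    # when the follower is remote, then converts the request into UserFollows
    follow_request.accept()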
Some files were not shown because too many files have changed in this diff.