Compare commits

..

5 commits

Author       SHA1        Message                                     Date
Mouse Reeve  7789b600e1  Python formatting                           2021-04-22 13:01:05 -07:00
Mouse Reeve  ea0e54e8da  Merge branch 'main' into list-status        2021-04-22 12:59:56 -07:00
Mouse Reeve  deb4676e2f  WIP                                         2021-03-05 06:56:45 -08:00
Mouse Reeve  f5fe746176  Merge branch 'main' into list-status        2021-03-04 17:37:41 -08:00
Mouse Reeve  7a8a228dbe  Markup for posting a status about a list    2021-02-03 15:59:26 -08:00
1009 changed files with 28239 additions and 124672 deletions


@ -32,7 +32,7 @@ indent_size = 2
 max_line_length = off
 # Computer generated files
-[{icons.css,package.json,*.lock,*.mo}]
+[{package.json,*.lock,*.mo}]
 indent_size = unset
 indent_style = unset
 max_line_length = unset

.env.dev.example (new file, +50 lines)

@ -0,0 +1,50 @@
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY="7(2w1sedok=aznpq)ta1mc4i%4h=xx@hxwx*o57ctsuml0x%fr"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG=true
DOMAIN=your.domain.here
#EMAIL=your@email.here
## Leave unset to allow all hosts
# ALLOWED_HOSTS="localhost,127.0.0.1,[::1]"
OL_URL=https://openlibrary.org
## Database backend to use.
## Default is postgres, sqlite is for dev quickstart only (NOT production!!!)
BOOKWYRM_DATABASE_BACKEND=postgres
MEDIA_ROOT=images/
POSTGRES_PASSWORD=fedireads
POSTGRES_USER=fedireads
POSTGRES_DB=fedireads
POSTGRES_HOST=db
# Redis activity stream manager
MAX_STREAM_LENGTH=200
REDIS_ACTIVITY_HOST=redis_activity
REDIS_ACTIVITY_PORT=6379
#REDIS_ACTIVITY_PASSWORD=redispassword345
# Redis as celery broker
#REDIS_BROKER_PORT=6379
#REDIS_BROKER_PASSWORD=redispassword123
CELERY_BROKER=redis://redis_broker:6379/0
CELERY_RESULT_BACKEND=redis://redis_broker:6379/0
FLOWER_PORT=8888
#FLOWER_USER=mouse
#FLOWER_PASSWORD=changeme
EMAIL_HOST="smtp.mailgun.org"
EMAIL_PORT=587
EMAIL_HOST_USER=mail@your.domain.here
EMAIL_HOST_PASSWORD=emailpassword123
EMAIL_USE_TLS=true
EMAIL_USE_SSL=false
# Set this to true when initializing certbot for domain, false when not
CERTBOT_INIT=false
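
For context: values in an env file like this are typically loaded into Django settings with django-environ. A minimal sketch, with variable names taken from the file above; the settings code itself is illustrative and not part of this diff:

import environ

env = environ.Env()
environ.Env.read_env()  # load variables from the .env file into the process environment

SECRET_KEY = env("SECRET_KEY")
DEBUG = env.bool("DEBUG", default=True)
POSTGRES_HOST = env("POSTGRES_HOST", default="db")
MAX_STREAM_LENGTH = env.int("MAX_STREAM_LENGTH", default=200)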


@ -1,110 +0,0 @@
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY="7(2w1sedok=aznpq)ta1mc4i%4h=xx@hxwx*o57ctsuml0x%fr"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG=false
USE_HTTPS=true
DOMAIN=your.domain.here
EMAIL=your@email.here
# Instance default language (see options at bookwyrm/settings.py "LANGUAGES")
LANGUAGE_CODE="en-us"
# Used for deciding which editions to prefer
DEFAULT_LANGUAGE="English"
## Leave unset to allow all hosts
# ALLOWED_HOSTS="localhost,127.0.0.1,[::1]"
MEDIA_ROOT=images/
# Database configuration
PGPORT=5432
POSTGRES_PASSWORD=securedbypassword123
POSTGRES_USER=fedireads
POSTGRES_DB=fedireads
POSTGRES_HOST=db
# Redis activity stream manager
MAX_STREAM_LENGTH=200
REDIS_ACTIVITY_HOST=redis_activity
REDIS_ACTIVITY_PORT=6379
REDIS_ACTIVITY_PASSWORD=redispassword345
# Optional, use a different redis database (defaults to 0)
# REDIS_ACTIVITY_DB_INDEX=0
# Redis as celery broker
REDIS_BROKER_PORT=6379
REDIS_BROKER_PASSWORD=redispassword123
# Optional, use a different redis database (defaults to 0)
# REDIS_BROKER_DB_INDEX=0
# Monitoring for celery
FLOWER_PORT=8888
FLOWER_USER=admin
FLOWER_PASSWORD=changeme
# Email config
EMAIL_HOST=smtp.mailgun.org
EMAIL_PORT=587
EMAIL_HOST_USER=mail@your.domain.here
EMAIL_HOST_PASSWORD=emailpassword123
EMAIL_USE_TLS=true
EMAIL_USE_SSL=false
EMAIL_SENDER_NAME=admin
# defaults to DOMAIN
EMAIL_SENDER_DOMAIN=
# Query timeouts
SEARCH_TIMEOUT=15
QUERY_TIMEOUT=5
# Thumbnails Generation
ENABLE_THUMBNAIL_GENERATION=false
# S3 configuration
USE_S3=false
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
# Commented are example values if you use a non-AWS, S3-compatible service
# AWS S3 should work with only AWS_STORAGE_BUCKET_NAME and AWS_S3_REGION_NAME
# non-AWS S3-compatible services will need AWS_STORAGE_BUCKET_NAME,
# along with both AWS_S3_CUSTOM_DOMAIN and AWS_S3_ENDPOINT_URL
# AWS_STORAGE_BUCKET_NAME= # "example-bucket-name"
# AWS_S3_CUSTOM_DOMAIN=None # "example-bucket-name.s3.fr-par.scw.cloud"
# AWS_S3_REGION_NAME=None # "fr-par"
# AWS_S3_ENDPOINT_URL=None # "https://s3.fr-par.scw.cloud"
# Preview image generation can be computing and storage intensive
# ENABLE_PREVIEW_IMAGES=True
# Specify RGB tuple or RGB hex strings,
# or use_dominant_color_light / use_dominant_color_dark
PREVIEW_BG_COLOR=use_dominant_color_light
# Change to #FFF if you use use_dominant_color_dark
PREVIEW_TEXT_COLOR=#363636
PREVIEW_IMG_WIDTH=1200
PREVIEW_IMG_HEIGHT=630
PREVIEW_DEFAULT_COVER_COLOR=#002549
# Below are example keys if you want to enable automatically
# sending telemetry to an OTLP-compatible service. Many of
# the main monitoring apps have OTLP collectors, including
# NewRelic, DataDog, and Honeycomb.io - consult their
# documentation for setup instructions, and what exactly to
# put below!
#
# Service name is an arbitrary tag that is attached to any
# data sent, used to distinguish different sources. Useful
# for sending prod and dev metrics to the same place and
# keeping them separate, for instance!
# API endpoint for your provider
OTEL_EXPORTER_OTLP_ENDPOINT=
# Any headers required, usually authentication info
OTEL_EXPORTER_OTLP_HEADERS=
# Service name to identify your app
OTEL_SERVICE_NAME=

.env.prod.example (new file, +50 lines)

@ -0,0 +1,50 @@
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY="7(2w1sedok=aznpq)ta1mc4i%4h=xx@hxwx*o57ctsuml0x%fr"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG=false
DOMAIN=your.domain.here
EMAIL=your@email.here
## Leave unset to allow all hosts
# ALLOWED_HOSTS="localhost,127.0.0.1,[::1]"
OL_URL=https://openlibrary.org
## Database backend to use.
## Default is postgres, sqlite is for dev quickstart only (NOT production!!!)
BOOKWYRM_DATABASE_BACKEND=postgres
MEDIA_ROOT=images/
POSTGRES_PASSWORD=securedbpassword123
POSTGRES_USER=fedireads
POSTGRES_DB=fedireads
POSTGRES_HOST=db
# Redis activity stream manager
MAX_STREAM_LENGTH=200
REDIS_ACTIVITY_HOST=redis_activity
REDIS_ACTIVITY_PORT=6379
REDIS_ACTIVITY_PASSWORD=redispassword345
# Redis as celery broker
REDIS_BROKER_PORT=6379
REDIS_BROKER_PASSWORD=redispassword123
CELERY_BROKER=redis://:${REDIS_BROKER_PASSWORD}@redis_broker:${REDIS_BROKER_PORT}/0
CELERY_RESULT_BACKEND=redis://:${REDIS_BROKER_PASSWORD}@redis_broker:${REDIS_BROKER_PORT}/0
FLOWER_PORT=8888
FLOWER_USER=mouse
FLOWER_PASSWORD=changeme
EMAIL_HOST="smtp.mailgun.org"
EMAIL_PORT=587
EMAIL_HOST_USER=mail@your.domain.here
EMAIL_HOST_PASSWORD=emailpassword123
EMAIL_USE_TLS=true
EMAIL_USE_SSL=false
# Set this to true when initializing certbot for domain, false when not
CERTBOT_INIT=false
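
For context: the CELERY_BROKER and CELERY_RESULT_BACKEND values above are Redis URLs of the form redis://:password@host:port/db. A minimal sketch of how such a URL is typically handed to Celery; the app name and fallback URLs here are illustrative assumptions, not part of this diff:

import os
from celery import Celery

app = Celery(
    "celerywyrm",
    # broker and result backend come straight from the env file
    broker=os.environ.get("CELERY_BROKER", "redis://redis_broker:6379/0"),
    backend=os.environ.get("CELERY_RESULT_BACKEND", "redis://redis_broker:6379/0"),
)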


@ -2,7 +2,7 @@
 name: Bug report
 about: Create a report to help us improve
 title: ''
-labels: 'bug'
+labels: ''
 assignees: ''
 ---
@ -23,14 +23,6 @@ A clear and concise description of what you expected to happen.
 **Screenshots**
 If applicable, add screenshots to help explain your problem.
-**Instance**
-On which BookWyrm instance did you encounter this problem.
-**Additional context**
-Add any other context about the problem here.
----
 **Desktop (please complete the following information):**
 - OS: [e.g. iOS]
 - Browser [e.g. chrome, safari]
@ -41,3 +33,6 @@ Add any other context about the problem here.
 - OS: [e.g. iOS8.1]
 - Browser [e.g. stock browser, safari]
 - Version [e.g. 22]
+**Additional context**
+Add any other context about the problem here.


@ -1,10 +1,6 @@
-name: Python Formatting (run ./bw-dev black to fix)
-on:
-  push:
-    branches: [ main ]
-  pull_request:
-    branches: [ main ]
+name: Lint Python
+on: [push, pull_request]
 jobs:
   lint:
@ -12,4 +8,6 @@ jobs:
     steps:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
-      - uses: psf/black@21.4b2
+      - uses: psf/black@stable
+        with:
+          args: ". --check -l 80 -S"


@ -1,28 +0,0 @@
name: Templates validator
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install curlylint
run: pip install curlylint
- name: Run linter
run: >
curlylint --rule 'aria_role: true' \
--rule 'django_forms_rendering: true' \
--rule 'html_has_lang: true' \
--rule 'image_alt: true' \
--rule 'meta_viewport: true' \
--rule 'no_autofocus: true' \
--rule 'tabindex_no_positive: true' \
--exclude '_modal.html|create_status/layout.html|reading_modals/layout.html' \
bookwyrm/templates


@ -9,9 +9,18 @@ jobs:
   build:
     runs-on: ubuntu-20.04
+    strategy:
+      max-parallel: 4
+      matrix:
+        db: [postgres]
+        python-version: [3.9]
+        include:
+          - db: postgres
+            db_port: 5432
     services:
       postgres:
-        image: postgres:13
+        image: postgres:10
         env:
           POSTGRES_USER: postgres
           POSTGRES_PASSWORD: hunter2
@ -24,20 +33,24 @@ jobs:
           - 5432:5432
     steps:
       - uses: actions/checkout@v2
-      - name: Set up Python
+      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
-          python-version: 3.9
+          python-version: ${{ matrix.python-version }}
      - name: Install Dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Run Tests
        env:
+          DB: ${{ matrix.db }}
+          DB_HOST: 127.0.0.1
+          DB_PORT: ${{ matrix.db_port }}
+          DB_PASSWORD: hunter2
          SECRET_KEY: beepbeep
-          DEBUG: false
-          USE_HTTPS: true
+          DEBUG: true
          DOMAIN: your.domain.here
+          OL_URL: https://openlibrary.org
          BOOKWYRM_DATABASE_BACKEND: postgres
          MEDIA_ROOT: images/
          POSTGRES_PASSWORD: hunter2
@ -45,15 +58,11 @@ jobs:
          POSTGRES_DB: github_actions
          POSTGRES_HOST: 127.0.0.1
          CELERY_BROKER: ""
-          REDIS_BROKER_PORT: 6379
-          REDIS_BROKER_PASSWORD: beep
-          USE_DUMMY_CACHE: true
-          FLOWER_PORT: 8888
+          CELERY_RESULT_BACKEND: ""
          EMAIL_HOST: "smtp.mailgun.org"
          EMAIL_PORT: 587
          EMAIL_HOST_USER: ""
          EMAIL_HOST_PASSWORD: ""
          EMAIL_USE_TLS: true
-          ENABLE_PREVIEW_IMAGES: false
        run: |
-          pytest -n 3
+          python manage.py test


@ -1,5 +1,5 @@
 # @url https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions
-name: Lint Frontend (run `./bw-dev stylelint` to fix css errors)
+name: Lint Frontend
 on:
   push:
@ -8,7 +8,7 @@ on:
       - '.github/workflows/**'
       - 'static/**'
       - '.eslintrc'
-      - '.stylelintrc.js'
+      - '.stylelintrc'
   pull_request:
     branches: [ main, ci, frontend ]
@ -22,16 +22,17 @@ jobs:
      - uses: actions/checkout@v2
      - name: Install modules
-       run: npm install stylelint stylelint-config-recommended stylelint-config-standard stylelint-order eslint
+       run: yarn
      # See .stylelintignore for files that are not linted.
      - name: Run stylelint
        run: >
-         npx stylelint bookwyrm/static/css/*.scss bookwyrm/static/css/bookwyrm/**/*.scss \
-         --config dev-tools/.stylelintrc.js
+         yarn stylelint bookwyrm/static/**/*.css \
+         --report-needless-disables \
+         --report-invalid-scope-disables
      # See .eslintignore for files that are not linted.
      - name: Run ESLint
        run: >
-         npx eslint bookwyrm/static \
+         yarn eslint bookwyrm/static \
          --ext .js,.jsx,.ts,.tsx

.github/workflows/lint-global.yaml (vendored, new file, +21 lines)

@ -0,0 +1,21 @@
# @url https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions
name: Lint project globally
on:
push:
branches: [ main, ci ]
pull_request:
branches: [ main, ci ]
jobs:
lint:
name: Lint with EditorConfig.
runs-on: ubuntu-20.04
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v2
- name: EditorConfig
uses: greut/eclint-action@v0


@ -1,23 +0,0 @@
# @url https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions
name: JavaScript Prettier (run ./bw-dev prettier to fix)
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
jobs:
lint:
name: Lint with Prettier
runs-on: ubuntu-20.04
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
- uses: actions/checkout@v2
- name: Install modules
run: npm install prettier
- name: Run Prettier
run: npx prettier --check bookwyrm/static/js/*.js


@ -1,27 +0,0 @@
name: Pylint
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v2
with:
python-version: 3.9
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Analysing the code with pylint
run: |
pylint bookwyrm/

.gitignore (vendored, 11 changed lines)

@ -4,7 +4,6 @@
 *.swp
 **/__pycache__
 .local
-/nginx/nginx.conf
 # VSCode
 /.vscode
@ -16,9 +15,6 @@
 # BookWyrm
 .env
 /images/
-bookwyrm/static/css/bookwyrm.css
-bookwyrm/static/css/themes/
-!bookwyrm/static/css/themes/bookwyrm-*.scss
 # Testing
 .coverage
@ -27,12 +23,7 @@ bookwyrm/static/css/themes/
 .idea
 #Node tools
-node_modules/
-package-lock.json
-yarn.lock
+/node_modules/
 #nginx
 nginx/default.conf
-#macOS
-**/.DS_Store


@ -1 +0,0 @@
**/vendor/*


@ -1,6 +0,0 @@
[MAIN]
ignore=migrations
load-plugins=pylint.extensions.no_self_use
[MESSAGES CONTROL]
disable=E1101,E1135,E1136,R0903,R0901,R0902,W0707,W0511,W0406,R0401,R0801,C3001
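
For context, a hypothetical way to apply the rcfile above from Python; this invocation is illustrative and not part of the diff (the usual CLI equivalent is pylint --rcfile=.pylintrc bookwyrm/):

from pylint.lint import Run

# run pylint over the bookwyrm package with the repository's .pylintrc
Run(["--rcfile=.pylintrc", "bookwyrm/"], exit=False)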

.stylelintrc.js (new file, +17 lines)

@ -0,0 +1,17 @@
/* global module */
module.exports = {
"extends": "stylelint-config-standard",
"plugins": [
"stylelint-order"
],
"rules": {
"order/order": [
"custom-properties",
"declarations"
],
"indentation": 4
}
};


@ -6,7 +6,8 @@ RUN mkdir /app /app/static /app/images
 WORKDIR /app
-RUN apt-get update && apt-get install -y gettext libgettextpo-dev tidy && apt-get clean
 COPY requirements.txt /app/
 RUN pip install -r requirements.txt --no-cache-dir
+RUN apt-get update && apt-get install -y gettext libgettextpo-dev && apt-get clean
+COPY ./bookwyrm ./celerywyrm /app/


@ -9,11 +9,10 @@ Permission is hereby granted, free of charge, to any person or organization (the
 1. The above copyright notice and this permission notice shall be included in all copies or modified versions of the Software.
 2. The User is one of the following:
-  1. An individual person, laboring for themselves
-  2. A non-profit organization
-  3. An educational institution
-  4. An organization that seeks shared profit for all of its members, and allows non-members to set the cost of their labor
+  a. An individual person, laboring for themselves
+  b. A non-profit organization
+  c. An educational institution
+  d. An organization that seeks shared profit for all of its members, and allows non-members to set the cost of their labor
 3. If the User is an organization with owners, then all owners are workers and all workers are owners with equal equity and/or equal vote.


@ -9,18 +9,21 @@ Social reading and reviewing, decentralized with ActivityPub
 - [What it is and isn't](#what-it-is-and-isnt)
 - [The role of federation](#the-role-of-federation)
 - [Features](#features)
-- [Set up BookWyrm](#set-up-bookwyrm)
+- [Book data](#book-data)
+- [Set up Bookwyrm](#set-up-bookwyrm)
 ## Joining BookWyrm
-If you'd like to join an instance, you can check out the [instances](https://joinbookwyrm.com/instances/) list.
+BookWyrm is still a young piece of software, and isn't at the level of stability and feature-richness that you'd find in a production-ready application. But it does what it says on the box! If you'd like to join an instance, you can check out the [instances](https://docs.joinbookwyrm.com/instances.html) list.
+You can request an invite by entering your email address at https://bookwyrm.social.
 ## Contributing
-See [contributing](https://docs.joinbookwyrm.com/contributing.html) for code, translation or monetary contributions.
+See [contributing](https://docs.joinbookwyrm.com/how-to-contribute.html) for code, translation or monetary contributions.
 ## About BookWyrm
 ### What it is and isn't
-BookWyrm is a platform for social reading. You can use it to track what you're reading, review books, and follow your friends. It isn't primarily meant for cataloguing or as a data-source for books, but it does do both of those things to some degree.
+BookWyrm is a platform for social reading! You can use it to track what you're reading, review books, and follow your friends. It isn't primarily meant for cataloguing or as a data-source for books, but it does do both of those things to some degree.
 ### The role of federation
 BookWyrm is built on [ActivityPub](http://activitypub.rocks/). With ActivityPub, it inter-operates with different instances of BookWyrm, and other ActivityPub compliant services, like Mastodon. This means you can run an instance for your book club, and still follow your friend who posts on a server devoted to 20th century Russian speculative fiction. It also means that your friend on mastodon can read and comment on a book review that you post on your BookWyrm instance.
@ -75,5 +78,8 @@ Deployment
 - [Nginx](https://nginx.org/en/) HTTP server
-## Set up BookWyrm
-The [documentation website](https://docs.joinbookwyrm.com/) has instruction on how to set up BookWyrm in a [developer environment](https://docs.joinbookwyrm.com/install-dev.html) or [production](https://docs.joinbookwyrm.com/install-prod.html).
+## Book data
+The application is set up to share book and author data between instances, and get book data from arbitrary outside sources. Right now, the only connector is to OpenLibrary, but other connectors could be written.
+## Set up Bookwyrm
+The [documentation website](https://docs.joinbookwyrm.com/) has instruction on how to set up Bookwyrm in a [developer environment](https://docs.joinbookwyrm.com/developer-environment.html) or [production](https://docs.joinbookwyrm.com/installing-in-production.html).


@ -27,5 +27,5 @@ activity_objects = {c[0]: c[1] for c in cls_members if hasattr(c[1], "to_model")
 def parse(activity_json):
-    """figure out what activity this is and parse it"""
+    """ figure out what activity this is and parse it """
     return naive_parse(activity_objects, activity_json)


@ -1,7 +1,6 @@
""" basics for an activitypub serializer """ """ basics for an activitypub serializer """
from dataclasses import dataclass, fields, MISSING from dataclasses import dataclass, fields, MISSING
from json import JSONEncoder from json import JSONEncoder
import logging
from django.apps import apps from django.apps import apps
from django.db import IntegrityError, transaction from django.db import IntegrityError, transaction
@ -9,24 +8,37 @@ from django.db import IntegrityError, transaction
from bookwyrm.connectors import ConnectorException, get_data from bookwyrm.connectors import ConnectorException, get_data
from bookwyrm.tasks import app from bookwyrm.tasks import app
logger = logging.getLogger(__name__)
class ActivitySerializerError(ValueError): class ActivitySerializerError(ValueError):
"""routine problems serializing activitypub json""" """ routine problems serializing activitypub json """
class ActivityEncoder(JSONEncoder): class ActivityEncoder(JSONEncoder):
"""used to convert an Activity object into json""" """ used to convert an Activity object into json """
def default(self, o): def default(self, o):
return o.__dict__ return o.__dict__
@dataclass @dataclass
# pylint: disable=invalid-name class Link:
""" for tagging a book in a status """
href: str
name: str
type: str = "Link"
@dataclass
class Mention(Link):
""" a subtype of Link for mentioning an actor """
type: str = "Mention"
@dataclass
class Signature: class Signature:
"""public key block""" """ public key block """
creator: str creator: str
created: str created: str
@ -35,27 +47,27 @@ class Signature:
def naive_parse(activity_objects, activity_json, serializer=None): def naive_parse(activity_objects, activity_json, serializer=None):
"""this navigates circular import issues by looking up models' serializers""" """ this navigates circular import issues """
if not serializer: if not serializer:
if activity_json.get("publicKeyPem"): if activity_json.get("publicKeyPem"):
# ugh # ugh
activity_json["type"] = "PublicKey" activity_json["type"] = "PublicKey"
activity_type = activity_json.get("type") activity_type = activity_json.get("type")
if activity_type in ["Question", "Article"]:
return None
try: try:
serializer = activity_objects[activity_type] serializer = activity_objects[activity_type]
except KeyError as err: except KeyError as e:
# we know this exists and that we can't handle it # we know this exists and that we can't handle it
raise ActivitySerializerError(err) if activity_type in ["Question"]:
return None
raise ActivitySerializerError(e)
return serializer(activity_objects=activity_objects, **activity_json) return serializer(activity_objects=activity_objects, **activity_json)
@dataclass(init=False) @dataclass(init=False)
class ActivityObject: class ActivityObject:
"""actor activitypub json""" """ actor activitypub json """
id: str id: str
type: str type: str
@ -68,7 +80,7 @@ class ActivityObject:
try: try:
value = kwargs[field.name] value = kwargs[field.name]
if value in (None, MISSING, {}): if value in (None, MISSING, {}):
raise KeyError("Missing required field", field.name) raise KeyError()
try: try:
is_subclass = issubclass(field.type, ActivityObject) is_subclass = issubclass(field.type, ActivityObject)
except TypeError: except TypeError:
@ -88,16 +100,13 @@ class ActivityObject:
except KeyError: except KeyError:
if field.default == MISSING and field.default_factory == MISSING: if field.default == MISSING and field.default_factory == MISSING:
raise ActivitySerializerError( raise ActivitySerializerError(
f"Missing required field: {field.name}" "Missing required field: %s" % field.name
) )
value = field.default value = field.default
setattr(self, field.name, value) setattr(self, field.name, value)
# pylint: disable=too-many-locals,too-many-branches,too-many-arguments def to_model(self, model=None, instance=None, allow_create=True, save=True):
def to_model( """ convert from an activity to a model instance """
self, model=None, instance=None, allow_create=True, save=True, overwrite=True
):
"""convert from an activity to a model instance"""
model = model or get_model_from_type(self.type) model = model or get_model_from_type(self.type)
# only reject statuses if we're potentially creating them # only reject statuses if we're potentially creating them
@ -116,41 +125,27 @@ class ActivityObject:
return None return None
instance = instance or model() instance = instance or model()
# keep track of what we've changed
update_fields = []
# sets field on the model using the activity value
for field in instance.simple_fields: for field in instance.simple_fields:
try: try:
changed = field.set_field_from_activity( field.set_field_from_activity(instance, self)
instance, self, overwrite=overwrite
)
if changed:
update_fields.append(field.name)
except AttributeError as e: except AttributeError as e:
raise ActivitySerializerError(e) raise ActivitySerializerError(e)
# image fields have to be set after other fields because they can save # image fields have to be set after other fields because they can save
# too early and jank up users # too early and jank up users
for field in instance.image_fields: for field in instance.image_fields:
changed = field.set_field_from_activity( field.set_field_from_activity(instance, self, save=save)
instance, self, save=save, overwrite=overwrite
)
if changed:
update_fields.append(field.name)
if not save: if not save:
return instance return instance
with transaction.atomic(): with transaction.atomic():
# can't force an update on fields unless the object already exists in the db
if not instance.id:
update_fields = None
# we can't set many to many and reverse fields on an unsaved object # we can't set many to many and reverse fields on an unsaved object
try: try:
try: try:
instance.save(broadcast=False, update_fields=update_fields) instance.save(broadcast=False)
except TypeError: except TypeError:
instance.save(update_fields=update_fields) instance.save()
except IntegrityError as e: except IntegrityError as e:
raise ActivitySerializerError(e) raise ActivitySerializerError(e)
@ -185,9 +180,8 @@ class ActivityObject:
) )
return instance return instance
def serialize(self, **kwargs): def serialize(self):
"""convert to dictionary with context attr""" """ convert to dictionary with context attr """
omit = kwargs.get("omit", ())
data = self.__dict__.copy() data = self.__dict__.copy()
# recursively serialize # recursively serialize
for (k, v) in data.items(): for (k, v) in data.items():
@ -196,51 +190,53 @@ class ActivityObject:
data[k] = v.serialize() data[k] = v.serialize()
except TypeError: except TypeError:
pass pass
data = {k: v for (k, v) in data.items() if v is not None and k not in omit} data = {k: v for (k, v) in data.items() if v is not None}
if "@context" not in omit: data["@context"] = "https://www.w3.org/ns/activitystreams"
data["@context"] = "https://www.w3.org/ns/activitystreams"
return data return data
@app.task(queue="medium_priority") @app.task
@transaction.atomic @transaction.atomic
def set_related_field( def set_related_field(
model_name, origin_model_name, related_field_name, related_remote_id, data model_name, origin_model_name, related_field_name, related_remote_id, data
): ):
"""load reverse related fields (editions, attachments) without blocking""" """ load reverse related fields (editions, attachments) without blocking """
model = apps.get_model(f"bookwyrm.{model_name}", require_ready=True) model = apps.get_model("bookwyrm.%s" % model_name, require_ready=True)
origin_model = apps.get_model(f"bookwyrm.{origin_model_name}", require_ready=True) origin_model = apps.get_model("bookwyrm.%s" % origin_model_name, require_ready=True)
if isinstance(data, str): with transaction.atomic():
existing = model.find_existing_by_remote_id(data) if isinstance(data, str):
if existing: existing = model.find_existing_by_remote_id(data)
data = existing.to_activity() if existing:
else: data = existing.to_activity()
data = get_data(data) else:
activity = model.activity_serializer(**data) data = get_data(data)
activity = model.activity_serializer(**data)
# this must exist because it's the object that triggered this function # this must exist because it's the object that triggered this function
instance = origin_model.find_existing_by_remote_id(related_remote_id) instance = origin_model.find_existing_by_remote_id(related_remote_id)
if not instance: if not instance:
raise ValueError(f"Invalid related remote id: {related_remote_id}") raise ValueError("Invalid related remote id: %s" % related_remote_id)
# set the origin's remote id on the activity so it will be there when # set the origin's remote id on the activity so it will be there when
# the model instance is created # the model instance is created
# edition.parentWork = instance, for example # edition.parentWork = instance, for example
model_field = getattr(model, related_field_name) model_field = getattr(model, related_field_name)
if hasattr(model_field, "activitypub_field"): if hasattr(model_field, "activitypub_field"):
setattr(activity, getattr(model_field, "activitypub_field"), instance.remote_id) setattr(
item = activity.to_model(model=model) activity, getattr(model_field, "activitypub_field"), instance.remote_id
)
item = activity.to_model()
# if the related field isn't serialized (attachments on Status), then # if the related field isn't serialized (attachments on Status), then
# we have to set it post-creation # we have to set it post-creation
if not hasattr(model_field, "activitypub_field"): if not hasattr(model_field, "activitypub_field"):
setattr(item, related_field_name, instance) setattr(item, related_field_name, instance)
item.save() item.save()
def get_model_from_type(activity_type): def get_model_from_type(activity_type):
"""given the activity, what type of model""" """ given the activity, what type of model """
models = apps.get_models() models = apps.get_models()
model = [ model = [
m m
@ -251,7 +247,7 @@ def get_model_from_type(activity_type):
] ]
if not model: if not model:
raise ActivitySerializerError( raise ActivitySerializerError(
f'No model found for activity type "{activity_type}"' 'No model found for activity type "%s"' % activity_type
) )
return model[0] return model[0]
@ -259,10 +255,8 @@ def get_model_from_type(activity_type):
def resolve_remote_id( def resolve_remote_id(
remote_id, model=None, refresh=False, save=True, get_activity=False remote_id, model=None, refresh=False, save=True, get_activity=False
): ):
"""take a remote_id and return an instance, creating if necessary""" """ take a remote_id and return an instance, creating if necessary """
if model: # a bonus check we can do if we already know the model if model: # a bonus check we can do if we already know the model
if isinstance(model, str):
model = apps.get_model(f"bookwyrm.{model}", require_ready=True)
result = model.find_existing_by_remote_id(remote_id) result = model.find_existing_by_remote_id(remote_id)
if result and not refresh: if result and not refresh:
return result if not get_activity else result.to_activity_dataclass() return result if not get_activity else result.to_activity_dataclass()
@ -271,9 +265,9 @@ def resolve_remote_id(
try: try:
data = get_data(remote_id) data = get_data(remote_id)
except ConnectorException: except ConnectorException:
logger.exception("Could not connect to host for remote_id: %s", remote_id) raise ActivitySerializerError(
return None "Could not connect to host for remote_id in: %s" % (remote_id)
)
# determine the model implicitly, if not provided # determine the model implicitly, if not provided
# or if it's a model with subclasses like Status, check again # or if it's a model with subclasses like Status, check again
if not model or hasattr(model.objects, "select_subclasses"): if not model or hasattr(model.objects, "select_subclasses"):
@ -290,28 +284,3 @@ def resolve_remote_id(
# if we're refreshing, "result" will be set and we'll update it # if we're refreshing, "result" will be set and we'll update it
return item.to_model(model=model, instance=result, save=save) return item.to_model(model=model, instance=result, save=save)
@dataclass(init=False)
class Link(ActivityObject):
"""for tagging a book in a status"""
href: str
name: str = None
mediaType: str = None
id: str = None
attributedTo: str = None
availability: str = None
type: str = "Link"
def serialize(self, **kwargs):
"""remove fields"""
omit = ("id", "type", "@context")
return super().serialize(omit=omit)
@dataclass(init=False)
class Mention(Link):
"""a subtype of Link for mentioning an actor"""
type: str = "Mention"


@ -6,32 +6,14 @@ from .base_activity import ActivityObject
from .image import Document from .image import Document
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class BookData(ActivityObject): class Book(ActivityObject):
"""shared fields for all book data and authors""" """ serializes an edition or work, abstract """
openlibraryKey: str = None
inventaireId: str = None
librarythingKey: str = None
goodreadsKey: str = None
bnfId: str = None
viaf: str = None
wikidata: str = None
asin: str = None
lastEditedBy: str = None
links: List[str] = field(default_factory=lambda: [])
fileLinks: List[str] = field(default_factory=lambda: [])
# pylint: disable=invalid-name
@dataclass(init=False)
class Book(BookData):
"""serializes an edition or work, abstract"""
title: str title: str
sortTitle: str = None lastEditedBy: str = None
subtitle: str = None sortTitle: str = ""
subtitle: str = ""
description: str = "" description: str = ""
languages: List[str] = field(default_factory=lambda: []) languages: List[str] = field(default_factory=lambda: [])
series: str = "" series: str = ""
@ -43,50 +25,53 @@ class Book(BookData):
firstPublishedDate: str = "" firstPublishedDate: str = ""
publishedDate: str = "" publishedDate: str = ""
openlibraryKey: str = ""
librarythingKey: str = ""
goodreadsKey: str = ""
cover: Document = None cover: Document = None
type: str = "Book" type: str = "Book"
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class Edition(Book): class Edition(Book):
"""Edition instance of a book object""" """ Edition instance of a book object """
work: str work: str
isbn10: str = "" isbn10: str = ""
isbn13: str = "" isbn13: str = ""
oclcNumber: str = "" oclcNumber: str = ""
asin: str = ""
pages: int = None pages: int = None
physicalFormat: str = "" physicalFormat: str = ""
physicalFormatDetail: str = ""
publishers: List[str] = field(default_factory=lambda: []) publishers: List[str] = field(default_factory=lambda: [])
editionRank: int = 0 editionRank: int = 0
type: str = "Edition" type: str = "Edition"
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class Work(Book): class Work(Book):
"""work instance of a book object""" """ work instance of a book object """
lccn: str = "" lccn: str = ""
defaultEdition: str = ""
editions: List[str] = field(default_factory=lambda: []) editions: List[str] = field(default_factory=lambda: [])
type: str = "Work" type: str = "Work"
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class Author(BookData): class Author(ActivityObject):
"""author of a book""" """ author of a book """
name: str name: str
isni: str = None lastEditedBy: str = None
viafId: str = None
gutenbergId: str = None
born: str = None born: str = None
died: str = None died: str = None
aliases: List[str] = field(default_factory=lambda: []) aliases: List[str] = field(default_factory=lambda: [])
bio: str = "" bio: str = ""
openlibraryKey: str = ""
librarythingKey: str = ""
goodreadsKey: str = ""
wikipediaLink: str = "" wikipediaLink: str = ""
type: str = "Author" type: str = "Author"


@ -5,7 +5,7 @@ from .base_activity import ActivityObject
 @dataclass(init=False)
 class Document(ActivityObject):
-    """a document"""
+    """ a document """
     url: str
     name: str = ""
@ -15,6 +15,6 @@ class Document(ActivityObject):
 @dataclass(init=False)
 class Image(Document):
-    """an image"""
+    """ an image """
     type: str = "Image"


@ -9,20 +9,19 @@ from .image import Document
@dataclass(init=False) @dataclass(init=False)
class Tombstone(ActivityObject): class Tombstone(ActivityObject):
"""the placeholder for a deleted status""" """ the placeholder for a deleted status """
type: str = "Tombstone" type: str = "Tombstone"
def to_model(self, *args, **kwargs): # pylint: disable=unused-argument def to_model(self, *args, **kwargs): # pylint: disable=unused-argument
"""this should never really get serialized, just searched for""" """ this should never really get serialized, just searched for """
model = apps.get_model("bookwyrm.Status") model = apps.get_model("bookwyrm.Status")
return model.find_existing_by_remote_id(self.id) return model.find_existing_by_remote_id(self.id)
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class Note(ActivityObject): class Note(ActivityObject):
"""Note activity""" """ Note activity """
published: str published: str
attributedTo: str attributedTo: str
@ -30,18 +29,17 @@ class Note(ActivityObject):
to: List[str] = field(default_factory=lambda: []) to: List[str] = field(default_factory=lambda: [])
cc: List[str] = field(default_factory=lambda: []) cc: List[str] = field(default_factory=lambda: [])
replies: Dict = field(default_factory=lambda: {}) replies: Dict = field(default_factory=lambda: {})
inReplyTo: str = None inReplyTo: str = ""
summary: str = None summary: str = ""
tag: List[Link] = field(default_factory=lambda: []) tag: List[Link] = field(default_factory=lambda: [])
attachment: List[Document] = field(default_factory=lambda: []) attachment: List[Document] = field(default_factory=lambda: [])
sensitive: bool = False sensitive: bool = False
updated: str = None
type: str = "Note" type: str = "Note"
@dataclass(init=False) @dataclass(init=False)
class Article(Note): class Article(Note):
"""what's an article except a note with more fields""" """ what's an article except a note with more fields """
name: str name: str
type: str = "Article" type: str = "Article"
@ -49,36 +47,30 @@ class Article(Note):
@dataclass(init=False) @dataclass(init=False)
class GeneratedNote(Note): class GeneratedNote(Note):
"""just a re-typed note""" """ just a re-typed note """
type: str = "GeneratedNote" type: str = "GeneratedNote"
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class Comment(Note): class Comment(Note):
"""like a note but with a book""" """ like a note but with a book """
inReplyToBook: str inReplyToBook: str
readingStatus: str = None
progress: int = None
progressMode: str = None
type: str = "Comment" type: str = "Comment"
@dataclass(init=False) @dataclass(init=False)
class Quotation(Comment): class Quotation(Comment):
"""a quote and commentary on a book""" """ a quote and commentary on a book """
quote: str quote: str
position: int = None
positionMode: str = None
type: str = "Quotation" type: str = "Quotation"
@dataclass(init=False) @dataclass(init=False)
class Review(Comment): class Review(Comment):
"""a full book review""" """ a full book review """
name: str = None name: str = None
rating: int = None rating: int = None
@ -87,9 +79,8 @@ class Review(Comment):
@dataclass(init=False) @dataclass(init=False)
class Rating(Comment): class Rating(Comment):
"""just a star rating""" """ just a star rating """
rating: int rating: int
content: str = None content: str = None
name: str = None # not used, but the model inherits from Review
type: str = "Rating" type: str = "Rating"


@ -5,10 +5,9 @@ from typing import List
from .base_activity import ActivityObject from .base_activity import ActivityObject
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class OrderedCollection(ActivityObject): class OrderedCollection(ActivityObject):
"""structure of an ordered collection activity""" """ structure of an ordered collection activity """
totalItems: int totalItems: int
first: str first: str
@ -18,10 +17,9 @@ class OrderedCollection(ActivityObject):
type: str = "OrderedCollection" type: str = "OrderedCollection"
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class OrderedCollectionPrivate(OrderedCollection): class OrderedCollectionPrivate(OrderedCollection):
"""an ordered collection with privacy settings""" """ an ordered collection with privacy settings """
to: List[str] = field(default_factory=lambda: []) to: List[str] = field(default_factory=lambda: [])
cc: List[str] = field(default_factory=lambda: []) cc: List[str] = field(default_factory=lambda: [])
@ -29,24 +27,23 @@ class OrderedCollectionPrivate(OrderedCollection):
@dataclass(init=False) @dataclass(init=False)
class Shelf(OrderedCollectionPrivate): class Shelf(OrderedCollectionPrivate):
"""structure of an ordered collection activity""" """ structure of an ordered collection activity """
type: str = "Shelf" type: str = "Shelf"
@dataclass(init=False) @dataclass(init=False)
class BookList(OrderedCollectionPrivate): class BookList(OrderedCollectionPrivate):
"""structure of an ordered collection activity""" """ structure of an ordered collection activity """
summary: str = None summary: str = None
curation: str = "closed" curation: str = "closed"
type: str = "BookList" type: str = "BookList"
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class OrderedCollectionPage(ActivityObject): class OrderedCollectionPage(ActivityObject):
"""structure of an ordered collection activity""" """ structure of an ordered collection activity """
partOf: str partOf: str
orderedItems: List orderedItems: List
@ -57,7 +54,7 @@ class OrderedCollectionPage(ActivityObject):
@dataclass(init=False) @dataclass(init=False)
class CollectionItem(ActivityObject): class CollectionItem(ActivityObject):
"""an item in a collection""" """ an item in a collection """
actor: str actor: str
type: str = "CollectionItem" type: str = "CollectionItem"
@ -65,7 +62,7 @@ class CollectionItem(ActivityObject):
@dataclass(init=False) @dataclass(init=False)
class ListItem(CollectionItem): class ListItem(CollectionItem):
"""a book on a list""" """ a book on a list """
book: str book: str
notes: str = None notes: str = None
@ -76,7 +73,7 @@ class ListItem(CollectionItem):
@dataclass(init=False) @dataclass(init=False)
class ShelfItem(CollectionItem): class ShelfItem(CollectionItem):
"""a book on a list""" """ a book on a list """
book: str book: str
type: str = "ShelfItem" type: str = "ShelfItem"


@ -6,25 +6,18 @@ from .base_activity import ActivityObject
from .image import Image from .image import Image
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class PublicKey(ActivityObject): class PublicKey(ActivityObject):
"""public key block""" """ public key block """
owner: str owner: str
publicKeyPem: str publicKeyPem: str
type: str = "PublicKey" type: str = "PublicKey"
def serialize(self, **kwargs):
"""remove fields"""
omit = ("type", "@context")
return super().serialize(omit=omit)
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class Person(ActivityObject): class Person(ActivityObject):
"""actor activitypub json""" """ actor activitypub json """
preferredUsername: str preferredUsername: str
inbox: str inbox: str
@ -39,5 +32,4 @@ class Person(ActivityObject):
bookwyrmUser: bool = False bookwyrmUser: bool = False
manuallyApprovesFollowers: str = False manuallyApprovesFollowers: str = False
discoverable: str = False discoverable: str = False
hideFollows: str = False
type: str = "Person" type: str = "Person"


@ -1,4 +1,3 @@
-""" ActivityPub-specific json response wrapper """
 from django.http import JsonResponse
 from .base_activity import ActivityEncoder


@ -9,23 +9,22 @@ from .ordered_collection import CollectionItem
@dataclass(init=False) @dataclass(init=False)
class Verb(ActivityObject): class Verb(ActivityObject):
"""generic fields for activities""" """generic fields for activities """
actor: str actor: str
object: ActivityObject object: ActivityObject
def action(self): def action(self):
"""usually we just want to update and save""" """ usually we just want to update and save """
# self.object may return None if the object is invalid in an expected way # self.object may return None if the object is invalid in an expected way
# ie, Question type # ie, Question type
if self.object: if self.object:
self.object.to_model() self.object.to_model()
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class Create(Verb): class Create(Verb):
"""Create activity""" """ Create activity """
to: List[str] to: List[str]
cc: List[str] = field(default_factory=lambda: []) cc: List[str] = field(default_factory=lambda: [])
@ -33,17 +32,16 @@ class Create(Verb):
type: str = "Create" type: str = "Create"
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class Delete(Verb): class Delete(Verb):
"""Create activity""" """ Create activity """
to: List[str] = field(default_factory=lambda: []) to: List[str]
cc: List[str] = field(default_factory=lambda: []) cc: List[str] = field(default_factory=lambda: [])
type: str = "Delete" type: str = "Delete"
def action(self): def action(self):
"""find and delete the activity object""" """ find and delete the activity object """
if not self.object: if not self.object:
return return
@ -59,29 +57,27 @@ class Delete(Verb):
# if we can't find it, we don't need to delete it because we don't have it # if we can't find it, we don't need to delete it because we don't have it
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class Update(Verb): class Update(Verb):
"""Update activity""" """ Update activity """
to: List[str] to: List[str]
type: str = "Update" type: str = "Update"
def action(self): def action(self):
"""update a model instance from the dataclass""" """ update a model instance from the dataclass """
if not self.object: if self.object:
return self.object.to_model(allow_create=False)
self.object.to_model(allow_create=False)
@dataclass(init=False) @dataclass(init=False)
class Undo(Verb): class Undo(Verb):
"""Undo an activity""" """ Undo an activity """
type: str = "Undo" type: str = "Undo"
def action(self): def action(self):
"""find and remove the activity object""" """ find and remove the activity object """
if isinstance(self.object, str): if isinstance(self.object, str):
# it may be that sometihng should be done with these, but idk what # it may be that sometihng should be done with these, but idk what
# this seems just to be coming from pleroma # this seems just to be coming from pleroma
@ -107,64 +103,64 @@ class Undo(Verb):
@dataclass(init=False) @dataclass(init=False)
class Follow(Verb): class Follow(Verb):
"""Follow activity""" """ Follow activity """
object: str object: str
type: str = "Follow" type: str = "Follow"
def action(self): def action(self):
"""relationship save""" """ relationship save """
self.to_model() self.to_model()
@dataclass(init=False) @dataclass(init=False)
class Block(Verb): class Block(Verb):
"""Block activity""" """ Block activity """
object: str object: str
type: str = "Block" type: str = "Block"
def action(self): def action(self):
"""relationship save""" """ relationship save """
self.to_model() self.to_model()
@dataclass(init=False) @dataclass(init=False)
class Accept(Verb): class Accept(Verb):
"""Accept activity""" """ Accept activity """
object: Follow object: Follow
type: str = "Accept" type: str = "Accept"
def action(self): def action(self):
"""accept a request""" """ find and remove the activity object """
obj = self.object.to_model(save=False, allow_create=True) obj = self.object.to_model(save=False, allow_create=False)
obj.accept() obj.accept()
@dataclass(init=False) @dataclass(init=False)
class Reject(Verb): class Reject(Verb):
"""Reject activity""" """ Reject activity """
object: Follow object: Follow
type: str = "Reject" type: str = "Reject"
def action(self): def action(self):
"""reject a follow request""" """ find and remove the activity object """
obj = self.object.to_model(save=False, allow_create=False) obj = self.object.to_model(save=False, allow_create=False)
obj.reject() obj.reject()
@dataclass(init=False) @dataclass(init=False)
class Add(Verb): class Add(Verb):
"""Add activity""" """Add activity """
target: ActivityObject target: ActivityObject
object: CollectionItem object: CollectionItem
type: str = "Add" type: str = "Add"
def action(self): def action(self):
"""figure out the target to assign the item to a collection""" """ figure out the target to assign the item to a collection """
target = resolve_remote_id(self.target) target = resolve_remote_id(self.target)
item = self.object.to_model(save=False) item = self.object.to_model(save=False)
setattr(item, item.collection_field, target) setattr(item, item.collection_field, target)
@ -173,12 +169,12 @@ class Add(Verb):
@dataclass(init=False) @dataclass(init=False)
class Remove(Add): class Remove(Add):
"""Remove activity""" """Remove activity """
type: str = "Remove" type: str = "Remove"
def action(self): def action(self):
"""find and remove the activity object""" """ find and remove the activity object """
obj = self.object.to_model(save=False, allow_create=False) obj = self.object.to_model(save=False, allow_create=False)
if obj: if obj:
obj.delete() obj.delete()
@ -186,20 +182,19 @@ class Remove(Add):
@dataclass(init=False) @dataclass(init=False)
class Like(Verb): class Like(Verb):
"""a user faving an object""" """ a user faving an object """
object: str object: str
type: str = "Like" type: str = "Like"
def action(self): def action(self):
"""like""" """ like """
self.to_model() self.to_model()
# pylint: disable=invalid-name
@dataclass(init=False) @dataclass(init=False)
class Announce(Verb): class Announce(Verb):
"""boosting a status""" """ boosting a status """
published: str published: str
to: List[str] = field(default_factory=lambda: []) to: List[str] = field(default_factory=lambda: [])
@ -208,5 +203,5 @@ class Announce(Verb):
type: str = "Announce" type: str = "Announce"
def action(self): def action(self):
"""boost""" """ boost """
self.to_model() self.to_model()


@ -1,104 +1,73 @@
""" access the activity streams stored in redis """ """ access the activity streams stored in redis """
from datetime import timedelta
from django.dispatch import receiver from django.dispatch import receiver
from django.db import transaction
from django.db.models import signals, Q from django.db.models import signals, Q
from django.utils import timezone
from bookwyrm import models from bookwyrm import models
from bookwyrm.redis_store import RedisStore, r from bookwyrm.redis_store import RedisStore, r
from bookwyrm.tasks import app, LOW, MEDIUM, HIGH from bookwyrm.views.helpers import privacy_filter
class ActivityStream(RedisStore): class ActivityStream(RedisStore):
"""a category of activity stream (like home, local, books)""" """ a category of activity stream (like home, local, federated) """
def stream_id(self, user): def stream_id(self, user):
"""the redis key for this user's instance of this stream""" """ the redis key for this user's instance of this stream """
return f"{user.id}-{self.key}" return "{}-{}".format(user.id, self.key)
def unread_id(self, user): def unread_id(self, user):
"""the redis key for this user's unread count for this stream""" """ the redis key for this user's unread count for this stream """
stream_id = self.stream_id(user) return "{}-unread".format(self.stream_id(user))
return f"{stream_id}-unread"
def unread_by_status_type_id(self, user):
"""the redis key for this user's unread count for this stream"""
stream_id = self.stream_id(user)
return f"{stream_id}-unread-by-type"
def get_rank(self, obj): # pylint: disable=no-self-use def get_rank(self, obj): # pylint: disable=no-self-use
"""statuses are sorted by date published""" """ statuses are sorted by date published """
return obj.published_date.timestamp() return obj.published_date.timestamp()
def add_status(self, status, increment_unread=False): def add_status(self, status):
"""add a status to users' feeds""" """ add a status to users' feeds """
# the pipeline contains all the add-to-stream activities # the pipeline contains all the add-to-stream activities
pipeline = self.add_object_to_related_stores(status, execute=False) pipeline = self.add_object_to_related_stores(status, execute=False)
if increment_unread: for user in self.get_audience(status):
for user in self.get_audience(status): # add to the unread status count
# add to the unread status count pipeline.incr(self.unread_id(user))
pipeline.incr(self.unread_id(user))
# add to the unread status count for status type
pipeline.hincrby(
self.unread_by_status_type_id(user), get_status_type(status), 1
)
# and go! # and go!
pipeline.execute() pipeline.execute()
def add_user_statuses(self, viewer, user): def add_user_statuses(self, viewer, user):
"""add a user's statuses to another user's feed""" """ add a user's statuses to another user's feed """
# only add the statuses that the viewer should be able to see (ie, not dms) # only add the statuses that the viewer should be able to see (ie, not dms)
statuses = models.Status.privacy_filter(viewer).filter(user=user) statuses = privacy_filter(viewer, user.status_set.all())
self.bulk_add_objects_to_store(statuses, self.stream_id(viewer)) self.bulk_add_objects_to_store(statuses, self.stream_id(viewer))
def remove_user_statuses(self, viewer, user): def remove_user_statuses(self, viewer, user):
"""remove a user's status from another user's feed""" """ remove a user's status from another user's feed """
# remove all so that followers only statuses are removed # remove all so that followers only statuses are removed
statuses = user.status_set.all() statuses = user.status_set.all()
self.bulk_remove_objects_from_store(statuses, self.stream_id(viewer)) self.bulk_remove_objects_from_store(statuses, self.stream_id(viewer))
def get_activity_stream(self, user): def get_activity_stream(self, user):
"""load the statuses to be displayed""" """ load the statuses to be displayed """
# clear unreads for this feed # clear unreads for this feed
r.set(self.unread_id(user), 0) r.set(self.unread_id(user), 0)
r.delete(self.unread_by_status_type_id(user))
statuses = self.get_store(self.stream_id(user)) statuses = self.get_store(self.stream_id(user))
return ( return (
models.Status.objects.select_subclasses() models.Status.objects.select_subclasses()
.filter(id__in=statuses) .filter(id__in=statuses)
.select_related(
"user",
"reply_parent",
"comment__book",
"review__book",
"quotation__book",
)
.prefetch_related("mention_books", "mention_users")
.order_by("-published_date") .order_by("-published_date")
) )
def get_unread_count(self, user): def get_unread_count(self, user):
"""get the unread status count for this user's feed""" """ get the unread status count for this user's feed """
return int(r.get(self.unread_id(user)) or 0) return int(r.get(self.unread_id(user)) or 0)
def get_unread_count_by_status_type(self, user):
"""get the unread status count for this user's feed's status types"""
status_types = r.hgetall(self.unread_by_status_type_id(user))
return {
str(key.decode("utf-8")): int(value) or 0
for key, value in status_types.items()
}
def populate_streams(self, user):
"""go from zero to a timeline"""
self.populate_store(self.stream_id(user))
def get_audience(self, status):  # pylint: disable=no-self-use
"""given a status, what users should see it"""
# direct messages don't appear in feeds, direct comments/reviews/etc do
if status.privacy == "direct" and status.status_type == "Note":
return []
@ -129,9 +98,10 @@ class ActivityStream(RedisStore):
return [self.stream_id(u) for u in self.get_audience(obj)]
def get_statuses_for_user(self, user):  # pylint: disable=no-self-use
"""given a user, what statuses should they see on this stream"""
return models.Status.privacy_filter(
user,
privacy_levels=["public", "unlisted", "followers"],
)
@ -141,7 +111,7 @@ class ActivityStream(RedisStore):
class HomeStream(ActivityStream):
"""users you follow"""
key = "home"
@ -155,20 +125,16 @@ class HomeStream(ActivityStream):
).distinct()
def get_statuses_for_user(self, user):
return models.Status.privacy_filter(
user,
privacy_levels=["public", "unlisted", "followers"],
).exclude(
~Q(  # remove everything except
Q(user__followers=user)  # user following
| Q(user=user)  # is self
| Q(mention_users=user)  # mentions user
),
)
class LocalStream(ActivityStream):
"""users you follow"""
key = "local"
@ -180,372 +146,124 @@ class LocalStream(ActivityStream):
def get_statuses_for_user(self, user):
# all public statuses by a local user
return models.Status.privacy_filter(
user,
privacy_levels=["public"],
).filter(user__local=True)
class BooksStream(ActivityStream):
"""books on your shelves"""
key = "books"
def get_audience(self, status):
"""anyone with the mentioned book on their shelves"""
# only show public statuses on the books feed,
# and only statuses that mention books
if status.privacy != "public" or not (
status.mention_books.exists() or hasattr(status, "book")
):
return []
work = (
status.book.parent_work
if hasattr(status, "book")
else status.mention_books.first().parent_work
)
audience = super().get_audience(status)
if not audience:
return []
return audience.filter(shelfbook__book__parent_work=work).distinct()
def get_statuses_for_user(self, user):
"""any public status that mentions the user's books"""
books = user.shelfbook_set.values_list(
"book__parent_work__id", flat=True
).distinct()
return (
models.Status.privacy_filter(
user,
privacy_levels=["public"],
)
.filter(
Q(comment__book__parent_work__id__in=books)
| Q(quotation__book__parent_work__id__in=books)
| Q(review__book__parent_work__id__in=books)
| Q(mention_books__parent_work__id__in=books)
)
.distinct()
)
def add_book_statuses(self, user, book):
"""add statuses about a book to a user's feed"""
work = book.parent_work
statuses = (
models.Status.privacy_filter(
user,
privacy_levels=["public"],
)
.filter(
Q(comment__book__parent_work=work)
| Q(quotation__book__parent_work=work)
| Q(review__book__parent_work=work)
| Q(mention_books__parent_work=work)
)
.distinct()
)
self.bulk_add_objects_to_store(statuses, self.stream_id(user))
def remove_book_statuses(self, user, book):
"""add statuses about a book to a user's feed"""
work = book.parent_work
statuses = (
models.Status.privacy_filter(
user,
privacy_levels=["public"],
)
.filter(
Q(comment__book__parent_work=work)
| Q(quotation__book__parent_work=work)
| Q(review__book__parent_work=work)
| Q(mention_books__parent_work=work)
)
.distinct()
)
self.bulk_remove_objects_from_store(statuses, self.stream_id(user))
# determine which streams are enabled in settings.py
streams = {
"home": HomeStream(),
"local": LocalStream(),
"books": BooksStream(),
}
@receiver(signals.post_save)
# pylint: disable=unused-argument
def add_status_on_create(sender, instance, created, *args, **kwargs):
"""add newly created statuses to activity feeds"""
# we're only interested in new statuses
if not issubclass(sender, models.Status):
return
if instance.deleted:
remove_status_task.delay(instance.id)
return
# when creating new things, gotta wait on the transaction
transaction.on_commit(
lambda: add_status_on_create_command(sender, instance, created)
)
def add_status_on_create_command(sender, instance, created):
"""runs this code only after the database commit completes"""
priority = HIGH
# check if this is an old status, de-prioritize if so
# (this will happen if federation is very slow, or, more expectedly, on csv import)
if instance.published_date < timezone.now() - timedelta(
days=1
) or instance.created_date < instance.published_date - timedelta(days=1):
priority = LOW
add_status_task.apply_async(
args=(instance.id,),
kwargs={"increment_unread": created},
queue=priority,
)
if sender == models.Boost:
handle_boost_task.delay(instance.id)
@receiver(signals.post_delete, sender=models.Boost)
# pylint: disable=unused-argument
def remove_boost_on_delete(sender, instance, *args, **kwargs):
"""boosts are deleted"""
# remove the boost
remove_status_task.delay(instance.id)
# re-add the original status
add_status_task.delay(instance.boosted_status.id)
@receiver(signals.post_save, sender=models.UserFollows)
# pylint: disable=unused-argument
def add_statuses_on_follow(sender, instance, created, *args, **kwargs):
"""add a newly followed user's statuses to feeds"""
if not created or not instance.user_subject.local:
return
add_user_statuses_task.delay(
instance.user_subject.id, instance.user_object.id, stream_list=["home"]
)
@receiver(signals.post_delete, sender=models.UserFollows)
# pylint: disable=unused-argument
def remove_statuses_on_unfollow(sender, instance, *args, **kwargs):
"""remove statuses from a feed on unfollow"""
if not instance.user_subject.local:
return
remove_user_statuses_task.delay(
instance.user_subject.id, instance.user_object.id, stream_list=["home"]
)
@receiver(signals.post_save, sender=models.UserBlocks)
# pylint: disable=unused-argument
def remove_statuses_on_block(sender, instance, *args, **kwargs):
"""remove statuses from all feeds on block"""
# blocks apply to all feeds
if instance.user_subject.local:
remove_user_statuses_task.delay(
instance.user_subject.id, instance.user_object.id
)
# and in both directions
if instance.user_object.local:
remove_user_statuses_task.delay(
instance.user_object.id, instance.user_subject.id
)
@receiver(signals.post_delete, sender=models.UserBlocks)
# pylint: disable=unused-argument
def add_statuses_on_unblock(sender, instance, *args, **kwargs):
"""add statuses back to all feeds on unblock"""
# make sure there isn't a block in the other direction
if models.UserBlocks.objects.filter(
user_subject=instance.user_object,
user_object=instance.user_subject,
).exists():
return
public_streams = [k for (k, v) in streams.items() if k != "home"]
# add statuses back to streams with statuses from anyone
if instance.user_subject.local:
add_user_statuses_task.delay(
instance.user_subject.id,
instance.user_object.id,
stream_list=public_streams,
)
# add statuses back to streams with statuses from anyone
if instance.user_object.local:
add_user_statuses_task.delay(
instance.user_object.id,
instance.user_subject.id,
stream_list=public_streams,
)
@receiver(signals.post_save, sender=models.User)
# pylint: disable=unused-argument
def populate_streams_on_account_create(sender, instance, created, *args, **kwargs):
"""build a user's feeds when they join"""
if not created or not instance.local:
return
transaction.on_commit(
lambda: populate_streams_on_account_create_command(instance.id)
)
def populate_streams_on_account_create_command(instance_id):
"""wait for the transaction to complete"""
for stream in streams:
populate_stream_task.delay(stream, instance_id)
@receiver(signals.pre_save, sender=models.ShelfBook)
# pylint: disable=unused-argument
def add_statuses_on_shelve(sender, instance, *args, **kwargs):
"""update books stream when user shelves a book"""
if not instance.user.local:
return
book = instance.book
# check if the book is already on the user's shelves
editions = book.parent_work.editions.all()
if models.ShelfBook.objects.filter(user=instance.user, book__in=editions).exists():
return
add_book_statuses_task.delay(instance.user.id, book.id)
@receiver(signals.post_delete, sender=models.ShelfBook)
# pylint: disable=unused-argument
def remove_statuses_on_unshelve(sender, instance, *args, **kwargs):
"""update books stream when user unshelves a book"""
if not instance.user.local:
return
book = instance.book
# check if the book is actually unshelved, not just moved
editions = book.parent_work.editions.all()
if models.ShelfBook.objects.filter(user=instance.user, book__in=editions).exists():
return
remove_book_statuses_task.delay(instance.user.id, book.id)
# ---- TASKS
@app.task(queue=LOW)
def add_book_statuses_task(user_id, book_id):
"""add statuses related to a book on shelve"""
user = models.User.objects.get(id=user_id)
book = models.Edition.objects.get(id=book_id)
BooksStream().add_book_statuses(user, book)
@app.task(queue=LOW)
def remove_book_statuses_task(user_id, book_id):
"""remove statuses about a book from a user's books feed"""
user = models.User.objects.get(id=user_id)
book = models.Edition.objects.get(id=book_id)
BooksStream().remove_book_statuses(user, book)
@app.task(queue=MEDIUM)
def populate_stream_task(stream, user_id):
"""background task for populating an empty activitystream"""
user = models.User.objects.get(id=user_id)
stream = streams[stream]
stream.populate_streams(user)
@app.task(queue=MEDIUM)
def remove_status_task(status_ids):
"""remove a status from any stream it might be in"""
# this can take an id or a list of ids
if not isinstance(status_ids, list):
status_ids = [status_ids]
statuses = models.Status.objects.filter(id__in=status_ids)
for stream in streams.values():
for status in statuses:
stream.remove_object_from_related_stores(status)
@app.task(queue=HIGH)
def add_status_task(status_id, increment_unread=False):
"""add a status to any stream it should be in"""
status = models.Status.objects.select_subclasses().get(id=status_id)
# we don't want to tick the unread count for csv import statuses, idk how better
# to check than just to see if the status is more than a few days old
if status.created_date < timezone.now() - timedelta(days=2):
increment_unread = False
for stream in streams.values():
stream.add_status(status, increment_unread=increment_unread)
@app.task(queue=MEDIUM)
def remove_user_statuses_task(viewer_id, user_id, stream_list=None):
"""remove all statuses by a user from a viewer's stream"""
stream_list = [streams[s] for s in stream_list] if stream_list else streams.values()
viewer = models.User.objects.get(id=viewer_id)
user = models.User.objects.get(id=user_id)
for stream in stream_list:
stream.remove_user_statuses(viewer, user)
@app.task(queue=MEDIUM)
def add_user_statuses_task(viewer_id, user_id, stream_list=None):
"""add all statuses by a user to a viewer's stream"""
stream_list = [streams[s] for s in stream_list] if stream_list else streams.values()
viewer = models.User.objects.get(id=viewer_id)
user = models.User.objects.get(id=user_id)
for stream in stream_list:
stream.add_user_statuses(viewer, user)
@app.task(queue=MEDIUM)
def handle_boost_task(boost_id):
"""remove the original post and other, earlier boosts"""
instance = models.Status.objects.get(id=boost_id)
boosted = instance.boost.boosted_status
# previous boosts of this status
old_versions = models.Boost.objects.filter(
boosted_status__id=boosted.id,
created_date__lt=instance.created_date,
)
for stream in streams.values():
# people who should see the boost (not people who see the original status)
audience = stream.get_stores_for_object(instance)
stream.remove_object_from_related_stores(boosted, stores=audience)
for status in old_versions:
stream.remove_object_from_related_stores(status, stores=audience)
def get_status_type(status):
"""return status type even for boosted statuses"""
status_type = status.status_type.lower()
# Check if current status is a boost
if hasattr(status, "boost"):
# Act in accordance of your findings
if hasattr(status.boost.boosted_status, "review"):
status_type = "review"
if hasattr(status.boost.boosted_status, "comment"):
status_type = "comment"
if hasattr(status.boost.boosted_status, "quotation"):
status_type = "quotation"
return status_type
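For illustration, a standalone sketch of how the boost-unwrapping in get_status_type above decides which per-type unread counter gets bumped; the Plain/ReviewLike/BoostOf stand-ins are invented here and are not BookWyrm models.

class Plain:
    """stand-in for a status with no special subtype"""
    status_type = "Note"

class ReviewLike:
    """stand-in that quacks like a Review row: the attribute's presence is what matters"""
    status_type = "Review"
    review = object()

class BoostOf:
    """stand-in for a Boost wrapping another status"""
    status_type = "Boost"
    def __init__(self, boosted_status):
        self.boosted_status = boosted_status
        self.boost = self  # mimic how the boost subclass is reachable from the status

def get_status_type(status):
    """same shape as the helper above: unwrap boosts to find the underlying type"""
    status_type = status.status_type.lower()
    if hasattr(status, "boost"):
        for kind in ("review", "comment", "quotation"):
            if hasattr(status.boost.boosted_status, kind):
                status_type = kind
    return status_type

print(get_status_type(Plain()))                # "note"
print(get_status_type(ReviewLike()))           # "review"
print(get_status_type(BoostOf(ReviewLike())))  # "review", counted against the review bucket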

View file

@ -2,6 +2,7 @@
from django.contrib import admin
from bookwyrm import models
admin.site.register(models.SiteSettings)
admin.site.register(models.User)
admin.site.register(models.FederatedServer)
admin.site.register(models.Connector)

View file

@ -1,54 +0,0 @@
"""Do further startup configuration and initialization"""
import os
import urllib
import logging
from django.apps import AppConfig
from bookwyrm import settings
logger = logging.getLogger(__name__)
def download_file(url, destination):
"""Downloads a file to the given path"""
try:
# Ensure our destination directory exists
os.makedirs(os.path.dirname(destination))
with urllib.request.urlopen(url) as stream:
with open(destination, "b+w") as outfile:
outfile.write(stream.read())
except (urllib.error.HTTPError, urllib.error.URLError):
logger.info("Failed to download file %s", url)
except OSError:
logger.info("Couldn't open font file %s for writing", destination)
except: # pylint: disable=bare-except
logger.info("Unknown error in file download")
class BookwyrmConfig(AppConfig):
"""Handles additional configuration"""
name = "bookwyrm"
verbose_name = "BookWyrm"
# pylint: disable=no-self-use
def ready(self):
"""set up OTLP and preview image files, if desired"""
if settings.OTEL_EXPORTER_OTLP_ENDPOINT:
# pylint: disable=import-outside-toplevel
from bookwyrm.telemetry import open_telemetry
open_telemetry.instrumentDjango()
if settings.ENABLE_PREVIEW_IMAGES and settings.FONTS:
# Download any fonts that we don't have yet
logger.debug("Downloading fonts..")
for name, config in settings.FONTS.items():
font_path = os.path.join(
settings.FONT_DIR, config["directory"], config["filename"]
)
if "url" in config and not os.path.exists(font_path):
logger.info("Just a sec, downloading %s", name)
download_file(config["url"], font_path)

View file

@ -1,159 +0,0 @@
""" using a bookwyrm instance as a source of book data """
from dataclasses import asdict, dataclass
from functools import reduce
import operator
from django.contrib.postgres.search import SearchRank, SearchQuery
from django.db.models import OuterRef, Subquery, F, Q
from bookwyrm import models
from bookwyrm.settings import MEDIA_FULL_URL
# pylint: disable=arguments-differ
def search(query, min_confidence=0, filters=None, return_first=False):
"""search your local database"""
filters = filters or []
if not query:
return []
# first, try searching unique identifiers
results = search_identifiers(query, *filters, return_first=return_first)
if not results:
# then try searching title/author
results = search_title_author(
query, min_confidence, *filters, return_first=return_first
)
return results
def isbn_search(query):
"""search your local database"""
if not query:
return []
filters = [{f: query} for f in ["isbn_10", "isbn_13"]]
results = models.Edition.objects.filter(
reduce(operator.or_, (Q(**f) for f in filters))
).distinct()
# when there are multiple editions of the same work, pick the default.
# it would be odd for this to happen.
default_editions = models.Edition.objects.filter(
parent_work=OuterRef("parent_work")
).order_by("-edition_rank")
results = (
results.annotate(default_id=Subquery(default_editions.values("id")[:1])).filter(
default_id=F("id")
)
or results
)
return results
def format_search_result(search_result):
"""convert a book object into a search result object"""
cover = None
if search_result.cover:
cover = f"{MEDIA_FULL_URL}{search_result.cover}"
return SearchResult(
title=search_result.title,
key=search_result.remote_id,
author=search_result.author_text,
year=search_result.published_date.year
if search_result.published_date
else None,
cover=cover,
confidence=search_result.rank if hasattr(search_result, "rank") else 1,
connector="",
).json()
def search_identifiers(query, *filters, return_first=False):
"""tries remote_id, isbn; defined as dedupe fields on the model"""
# pylint: disable=W0212
or_filters = [
{f.name: query}
for f in models.Edition._meta.get_fields()
if hasattr(f, "deduplication_field") and f.deduplication_field
]
results = models.Edition.objects.filter(
*filters, reduce(operator.or_, (Q(**f) for f in or_filters))
).distinct()
if results.count() <= 1:
if return_first:
return results.first()
return results
# when there are multiple editions of the same work, pick the default.
# it would be odd for this to happen.
default_editions = models.Edition.objects.filter(
parent_work=OuterRef("parent_work")
).order_by("-edition_rank")
results = (
results.annotate(default_id=Subquery(default_editions.values("id")[:1])).filter(
default_id=F("id")
)
or results
)
if return_first:
return results.first()
return results
def search_title_author(query, min_confidence, *filters, return_first=False):
"""searches for title and author"""
query = SearchQuery(query, config="simple") | SearchQuery(query, config="english")
results = (
models.Edition.objects.filter(*filters, search_vector=query)
.annotate(rank=SearchRank(F("search_vector"), query))
.filter(rank__gt=min_confidence)
.order_by("-rank")
)
# when there are multiple editions of the same work, pick the closest
editions_of_work = results.values("parent_work__id").values_list("parent_work__id")
# filter out multiple editions of the same work
list_results = []
for work_id in set(editions_of_work):
editions = results.filter(parent_work=work_id)
default = editions.order_by("-edition_rank").first()
default_rank = default.rank if default else 0
# if multiple books have the top rank, pick the default edition
if default_rank == editions.first().rank:
result = default
else:
result = editions.first()
if return_first:
return result
list_results.append(result)
return list_results
@dataclass
class SearchResult:
"""standardized search result object"""
title: str
key: str
connector: object
view_link: str = None
author: str = None
year: str = None
cover: str = None
confidence: int = 1
def __repr__(self):
# pylint: disable=consider-using-f-string
return "<SearchResult key={!r} title={!r} author={!r} confidence={!r}>".format(
self.key, self.title, self.author, self.confidence
)
def json(self):
"""serialize a connector for json response"""
serialized = asdict(self)
del serialized["connector"]
return serialized
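As a rough usage sketch (assuming a Django shell inside a BookWyrm checkout; the values are made up), a SearchResult can be built and serialized like this; .json() drops the connector so the object is safe to return from a search response.

# run inside a BookWyrm environment, e.g. a manage.py shell
from bookwyrm.book_search import SearchResult

result = SearchResult(
    title="The Fifth Season",
    key="https://example.net/book/1",
    author="N. K. Jemisin",
    year=2015,
    connector=None,  # connectors normally fill this in; stripped again by .json()
    confidence=0.9,
)
print(result)         # <SearchResult key=... title=... author=... confidence=0.9>
print(result.json())  # plain dict with the connector removed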

View file

@ -3,4 +3,4 @@ from .settings import CONNECTORS
from .abstract_connector import ConnectorException
from .abstract_connector import get_data, get_image
from .connector_manager import search, first_search_result

View file

@ -1,24 +1,22 @@
""" functionality outline for a book data connector """ """ functionality outline for a book data connector """
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
import imghdr from dataclasses import asdict, dataclass
import logging import logging
import re from urllib3.exceptions import RequestError
from django.core.files.base import ContentFile
from django.db import transaction from django.db import transaction
import requests import requests
from requests.exceptions import RequestException from requests.exceptions import SSLError
from bookwyrm import activitypub, models, settings from bookwyrm import activitypub, models, settings
from .connector_manager import load_more_data, ConnectorException, raise_not_valid_url from .connector_manager import load_more_data, ConnectorException
from .format_mappings import format_mappings
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class AbstractMinimalConnector(ABC): class AbstractMinimalConnector(ABC):
"""just the bare bones, for other bookwyrm instances""" """ just the bare bones, for other bookwyrm instances """
def __init__(self, identifier): def __init__(self, identifier):
# load connector settings # load connector settings
@ -32,43 +30,67 @@ class AbstractMinimalConnector(ABC):
"covers_url", "covers_url",
"search_url", "search_url",
"isbn_search_url", "isbn_search_url",
"max_query_count",
"name", "name",
"identifier", "identifier",
"local",
] ]
for field in self_fields: for field in self_fields:
setattr(self, field, getattr(info, field)) setattr(self, field, getattr(info, field))
def get_search_url(self, query): def search(self, query, min_confidence=None):
"""format the query url""" """ free text search """
# Check if the query resembles an ISBN params = {}
if maybe_isbn(query) and self.isbn_search_url and self.isbn_search_url != "": if min_confidence:
return f"{self.isbn_search_url}{query}" params["min_confidence"] = min_confidence
# NOTE: previously, we tried searching isbn and if that produces no results, data = get_data(
# searched as free text. This, instead, only searches isbn if it's isbn-y "%s%s" % (self.search_url, query),
return f"{self.search_url}{query}" params=params,
)
results = []
def process_search_response(self, query, data, min_confidence): for doc in self.parse_search_data(data)[:10]:
"""Format the search results based on the formt of the query""" results.append(self.format_search_result(doc))
if maybe_isbn(query): return results
return list(self.parse_isbn_search_data(data))[:10]
return list(self.parse_search_data(data, min_confidence))[:10] def isbn_search(self, query):
""" isbn search """
params = {}
data = get_data(
"%s%s" % (self.isbn_search_url, query),
params=params,
)
results = []
# this shouldn't be returning mutliple results, but just in case
for doc in self.parse_isbn_search_data(data)[:10]:
results.append(self.format_isbn_search_result(doc))
return results
@abstractmethod @abstractmethod
def get_or_create_book(self, remote_id): def get_or_create_book(self, remote_id):
"""pull up a book record by whatever means possible""" """ pull up a book record by whatever means possible """
@abstractmethod @abstractmethod
def parse_search_data(self, data, min_confidence): def parse_search_data(self, data):
"""turn the result json from a search into a list""" """ turn the result json from a search into a list """
@abstractmethod
def format_search_result(self, search_result):
""" create a SearchResult obj from json """
@abstractmethod @abstractmethod
def parse_isbn_search_data(self, data): def parse_isbn_search_data(self, data):
"""turn the result json from a search into a list""" """ turn the result json from a search into a list """
@abstractmethod
def format_isbn_search_result(self, search_result):
""" create a SearchResult obj from json """
class AbstractConnector(AbstractMinimalConnector): class AbstractConnector(AbstractMinimalConnector):
"""generic book data connector""" """ generic book data connector """
def __init__(self, identifier): def __init__(self, identifier):
super().__init__(identifier) super().__init__(identifier)
@ -76,19 +98,27 @@ class AbstractConnector(AbstractMinimalConnector):
# title we handle separately.
self.book_mappings = []
def get_or_create_book(self, remote_id):
"""translate arbitrary json into an Activitypub dataclass"""
# first, check if we have the origin_id saved
existing = models.Edition.find_existing_by_remote_id(
remote_id
) or models.Work.find_existing_by_remote_id(remote_id)
if existing:
if hasattr(existing, "default_edition"):
return existing.default_edition
return existing
# load the json data from the remote data source
data = self.get_book_data(remote_id)
if self.is_work_data(data):
try:
edition_data = self.get_edition_from_work_data(data)
@ -96,69 +126,57 @@ class AbstractConnector(AbstractMinimalConnector):
# hack: re-use the work data as the edition data
# this is why remote ids aren't necessarily unique
edition_data = data
work_data = data
else:
edition_data = data
try:
work_data = self.get_work_from_edition_data(data)
except (KeyError, ConnectorException) as err:
logger.info(err)
work_data = data
if not work_data or not edition_data:
raise ConnectorException(f"Unable to load book data: {remote_id}")
with transaction.atomic():
# create activitypub object
work_activity = activitypub.Work(
**dict_from_mappings(work_data, self.book_mappings)
)
# this will dedupe automatically
work = work_activity.to_model(model=models.Work, overwrite=False)
for author in self.get_authors_from_data(work_data):
work.authors.add(author)
edition = self.create_edition_from_data(work, edition_data)
load_more_data.delay(self.connector.id, work.id)
return edition
def get_book_data(self, remote_id):  # pylint: disable=no-self-use
"""this allows connectors to override the default behavior"""
return get_data(remote_id)
def create_edition_from_data(self, work, edition_data, instance=None):
"""if we already have the work, we're ready"""
mapped_data = dict_from_mappings(edition_data, self.book_mappings)
mapped_data["work"] = work.remote_id
edition_activity = activitypub.Edition(**mapped_data)
edition = edition_activity.to_model(
model=models.Edition, overwrite=False, instance=instance
)
# if we're updating an existing instance, we don't need to load authors
if instance:
return edition
if not edition.connector:
edition.connector = self.connector
edition.save(broadcast=False, update_fields=["connector"])
for author in self.get_authors_from_data(edition_data):
edition.authors.add(author)
# use the authors from the work if none are found for the edition
if not edition.authors.exists() and work.authors.exists():
edition.authors.set(work.authors.all())
return edition
def get_or_create_author(self, remote_id, instance=None):
"""load that author"""
if not instance:
existing = models.Author.find_existing_by_remote_id(remote_id)
if existing:
return existing
data = self.get_book_data(remote_id)
mapped_data = dict_from_mappings(data, self.author_mappings)
try:
@ -167,44 +185,27 @@ class AbstractConnector(AbstractMinimalConnector):
return None
# this will dedupe
return activity.to_model(
model=models.Author, overwrite=False, instance=instance
)
def get_remote_id_from_model(self, obj):
"""given the data stored, how can we look this up"""
return getattr(obj, getattr(self, "generated_remote_link_field"))
def update_author_from_remote(self, obj):
"""load the remote data from this connector and add it to an existing author"""
remote_id = self.get_remote_id_from_model(obj)
return self.get_or_create_author(remote_id, instance=obj)
def update_book_from_remote(self, obj):
"""load the remote data from this connector and add it to an existing book"""
remote_id = self.get_remote_id_from_model(obj)
data = self.get_book_data(remote_id)
return self.create_edition_from_data(obj.parent_work, data, instance=obj)
@abstractmethod
def is_work_data(self, data):
"""differentiate works and editions"""
@abstractmethod
def get_edition_from_work_data(self, data):
"""every work needs at least one edition"""
@abstractmethod
def get_work_from_edition_data(self, data):
"""every edition needs a work"""
@abstractmethod
def get_authors_from_data(self, data):
"""load author data"""
@abstractmethod
def expand_book_data(self, book):
"""get more info on a book"""
def dict_from_mappings(data, mappings):
@ -212,75 +213,85 @@ def dict_from_mappings(data, mappings):
the subclass"""
result = {}
for mapping in mappings:
# sometimes there are multiple mappings for one field, don't
# overwrite earlier writes in that case
if mapping.local_field in result and result[mapping.local_field]:
continue
result[mapping.local_field] = mapping.get_value(data)
return result
def get_data(url, params=None, timeout=10):
"""wrapper for request.get"""
# check if the url is blocked
raise_not_valid_url(url)
try:
resp = requests.get(
url,
params=params,
headers={  # pylint: disable=line-too-long
"Accept": (
'application/json, application/activity+json, application/ld+json; profile="https://www.w3.org/ns/activitystreams"; charset=utf-8'
),
"User-Agent": settings.USER_AGENT,
},
timeout=timeout,
)
except RequestException as err:
logger.info(err)
raise ConnectorException(err)
if not resp.ok:
raise ConnectorException()
try:
data = resp.json()
except ValueError as err:
logger.info(err)
raise ConnectorException(err)
return data
def get_image(url, timeout=10):
"""wrapper for requesting an image"""
raise_not_valid_url(url)
try:
resp = requests.get(
url,
headers={
"User-Agent": settings.USER_AGENT,
},
timeout=timeout,
)
except RequestException as err:
logger.info(err)
return None, None
if not resp.ok:
return None, None
image_content = ContentFile(resp.content)
extension = imghdr.what(None, image_content.read())
if not extension:
logger.info("File requested was not an image: %s", url)
return None, None
return image_content, extension
class Mapping:
"""associate a local database field with a field in an external dataset"""
def __init__(self, local_field, remote_field=None, formatter=None):
noop = lambda x: x
@ -290,7 +301,7 @@ class Mapping:
self.formatter = formatter or noop
def get_value(self, data):
"""pull a field from incoming json and return the formatted version"""
value = data.get(self.remote_field)
if not value:
return None
@ -298,31 +309,3 @@ class Mapping:
return self.formatter(value)
except:  # pylint: disable=bare-except
return None
def infer_physical_format(format_text):
"""try to figure out what the standardized format is from the free value"""
format_text = format_text.lower()
if format_text in format_mappings:
# try a direct match
return format_mappings[format_text]
# failing that, try substring
matches = [v for k, v in format_mappings.items() if k in format_text]
if not matches:
return None
return matches[0]
def unique_physical_format(format_text):
"""only store the format if it isn't diretly in the format mappings"""
format_text = format_text.lower()
if format_text in format_mappings:
# try a direct match, so saving this would be redundant
return None
return format_text
def maybe_isbn(query):
"""check if a query looks like an isbn"""
isbn = re.sub(r"[\W_]", "", query) # removes filler characters
return len(isbn) in [10, 13] # ISBN10 or ISBN13
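A standalone mirror of maybe_isbn above, with a couple of example queries showing which search URL get_search_url would end up picking.

import re

def maybe_isbn(query):
    """does the query look like an ISBN-10 or ISBN-13?"""
    isbn = re.sub(r"[\W_]", "", query)  # strip filler characters like dashes and spaces
    return len(isbn) in [10, 13]

print(maybe_isbn("978-0-316-22924-1"))  # True  -> the connector's isbn_search_url is used
print(maybe_isbn("the fifth season"))   # False -> falls back to the free-text search_url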

View file

@ -1,21 +1,27 @@
""" using another bookwyrm instance as a source of book data """ """ using another bookwyrm instance as a source of book data """
from bookwyrm import activitypub, models from bookwyrm import activitypub, models
from bookwyrm.book_search import SearchResult from .abstract_connector import AbstractMinimalConnector, SearchResult
from .abstract_connector import AbstractMinimalConnector
class Connector(AbstractMinimalConnector): class Connector(AbstractMinimalConnector):
"""this is basically just for search""" """ this is basically just for search """
def get_or_create_book(self, remote_id): def get_or_create_book(self, remote_id):
return activitypub.resolve_remote_id(remote_id, model=models.Edition) edition = activitypub.resolve_remote_id(remote_id, model=models.Edition)
work = edition.parent_work
work.default_edition = work.get_default_edition()
work.save()
return edition
def parse_search_data(self, data, min_confidence): def parse_search_data(self, data):
for search_result in data: return data
search_result["connector"] = self
yield SearchResult(**search_result) def format_search_result(self, search_result):
search_result["connector"] = self
return SearchResult(**search_result)
def parse_isbn_search_data(self, data): def parse_isbn_search_data(self, data):
for search_result in data: return data
search_result["connector"] = self
yield SearchResult(**search_result) def format_isbn_search_result(self, search_result):
return self.format_search_result(search_result)

View file

@ -1,127 +1,101 @@
""" interface with whatever connectors the app has """ """ interface with whatever connectors the app has """
import asyncio
import importlib import importlib
import ipaddress
import logging import logging
import re
from urllib.parse import urlparse from urllib.parse import urlparse
import aiohttp
from django.dispatch import receiver from django.dispatch import receiver
from django.db.models import signals from django.db.models import signals
from requests import HTTPError from requests import HTTPError
from bookwyrm import book_search, models from bookwyrm import models
from bookwyrm.settings import SEARCH_TIMEOUT, USER_AGENT
from bookwyrm.tasks import app from bookwyrm.tasks import app
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class ConnectorException(HTTPError): class ConnectorException(HTTPError):
"""when the connector can't do what was asked""" """ when the connector can't do what was asked """
async def get_results(session, url, min_confidence, query, connector): def search(query, min_confidence=0.1):
"""try this specific connector""" """ find books based on arbitary keywords """
# pylint: disable=line-too-long
headers = {
"Accept": (
'application/json, application/activity+json, application/ld+json; profile="https://www.w3.org/ns/activitystreams"; charset=utf-8'
),
"User-Agent": USER_AGENT,
}
params = {"min_confidence": min_confidence}
try:
async with session.get(url, headers=headers, params=params) as response:
if not response.ok:
logger.info("Unable to connect to %s: %s", url, response.reason)
return
try:
raw_data = await response.json()
except aiohttp.client_exceptions.ContentTypeError as err:
logger.exception(err)
return
return {
"connector": connector,
"results": connector.process_search_response(
query, raw_data, min_confidence
),
}
except asyncio.TimeoutError:
logger.info("Connection timed out for url: %s", url)
except aiohttp.ClientError as err:
logger.exception(err)
async def async_connector_search(query, items, min_confidence):
"""Try a number of requests simultaneously"""
timeout = aiohttp.ClientTimeout(total=SEARCH_TIMEOUT)
async with aiohttp.ClientSession(timeout=timeout) as session:
tasks = []
for url, connector in items:
tasks.append(
asyncio.ensure_future(
get_results(session, url, min_confidence, query, connector)
)
)
results = await asyncio.gather(*tasks)
return results
def search(query, min_confidence=0.1, return_first=False):
"""find books based on arbitary keywords"""
if not query: if not query:
return [] return []
results = [] results = []
items = [] # Have we got a ISBN ?
isbn = re.sub(r"[\W_]", "", query)
maybe_isbn = len(isbn) in [10, 13] # ISBN10 or ISBN13
dedup_slug = lambda r: "%s/%s/%s" % (r.title, r.author, r.year)
result_index = set()
for connector in get_connectors(): for connector in get_connectors():
# get the search url from the connector before sending result_set = None
url = connector.get_search_url(query) if maybe_isbn:
try: # Search on ISBN
raise_not_valid_url(url) if not connector.isbn_search_url or connector.isbn_search_url == "":
except ConnectorException: result_set = []
# if this URL is invalid we should skip it and move on else:
logger.info("Request denied to blocked domain: %s", url) try:
continue result_set = connector.isbn_search(isbn)
items.append((url, connector)) except Exception as e: # pylint: disable=broad-except
logger.exception(e)
continue
# load as many results as we can # if no isbn search or results, we fallback to generic search
results = asyncio.run(async_connector_search(query, items, min_confidence)) if result_set in (None, []):
results = [r for r in results if r] try:
result_set = connector.search(query, min_confidence=min_confidence)
except Exception as e: # pylint: disable=broad-except
# we don't want *any* error to crash the whole search page
logger.exception(e)
continue
if return_first: # if the search results look the same, ignore them
# find the best result from all the responses and return that result_set = [r for r in result_set if dedup_slug(r) not in result_index]
all_results = [r for con in results for r in con["results"]] # `|=` concats two sets. WE ARE GETTING FANCY HERE
all_results = sorted(all_results, key=lambda r: r.confidence, reverse=True) result_index |= set(dedup_slug(r) for r in result_set)
return all_results[0] if all_results else None results.append(
{
"connector": connector,
"results": result_set,
}
)
# failed requests will return None, so filter those out
return results return results
def local_search(query, min_confidence=0.1, raw=False):
""" only look at local search results """
connector = load_connector(models.Connector.objects.get(local=True))
return connector.search(query, min_confidence=min_confidence, raw=raw)
def isbn_local_search(query, raw=False):
""" only look at local search results """
connector = load_connector(models.Connector.objects.get(local=True))
return connector.isbn_search(query, raw=raw)
def first_search_result(query, min_confidence=0.1): def first_search_result(query, min_confidence=0.1):
"""search until you find a result that fits""" """ search until you find a result that fits """
# try local search first for connector in get_connectors():
result = book_search.search(query, min_confidence=min_confidence, return_first=True) result = connector.search(query, min_confidence=min_confidence)
if result: if result:
return result return result[0]
# otherwise, try remote endpoints return None
return search(query, min_confidence=min_confidence, return_first=True) or None
def get_connectors(): def get_connectors():
"""load all connectors""" """ load all connectors """
for info in models.Connector.objects.filter(active=True).order_by("priority").all(): for info in models.Connector.objects.order_by("priority").all():
yield load_connector(info) yield load_connector(info)
def get_or_create_connector(remote_id): def get_or_create_connector(remote_id):
"""get the connector related to the object's server""" """ get the connector related to the object's server """
url = urlparse(remote_id) url = urlparse(remote_id)
identifier = url.netloc identifier = url.netloc
if not identifier: if not identifier:
@ -133,19 +107,19 @@ def get_or_create_connector(remote_id):
connector_info = models.Connector.objects.create(
identifier=identifier,
connector_file="bookwyrm_connector",
base_url=f"https://{identifier}",
books_url=f"https://{identifier}/book",
covers_url=f"https://{identifier}/images/covers",
search_url=f"https://{identifier}/search?q=",
priority=2,
)
return load_connector(connector_info)
@app.task(queue="low_priority")
def load_more_data(connector_id, book_id):
"""background the work of getting all 10,000 editions of LoTR"""
connector_info = models.Connector.objects.get(id=connector_id)
connector = load_connector(connector_info)
book = models.Book.objects.select_subclasses().get(id=book_id)
@ -153,9 +127,9 @@ def load_more_data(connector_id, book_id):
def load_connector(connector_info):
"""instantiate the connector class"""
connector = importlib.import_module(
f"bookwyrm.connectors.{connector_info.connector_file}"
)
return connector.Connector(connector_info.identifier)
@ -163,23 +137,6 @@ def load_connector(connector_info):
@receiver(signals.post_save, sender="bookwyrm.FederatedServer")
# pylint: disable=unused-argument
def create_connector(sender, instance, created, *args, **kwargs):
"""create a connector to an external bookwyrm server"""
if instance.application_type == "bookwyrm":
get_or_create_connector(f"https://{instance.server_name}")
def raise_not_valid_url(url):
"""do some basic reality checks on the url"""
parsed = urlparse(url)
if not parsed.scheme in ["http", "https"]:
raise ConnectorException("Invalid scheme: ", url)
try:
ipaddress.ip_address(parsed.netloc)
raise ConnectorException("Provided url is an IP address: ", url)
except ValueError:
# it's not an IP address, which is good
pass
if models.FederatedServer.is_blocked(url):
raise ConnectorException(f"Attempting to load data from blocked url: {url}")
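A standalone mirror of raise_not_valid_url above, minus the FederatedServer block-list check (which needs the database); the example URLs are made up.

import ipaddress
from urllib.parse import urlparse

class ConnectorException(Exception):
    """stand-in for the HTTPError subclass above"""

def raise_not_valid_url(url):
    """reject non-http(s) schemes and bare IP addresses"""
    parsed = urlparse(url)
    if parsed.scheme not in ["http", "https"]:
        raise ConnectorException("Invalid scheme: ", url)
    try:
        ipaddress.ip_address(parsed.netloc)
        raise ConnectorException("Provided url is an IP address: ", url)
    except ValueError:
        pass  # not an IP address, which is what we want

raise_not_valid_url("https://openlibrary.org/search.json?q=tolkien")  # passes silently
# raise_not_valid_url("ftp://example.net/file")    # would raise: invalid scheme
# raise_not_valid_url("https://127.0.0.1/secret")  # would raise: IP address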

View file

@ -1,43 +0,0 @@
""" comparing a free text format to the standardized one """
format_mappings = {
"paperback": "Paperback",
"soft": "Paperback",
"pamphlet": "Paperback",
"peperback": "Paperback",
"tapa blanda": "Paperback",
"turtleback": "Paperback",
"pocket": "Paperback",
"spiral": "Paperback",
"ring": "Paperback",
"平装": "Paperback",
"简装": "Paperback",
"hardcover": "Hardcover",
"hardcocer": "Hardcover",
"hardover": "Hardcover",
"hardback": "Hardcover",
"library": "Hardcover",
"tapa dura": "Hardcover",
"leather": "Hardcover",
"clothbound": "Hardcover",
"精装": "Hardcover",
"ebook": "EBook",
"e-book": "EBook",
"digital": "EBook",
"computer file": "EBook",
"epub": "EBook",
"online": "EBook",
"pdf": "EBook",
"elektronische": "EBook",
"electronic": "EBook",
"audiobook": "AudiobookFormat",
"audio": "AudiobookFormat",
"cd": "AudiobookFormat",
"dvd": "AudiobookFormat",
"mp3": "AudiobookFormat",
"cassette": "AudiobookFormat",
"kindle": "AudiobookFormat",
"talking": "AudiobookFormat",
"sound": "AudiobookFormat",
"comic": "GraphicNovel",
"graphic": "GraphicNovel",
}
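A standalone sketch of how this table is consumed by infer_physical_format in abstract_connector.py; only a handful of entries are copied here and the sample strings are invented.

format_mappings = {
    "paperback": "Paperback",
    "tapa blanda": "Paperback",
    "hardcover": "Hardcover",
    "ebook": "EBook",
}

def infer_physical_format(format_text):
    """direct lookup first, then substring match, else give up"""
    format_text = format_text.lower()
    if format_text in format_mappings:
        return format_mappings[format_text]
    matches = [v for k, v in format_mappings.items() if k in format_text]
    return matches[0] if matches else None

print(infer_physical_format("Tapa blanda"))        # Paperback (direct match)
print(infer_physical_format("Hardcover, 1st ed"))  # Hardcover (substring match)
print(infer_physical_format("vinyl"))              # None (kept as free text instead)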

View file

@ -1,216 +0,0 @@
""" inventaire data connector """
import re
from bookwyrm import models
from bookwyrm.book_search import SearchResult
from .abstract_connector import AbstractConnector, Mapping
from .abstract_connector import get_data
from .connector_manager import ConnectorException
class Connector(AbstractConnector):
"""instantiate a connector for inventaire"""
generated_remote_link_field = "inventaire_id"
def __init__(self, identifier):
super().__init__(identifier)
get_first = lambda a: a[0]
shared_mappings = [
Mapping("id", remote_field="uri", formatter=self.get_remote_id),
Mapping("bnfId", remote_field="wdt:P268", formatter=get_first),
Mapping("openlibraryKey", remote_field="wdt:P648", formatter=get_first),
]
self.book_mappings = [
Mapping("title", remote_field="wdt:P1476", formatter=get_first),
Mapping("title", remote_field="labels", formatter=get_language_code),
Mapping("subtitle", remote_field="wdt:P1680", formatter=get_first),
Mapping("inventaireId", remote_field="uri"),
Mapping(
"description", remote_field="sitelinks", formatter=self.get_description
),
Mapping("cover", remote_field="image", formatter=self.get_cover_url),
Mapping("isbn13", remote_field="wdt:P212", formatter=get_first),
Mapping("isbn10", remote_field="wdt:P957", formatter=get_first),
Mapping("oclcNumber", remote_field="wdt:P5331", formatter=get_first),
Mapping("goodreadsKey", remote_field="wdt:P2969", formatter=get_first),
Mapping("librarythingKey", remote_field="wdt:P1085", formatter=get_first),
Mapping("languages", remote_field="wdt:P407", formatter=self.resolve_keys),
Mapping("publishers", remote_field="wdt:P123", formatter=self.resolve_keys),
Mapping("publishedDate", remote_field="wdt:P577", formatter=get_first),
Mapping("pages", remote_field="wdt:P1104", formatter=get_first),
Mapping(
"subjectPlaces", remote_field="wdt:P840", formatter=self.resolve_keys
),
Mapping("subjects", remote_field="wdt:P921", formatter=self.resolve_keys),
Mapping("asin", remote_field="wdt:P5749", formatter=get_first),
] + shared_mappings
# TODO: P136: genre, P674 characters, P950 bne
self.author_mappings = [
Mapping("id", remote_field="uri", formatter=self.get_remote_id),
Mapping("name", remote_field="labels", formatter=get_language_code),
Mapping("bio", remote_field="sitelinks", formatter=self.get_description),
Mapping("goodreadsKey", remote_field="wdt:P2963", formatter=get_first),
Mapping("isni", remote_field="wdt:P213", formatter=get_first),
Mapping("viafId", remote_field="wdt:P214", formatter=get_first),
Mapping("gutenberg_id", remote_field="wdt:P1938", formatter=get_first),
Mapping("born", remote_field="wdt:P569", formatter=get_first),
Mapping("died", remote_field="wdt:P570", formatter=get_first),
] + shared_mappings
def get_remote_id(self, value):
"""convert an id/uri into a url"""
return f"{self.books_url}?action=by-uris&uris={value}"
def get_book_data(self, remote_id):
data = get_data(remote_id)
extracted = list(data.get("entities").values())
try:
data = extracted[0]
except (KeyError, IndexError):
raise ConnectorException("Invalid book data")
# flatten the data so that images, uri, and claims are on the same level
return {
**data.get("claims", {}),
**{k: data.get(k) for k in ["uri", "image", "labels", "sitelinks", "type"]},
}
def parse_search_data(self, data, min_confidence):
for search_result in data.get("results", []):
images = search_result.get("image")
cover = f"{self.covers_url}/img/entities/{images[0]}" if images else None
# a deeply messy translation of inventaire's scores
confidence = float(search_result.get("_score", 0.1))
confidence = 0.1 if confidence < 150 else 0.999
if confidence < min_confidence:
continue
yield SearchResult(
title=search_result.get("label"),
key=self.get_remote_id(search_result.get("uri")),
author=search_result.get("description"),
view_link=f"{self.base_url}/entity/{search_result.get('uri')}",
cover=cover,
confidence=confidence,
connector=self,
)
def parse_isbn_search_data(self, data):
"""got some daaaata"""
results = data.get("entities")
if not results:
return
for search_result in list(results.values()):
title = search_result.get("claims", {}).get("wdt:P1476", [])
if not title:
continue
yield SearchResult(
title=title[0],
key=self.get_remote_id(search_result.get("uri")),
author=search_result.get("description"),
view_link=f"{self.base_url}/entity/{search_result.get('uri')}",
cover=self.get_cover_url(search_result.get("image")),
connector=self,
)
def is_work_data(self, data):
return data.get("type") == "work"
def load_edition_data(self, work_uri):
"""get a list of editions for a work"""
# pylint: disable=line-too-long
url = f"{self.books_url}?action=reverse-claims&property=wdt:P629&value={work_uri}&sort=true"
return get_data(url)
def get_edition_from_work_data(self, data):
data = self.load_edition_data(data.get("uri"))
try:
uri = data.get("uris", [])[0]
except IndexError:
raise ConnectorException("Invalid book data")
return self.get_book_data(self.get_remote_id(uri))
def get_work_from_edition_data(self, data):
uri = data.get("wdt:P629", [None])[0]
if not uri:
raise ConnectorException("Invalid book data")
return self.get_book_data(self.get_remote_id(uri))
def get_authors_from_data(self, data):
authors = data.get("wdt:P50", [])
for author in authors:
yield self.get_or_create_author(self.get_remote_id(author))
def expand_book_data(self, book):
work = book
# go from the edition to the work, if necessary
if isinstance(book, models.Edition):
work = book.parent_work
try:
edition_options = self.load_edition_data(work.inventaire_id)
except ConnectorException:
# who knows, man
return
for edition_uri in edition_options.get("uris"):
remote_id = self.get_remote_id(edition_uri)
try:
data = self.get_book_data(remote_id)
except ConnectorException:
# who, indeed, knows
continue
self.create_edition_from_data(work, data)
def get_cover_url(self, cover_blob, *_):
"""format the relative cover url into an absolute one:
{"url": "/img/entities/e794783f01b9d4f897a1ea9820b96e00d346994f"}
"""
# covers may or may not be a list
if isinstance(cover_blob, list) and len(cover_blob) > 0:
cover_blob = cover_blob[0]
cover_id = cover_blob.get("url")
if not cover_id:
return None
# cover may or may not be an absolute url already
if re.match(r"^http", cover_id):
return cover_id
return f"{self.covers_url}{cover_id}"
def resolve_keys(self, keys):
"""cool, it's "wd:Q3156592" now what the heck does that mean"""
results = []
for uri in keys:
try:
data = self.get_book_data(self.get_remote_id(uri))
except ConnectorException:
continue
results.append(get_language_code(data.get("labels")))
return results
def get_description(self, links):
"""grab an extracted excerpt from wikipedia"""
link = links.get("enwiki")
if not link:
return ""
url = f"{self.base_url}/api/data?action=wp-extract&lang=en&title={link}"
try:
data = get_data(url)
except ConnectorException:
return ""
return data.get("extract")
def get_remote_id_from_model(self, obj):
"""use get_remote_id to figure out the link from a model obj"""
remote_id_value = obj.inventaire_id
return self.get_remote_id(remote_id_value)
def get_language_code(options, code="en"):
"""when there are a bunch of translation but we need a single field"""
result = options.get(code)
if result:
return result
values = list(options.values())
return values[0] if values else None
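As a quick illustration of the two helpers above, here is a minimal standalone sketch; the Inventaire API endpoint and the sample URI/labels are assumptions for demonstration only, and get_language_code is copied verbatim so the snippet runs on its own.

# standalone sketch of the URI-to-URL conversion and the label fallback
BOOKS_URL = "https://inventaire.io/api/entities"  # assumed API endpoint, for illustration

def get_remote_id(value):
    """convert an id/uri into a url, mirroring Connector.get_remote_id above"""
    return f"{BOOKS_URL}?action=by-uris&uris={value}"

def get_language_code(options, code="en"):
    """copied from above: prefer the requested language, fall back to any available label"""
    result = options.get(code)
    if result:
        return result
    values = list(options.values())
    return values[0] if values else None

print(get_remote_id("wd:Q3156592"))
# https://inventaire.io/api/entities?action=by-uris&uris=wd:Q3156592
print(get_language_code({"fr": "Anna Karénine"}))
# no "en" label, so the first available value ("Anna Karénine") is returned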

View file

@ -2,23 +2,20 @@
import re import re
from bookwyrm import models from bookwyrm import models
from bookwyrm.book_search import SearchResult from .abstract_connector import AbstractConnector, SearchResult, Mapping
from .abstract_connector import AbstractConnector, Mapping from .abstract_connector import get_data
from .abstract_connector import get_data, infer_physical_format, unique_physical_format
from .connector_manager import ConnectorException from .connector_manager import ConnectorException
from .openlibrary_languages import languages from .openlibrary_languages import languages
class Connector(AbstractConnector): class Connector(AbstractConnector):
"""instantiate a connector for OL""" """ instantiate a connector for OL """
generated_remote_link_field = "openlibrary_link"
def __init__(self, identifier): def __init__(self, identifier):
super().__init__(identifier) super().__init__(identifier)
get_first = lambda a, *args: a[0] get_first = lambda a: a[0]
get_remote_id = lambda a, *args: self.base_url + a get_remote_id = lambda a: self.base_url + a
self.book_mappings = [ self.book_mappings = [
Mapping("title"), Mapping("title"),
Mapping("id", remote_field="key", formatter=get_remote_id), Mapping("id", remote_field="key", formatter=get_remote_id),
@ -46,16 +43,7 @@ class Connector(AbstractConnector):
), ),
Mapping("publishedDate", remote_field="publish_date"), Mapping("publishedDate", remote_field="publish_date"),
Mapping("pages", remote_field="number_of_pages"), Mapping("pages", remote_field="number_of_pages"),
Mapping( Mapping("physicalFormat", remote_field="physical_format"),
"physicalFormat",
remote_field="physical_format",
formatter=infer_physical_format,
),
Mapping(
"physicalFormatDetail",
remote_field="physical_format",
formatter=unique_physical_format,
),
Mapping("publishers"), Mapping("publishers"),
] ]
@ -68,46 +56,15 @@ class Connector(AbstractConnector):
Mapping("born", remote_field="birth_date"), Mapping("born", remote_field="birth_date"),
Mapping("died", remote_field="death_date"), Mapping("died", remote_field="death_date"),
Mapping("bio", formatter=get_description), Mapping("bio", formatter=get_description),
Mapping(
"isni",
remote_field="remote_ids",
formatter=lambda b: get_dict_field(b, "isni"),
),
Mapping(
"asin",
remote_field="remote_ids",
formatter=lambda b: get_dict_field(b, "amazon"),
),
Mapping(
"viaf",
remote_field="remote_ids",
formatter=lambda b: get_dict_field(b, "viaf"),
),
Mapping(
"wikidata",
remote_field="remote_ids",
formatter=lambda b: get_dict_field(b, "wikidata"),
),
Mapping(
"wikipedia_link", remote_field="links", formatter=get_wikipedia_link
),
Mapping("inventaire_id", remote_field="links", formatter=get_inventaire_id),
] ]
def get_book_data(self, remote_id):
data = get_data(remote_id)
if data.get("type", {}).get("key") == "/type/redirect":
remote_id = self.base_url + data.get("location")
return get_data(remote_id)
return data
def get_remote_id_from_data(self, data): def get_remote_id_from_data(self, data):
"""format a url from an openlibrary id field""" """ format a url from an openlibrary id field """
try: try:
key = data["key"] key = data["key"]
except KeyError: except KeyError:
raise ConnectorException("Invalid book data") raise ConnectorException("Invalid book data")
return f"{self.books_url}{key}" return "%s%s" % (self.books_url, key)
def is_work_data(self, data): def is_work_data(self, data):
return bool(re.match(r"^[\/\w]+OL\d+W$", data["key"])) return bool(re.match(r"^[\/\w]+OL\d+W$", data["key"]))
@ -117,81 +74,76 @@ class Connector(AbstractConnector):
key = data["key"] key = data["key"]
except KeyError: except KeyError:
raise ConnectorException("Invalid book data") raise ConnectorException("Invalid book data")
url = f"{self.books_url}{key}/editions" url = "%s%s/editions" % (self.books_url, key)
data = self.get_book_data(url) data = get_data(url)
edition = pick_default_edition(data["entries"]) return pick_default_edition(data["entries"])
if not edition:
raise ConnectorException("No editions for work")
return edition
def get_work_from_edition_data(self, data): def get_work_from_edition_data(self, data):
try: try:
key = data["works"][0]["key"] key = data["works"][0]["key"]
except (IndexError, KeyError): except (IndexError, KeyError):
raise ConnectorException("No work found for edition") raise ConnectorException("No work found for edition")
url = f"{self.books_url}{key}" url = "%s%s" % (self.books_url, key)
return self.get_book_data(url) return get_data(url)
def get_authors_from_data(self, data): def get_authors_from_data(self, data):
"""parse author json and load or create authors""" """ parse author json and load or create authors """
for author_blob in data.get("authors", []): for author_blob in data.get("authors", []):
author_blob = author_blob.get("author", author_blob) author_blob = author_blob.get("author", author_blob)
# this id is "/authors/OL1234567A" # this id is "/authors/OL1234567A"
author_id = author_blob["key"] author_id = author_blob["key"]
url = f"{self.base_url}{author_id}" url = "%s%s" % (self.base_url, author_id)
author = self.get_or_create_author(url) author = self.get_or_create_author(url)
if not author: if not author:
continue continue
yield author yield author
def get_cover_url(self, cover_blob, size="L"): def get_cover_url(self, cover_blob, size="L"):
"""ask openlibrary for the cover""" """ ask openlibrary for the cover """
if not cover_blob: if not cover_blob:
return None return None
cover_id = cover_blob[0] cover_id = cover_blob[0]
image_name = f"{cover_id}-{size}.jpg" image_name = "%s-%s.jpg" % (cover_id, size)
return f"{self.covers_url}/b/id/{image_name}" return "%s/b/id/%s" % (self.covers_url, image_name)
def parse_search_data(self, data, min_confidence): def parse_search_data(self, data):
for idx, search_result in enumerate(data.get("docs")): return data.get("docs")
# build the remote id from the openlibrary key
key = self.books_url + search_result["key"]
author = search_result.get("author_name") or ["Unknown"]
cover_blob = search_result.get("cover_i")
cover = self.get_cover_url([cover_blob], size="M") if cover_blob else None
# OL doesn't provide confidence, but it does sort by an internal ranking, so def format_search_result(self, search_result):
# this confidence value is relative to the list position # build the remote id from the openlibrary key
confidence = 1 / (idx + 1) key = self.books_url + search_result["key"]
author = search_result.get("author_name") or ["Unknown"]
yield SearchResult( cover_blob = search_result.get("cover_i")
title=search_result.get("title"), cover = self.get_cover_url([cover_blob], size="M") if cover_blob else None
key=key, return SearchResult(
author=", ".join(author), title=search_result.get("title"),
connector=self, key=key,
year=search_result.get("first_publish_year"), author=", ".join(author),
cover=cover, connector=self,
confidence=confidence, year=search_result.get("first_publish_year"),
) cover=cover,
)
def parse_isbn_search_data(self, data): def parse_isbn_search_data(self, data):
for search_result in list(data.values()): return list(data.values())
# build the remote id from the openlibrary key
key = self.books_url + search_result["key"] def format_isbn_search_result(self, search_result):
authors = search_result.get("authors") or [{"name": "Unknown"}] # build the remote id from the openlibrary key
author_names = [author.get("name") for author in authors] key = self.books_url + search_result["key"]
yield SearchResult( authors = search_result.get("authors") or [{"name": "Unknown"}]
title=search_result.get("title"), author_names = [author.get("name") for author in authors]
key=key, return SearchResult(
author=", ".join(author_names), title=search_result.get("title"),
connector=self, key=key,
year=search_result.get("publish_date"), author=", ".join(author_names),
) connector=self,
year=search_result.get("publish_date"),
)
def load_edition_data(self, olkey): def load_edition_data(self, olkey):
"""query openlibrary for editions of a work""" """ query openlibrary for editions of a work """
url = f"{self.books_url}/works/{olkey}/editions" url = "%s/works/%s/editions" % (self.books_url, olkey)
return self.get_book_data(url) return get_data(url)
def expand_book_data(self, book): def expand_book_data(self, book):
work = book work = book
@ -214,7 +166,7 @@ class Connector(AbstractConnector):
def ignore_edition(edition_data): def ignore_edition(edition_data):
"""don't load a million editions that have no metadata""" """ don't load a million editions that have no metadata """
# an isbn, we love to see it # an isbn, we love to see it
if edition_data.get("isbn_13") or edition_data.get("isbn_10"): if edition_data.get("isbn_13") or edition_data.get("isbn_10"):
return False return False
@ -233,61 +185,27 @@ def ignore_edition(edition_data):
def get_description(description_blob): def get_description(description_blob):
"""descriptions can be a string or a dict""" """ descriptions can be a string or a dict """
if isinstance(description_blob, dict): if isinstance(description_blob, dict):
return description_blob.get("value") return description_blob.get("value")
return description_blob return description_blob
def get_openlibrary_key(key): def get_openlibrary_key(key):
"""convert /books/OL27320736M into OL27320736M""" """ convert /books/OL27320736M into OL27320736M """
return key.split("/")[-1] return key.split("/")[-1]
def get_languages(language_blob): def get_languages(language_blob):
"""/language/eng -> English""" """ /language/eng -> English """
langs = [] langs = []
for lang in language_blob: for lang in language_blob:
langs.append(languages.get(lang.get("key", ""), None)) langs.append(languages.get(lang.get("key", ""), None))
return langs return langs
def get_dict_field(blob, field_name):
"""extract the isni from the remote id data for the author"""
if not blob or not isinstance(blob, dict):
return None
return blob.get(field_name)
def get_wikipedia_link(links):
"""extract wikipedia links"""
if not isinstance(links, list):
return None
for link in links:
if not isinstance(link, dict):
continue
if link.get("title") == "wikipedia":
return link.get("url")
return None
def get_inventaire_id(links):
"""extract and format inventaire ids"""
if not isinstance(links, list):
return None
for link in links:
if not isinstance(link, dict):
continue
if link.get("title") == "inventaire.io":
iv_link = link.get("url")
return iv_link.split("/")[-1]
return None
def pick_default_edition(options): def pick_default_edition(options):
"""favor physical copies with covers in english""" """ favor physical copies with covers in english """
if not options: if not options:
return None return None
if len(options) == 1: if len(options) == 1:
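The cover handling in get_cover_url above reduces to simple string assembly. A standalone sketch, using the public Open Library covers host and a made-up cover id as assumptions:

covers_url = "https://covers.openlibrary.org"  # assumed value of self.covers_url
cover_blob = [8231856]  # made-up example; OL returns a list of numeric cover ids

def get_cover_url(cover_blob, size="L"):
    """mirror of the connector method: take the first id and format the image path"""
    if not cover_blob:
        return None
    cover_id = cover_blob[0]
    return f"{covers_url}/b/id/{cover_id}-{size}.jpg"

print(get_cover_url(cover_blob, size="M"))
# https://covers.openlibrary.org/b/id/8231856-M.jpg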

View file

@ -0,0 +1,148 @@
""" using a bookwyrm instance as a source of book data """
from functools import reduce
import operator
from django.contrib.postgres.search import SearchRank, SearchVector
from django.db.models import Count, F, Q
from bookwyrm import models
from .abstract_connector import AbstractConnector, SearchResult
class Connector(AbstractConnector):
""" instantiate a connector """
# pylint: disable=arguments-differ
def search(self, query, min_confidence=0.1, raw=False):
""" search your local database """
if not query:
return []
# first, try searching unique identifiers
results = search_identifiers(query)
if not results:
# then try searching title/author
results = search_title_author(query, min_confidence)
search_results = []
for result in results:
if raw:
search_results.append(result)
else:
search_results.append(self.format_search_result(result))
if len(search_results) >= 10:
break
if not raw:
search_results.sort(key=lambda r: r.confidence, reverse=True)
return search_results
def isbn_search(self, query, raw=False):
""" search your local database """
if not query:
return []
filters = [{f: query} for f in ["isbn_10", "isbn_13"]]
results = models.Edition.objects.filter(
reduce(operator.or_, (Q(**f) for f in filters))
).distinct()
# when there are multiple editions of the same work, pick the default.
# it would be odd for this to happen.
results = results.filter(parent_work__default_edition__id=F("id")) or results
search_results = []
for result in results:
if raw:
search_results.append(result)
else:
search_results.append(self.format_search_result(result))
if len(search_results) >= 10:
break
return search_results
def format_search_result(self, search_result):
return SearchResult(
title=search_result.title,
key=search_result.remote_id,
author=search_result.author_text,
year=search_result.published_date.year
if search_result.published_date
else None,
connector=self,
cover="%s%s" % (self.covers_url, search_result.cover),
confidence=search_result.rank if hasattr(search_result, "rank") else 1,
)
def format_isbn_search_result(self, search_result):
return self.format_search_result(search_result)
def is_work_data(self, data):
pass
def get_edition_from_work_data(self, data):
pass
def get_work_from_edition_data(self, data):
pass
def get_authors_from_data(self, data):
return None
def parse_isbn_search_data(self, data):
""" it's already in the right format, don't even worry about it """
return data
def parse_search_data(self, data):
""" it's already in the right format, don't even worry about it """
return data
def expand_book_data(self, book):
pass
def search_identifiers(query):
""" tries remote_id, isbn; defined as dedupe fields on the model """
filters = [
{f.name: query}
for f in models.Edition._meta.get_fields()
if hasattr(f, "deduplication_field") and f.deduplication_field
]
results = models.Edition.objects.filter(
reduce(operator.or_, (Q(**f) for f in filters))
).distinct()
# when there are multiple editions of the same work, pick the default.
# it would be odd for this to happen.
return results.filter(parent_work__default_edition__id=F("id")) or results
def search_title_author(query, min_confidence):
""" searches for title and author """
vector = (
SearchVector("title", weight="A")
+ SearchVector("subtitle", weight="B")
+ SearchVector("authors__name", weight="C")
+ SearchVector("series", weight="D")
)
results = (
models.Edition.objects.annotate(search=vector)
.annotate(rank=SearchRank(vector, query))
.filter(rank__gt=min_confidence)
.order_by("-rank")
)
# when there are multiple editions of the same work, pick the closest
editions_of_work = (
results.values("parent_work")
.annotate(Count("parent_work"))
.values_list("parent_work")
)
for work_id in set(editions_of_work):
editions = results.filter(parent_work=work_id)
default = editions.filter(parent_work__default_edition=F("id"))
default_rank = default.first().rank if default.exists() else 0
# if multiple books have the top rank, pick the default edition
if default_rank == editions.first().rank:
yield default.first()
else:
yield editions.first()
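The title/author search above leans on Postgres full-text ranking. A sketch of the same query pattern, assuming a configured Django project with the bookwyrm models and a Postgres database available:

from django.contrib.postgres.search import SearchRank, SearchVector
from django.db.models import F

from bookwyrm import models

# weight title matches highest, then subtitle, author name, and series
vector = (
    SearchVector("title", weight="A")
    + SearchVector("subtitle", weight="B")
    + SearchVector("authors__name", weight="C")
    + SearchVector("series", weight="D")
)
hits = (
    models.Edition.objects.annotate(rank=SearchRank(vector, "parable of the sower"))
    .filter(rank__gt=0.1)  # the min_confidence cutoff
    .order_by("-rank")  # best match first
)
# prefer each work's default edition, as search_identifiers does above
hits = hits.filter(parent_work__default_edition__id=F("id")) or hits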

View file

@ -1,3 +1,3 @@
""" settings book data connectors """ """ settings book data connectors """
CONNECTORS = ["openlibrary", "inventaire", "bookwyrm_connector"] CONNECTORS = ["openlibrary", "self_connector", "bookwyrm_connector"]

View file

@ -1,31 +1,7 @@
""" customize the info available in context for rendering templates """ """ customize the info available in context for rendering templates """
from bookwyrm import models, settings from bookwyrm import models
def site_settings(request): # pylint: disable=unused-argument def site_settings(request): # pylint: disable=unused-argument
"""include the custom info about the site""" """ include the custom info about the site """
request_protocol = "https://" return {"site": models.SiteSettings.objects.get()}
if not request.is_secure():
request_protocol = "http://"
site = models.SiteSettings.objects.get()
theme = "css/themes/bookwyrm-light.scss"
if (
hasattr(request, "user")
and request.user.is_authenticated
and request.user.theme
):
theme = request.user.theme.path
elif site.default_theme:
theme = site.default_theme.path
return {
"site": site,
"site_theme": theme,
"active_announcements": models.Announcement.active_announcements(),
"thumbnail_generation_enabled": settings.ENABLE_THUMBNAIL_GENERATION,
"media_full_url": settings.MEDIA_FULL_URL,
"preview_images_enabled": settings.ENABLE_PREVIEW_IMAGES,
"request_protocol": request_protocol,
"js_cache": settings.JS_CACHE,
}
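For a context processor like site_settings above to run on every request, it has to be registered in the template settings. A hedged sketch of the relevant TEMPLATES entry, assuming the function lives at bookwyrm.context_processors.site_settings (the module path is not shown in this diff):

# settings.py excerpt -- "context_processors" is where the function gets registered
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "bookwyrm.context_processors.site_settings",  # assumed module path
            ],
        },
    },
]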

View file

@ -8,70 +8,59 @@ from bookwyrm.settings import DOMAIN
def email_data(): def email_data():
"""fields every email needs""" """ fields every email needs """
site = models.SiteSettings.objects.get() site = models.SiteSettings.objects.get()
if site.logo_small:
logo_path = "/images/{}".format(site.logo_small.url)
else:
logo_path = "/static/images/logo-small.png"
return { return {
"site_name": site.name, "site_name": site.name,
"logo": site.logo_small_url, "logo": logo_path,
"domain": DOMAIN, "domain": DOMAIN,
"user": None, "user": None,
} }
def email_confirmation_email(user):
"""newly registered users confirm email address"""
data = email_data()
data["confirmation_code"] = user.confirmation_code
data["confirmation_link"] = user.confirmation_link
send_email.delay(user.email, *format_email("confirm", data))
def invite_email(invite_request): def invite_email(invite_request):
"""send out an invite code""" """ send out an invite code """
data = email_data() data = email_data()
data["invite_link"] = invite_request.invite.link data["invite_link"] = invite_request.invite.link
send_email.delay(invite_request.email, *format_email("invite", data)) send_email.delay(invite_request.email, *format_email("invite", data))
def password_reset_email(reset_code): def password_reset_email(reset_code):
"""generate a password reset email""" """ generate a password reset email """
data = email_data() data = email_data()
data["reset_link"] = reset_code.link data["reset_link"] = reset_code.link
data["user"] = reset_code.user.display_name data["user"] = reset_code.user.display_name
send_email.delay(reset_code.user.email, *format_email("password_reset", data)) send_email.delay(reset_code.user.email, *format_email("password_reset", data))
def moderation_report_email(report):
"""a report was created"""
data = email_data()
data["reporter"] = report.reporter.localname or report.reporter.username
data["reportee"] = report.user.localname or report.user.username
data["report_link"] = report.remote_id
for admin in models.User.objects.filter(
groups__name__in=["admin", "moderator"]
).distinct():
data["user"] = admin.display_name
send_email.delay(admin.email, *format_email("moderation_report", data))
def format_email(email_name, data): def format_email(email_name, data):
"""render the email templates""" """ render the email templates """
subject = get_template(f"email/{email_name}/subject.html").render(data).strip() subject = (
get_template("email/{}/subject.html".format(email_name)).render(data).strip()
)
html_content = ( html_content = (
get_template(f"email/{email_name}/html_content.html").render(data).strip() get_template("email/{}/html_content.html".format(email_name))
.render(data)
.strip()
) )
text_content = ( text_content = (
get_template(f"email/{email_name}/text_content.html").render(data).strip() get_template("email/{}/text_content.html".format(email_name))
.render(data)
.strip()
) )
return (subject, html_content, text_content) return (subject, html_content, text_content)
@app.task(queue="high_priority") @app.task
def send_email(recipient, subject, html_content, text_content): def send_email(recipient, subject, html_content, text_content):
"""use a task to send the email""" """ use a task to send the email """
email = EmailMultiAlternatives( email = EmailMultiAlternatives(
subject, text_content, settings.EMAIL_SENDER, [recipient] subject, text_content, settings.DEFAULT_FROM_EMAIL, [recipient]
) )
email.attach_alternative(html_content, "text/html") email.attach_alternative(html_content, "text/html")
email.send() email.send()
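The three get_template calls in format_email follow a single naming convention. A tiny standalone sketch of the paths it resolves for a given email name (the helper below is illustrative only, not part of the module):

def template_paths(email_name):
    """the three templates rendered for every outgoing email"""
    base = f"email/{email_name}"
    return (
        f"{base}/subject.html",
        f"{base}/html_content.html",
        f"{base}/text_content.html",
    )

print(template_paths("invite"))
# ('email/invite/subject.html', 'email/invite/html_content.html', 'email/invite/text_content.html')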

318
bookwyrm/forms.py Normal file
View file

@ -0,0 +1,318 @@
""" using django model forms """
import datetime
from collections import defaultdict
from django import forms
from django.forms import ModelForm, PasswordInput, widgets, ChoiceField
from django.forms.widgets import Textarea
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from bookwyrm import models
class CustomForm(ModelForm):
""" add css classes to the forms """
def __init__(self, *args, **kwargs):
css_classes = defaultdict(lambda: "")
css_classes["text"] = "input"
css_classes["password"] = "input"
css_classes["email"] = "input"
css_classes["number"] = "input"
css_classes["checkbox"] = "checkbox"
css_classes["textarea"] = "textarea"
super(CustomForm, self).__init__(*args, **kwargs)
for visible in self.visible_fields():
if hasattr(visible.field.widget, "input_type"):
input_type = visible.field.widget.input_type
if isinstance(visible.field.widget, Textarea):
input_type = "textarea"
visible.field.widget.attrs["cols"] = None
visible.field.widget.attrs["rows"] = None
visible.field.widget.attrs["class"] = css_classes[input_type]
# pylint: disable=missing-class-docstring
class LoginForm(CustomForm):
class Meta:
model = models.User
fields = ["localname", "password"]
help_texts = {f: None for f in fields}
widgets = {
"password": PasswordInput(),
}
class RegisterForm(CustomForm):
class Meta:
model = models.User
fields = ["localname", "email", "password"]
help_texts = {f: None for f in fields}
widgets = {"password": PasswordInput()}
class RatingForm(CustomForm):
class Meta:
model = models.ReviewRating
fields = ["user", "book", "rating", "privacy"]
class ReviewForm(CustomForm):
class Meta:
model = models.Review
fields = [
"user",
"book",
"name",
"content",
"rating",
"content_warning",
"sensitive",
"privacy",
]
class CommentForm(CustomForm):
class Meta:
model = models.Comment
fields = [
"user",
"book",
"content",
"content_warning",
"sensitive",
"privacy",
"progress",
"progress_mode",
]
class QuotationForm(CustomForm):
class Meta:
model = models.Quotation
fields = [
"user",
"book",
"quote",
"content",
"content_warning",
"sensitive",
"privacy",
]
class ReplyForm(CustomForm):
class Meta:
model = models.Status
fields = [
"user",
"content",
"content_warning",
"sensitive",
"reply_parent",
"privacy",
]
class StatusForm(CustomForm):
class Meta:
model = models.Status
fields = ["user", "content", "content_warning", "sensitive", "privacy"]
class ListStatusForm(CustomForm):
class Meta:
model = models.GeneratedNote
fields = ["user", "content", "privacy", "mention_books"]
class EditUserForm(CustomForm):
class Meta:
model = models.User
fields = [
"avatar",
"name",
"email",
"summary",
"show_goal",
"manually_approves_followers",
"discoverable",
"preferred_timezone",
]
help_texts = {f: None for f in fields}
class LimitedEditUserForm(CustomForm):
class Meta:
model = models.User
fields = [
"avatar",
"name",
"summary",
"manually_approves_followers",
"discoverable",
]
help_texts = {f: None for f in fields}
class UserGroupForm(CustomForm):
class Meta:
model = models.User
fields = ["groups"]
class TagForm(CustomForm):
class Meta:
model = models.Tag
fields = ["name"]
help_texts = {f: None for f in fields}
labels = {"name": "Add a tag"}
class CoverForm(CustomForm):
class Meta:
model = models.Book
fields = ["cover"]
help_texts = {f: None for f in fields}
class EditionForm(CustomForm):
class Meta:
model = models.Edition
exclude = [
"remote_id",
"origin_id",
"created_date",
"updated_date",
"edition_rank",
"authors",
"parent_work",
"shelves",
"subjects", # TODO
"subject_places", # TODO
"connector",
]
class AuthorForm(CustomForm):
class Meta:
model = models.Author
exclude = [
"remote_id",
"origin_id",
"created_date",
"updated_date",
]
class ImportForm(forms.Form):
csv_file = forms.FileField()
class ExpiryWidget(widgets.Select):
def value_from_datadict(self, data, files, name):
""" human-readable exiration time buckets """
selected_string = super().value_from_datadict(data, files, name)
if selected_string == "day":
interval = datetime.timedelta(days=1)
elif selected_string == "week":
interval = datetime.timedelta(days=7)
elif selected_string == "month":
interval = datetime.timedelta(days=31) # Close enough?
elif selected_string == "forever":
return None
else:
return selected_string  # This will raise a validation error
return timezone.now() + interval
class InviteRequestForm(CustomForm):
def clean(self):
""" make sure the email isn't in use by a registered user """
cleaned_data = super().clean()
email = cleaned_data.get("email")
if email and models.User.objects.filter(email=email).exists():
self.add_error("email", _("A user with this email already exists."))
class Meta:
model = models.InviteRequest
fields = ["email"]
class CreateInviteForm(CustomForm):
class Meta:
model = models.SiteInvite
exclude = ["code", "user", "times_used", "invitees"]
widgets = {
"expiry": ExpiryWidget(
choices=[
("day", _("One Day")),
("week", _("One Week")),
("month", _("One Month")),
("forever", _("Does Not Expire")),
]
),
"use_limit": widgets.Select(
choices=[
(i, _("%(count)d uses" % {"count": i}))
for i in [1, 5, 10, 25, 50, 100]
]
+ [(None, _("Unlimited"))]
),
}
class ShelfForm(CustomForm):
class Meta:
model = models.Shelf
fields = ["user", "name", "privacy"]
class GoalForm(CustomForm):
class Meta:
model = models.AnnualGoal
fields = ["user", "year", "goal", "privacy"]
class SiteForm(CustomForm):
class Meta:
model = models.SiteSettings
exclude = []
class ListForm(CustomForm):
class Meta:
model = models.List
fields = ["user", "name", "description", "curation", "privacy"]
class ReportForm(CustomForm):
class Meta:
model = models.Report
fields = ["user", "reporter", "statuses", "note"]
class ServerForm(CustomForm):
class Meta:
model = models.FederatedServer
exclude = ["remote_id"]
class SortListForm(forms.Form):
sort_by = ChoiceField(
choices=(
("order", _("List Order")),
("title", _("Book Title")),
("rating", _("Rating")),
),
label=_("Sort By"),
)
direction = ChoiceField(
choices=(
("ascending", _("Ascending")),
("descending", _("Descending")),
),
)
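The ExpiryWidget above maps human-readable invite choices onto timedeltas. A standalone restatement of that bucket logic, stripped of the Django widget machinery (the names here are made up for illustration):

import datetime

BUCKETS = {
    "day": datetime.timedelta(days=1),
    "week": datetime.timedelta(days=7),
    "month": datetime.timedelta(days=31),  # close enough, as the widget notes
}

def expiry_from_choice(choice, now=None):
    """mirror of ExpiryWidget.value_from_datadict: bucket names, 'forever', or pass-through"""
    now = now or datetime.datetime.now()
    if choice == "forever":
        return None  # the invite never expires
    interval = BUCKETS.get(choice)
    if interval is None:
        return choice  # unknown values pass through and fail validation later
    return now + interval

print(expiry_from_choice("week", now=datetime.datetime(2021, 4, 22)))
# 2021-04-29 00:00:00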

View file

@ -1,12 +0,0 @@
""" make forms available to the app """
# site admin
from .admin import *
from .author import *
from .books import *
from .edit_user import *
from .forms import *
from .groups import *
from .landing import *
from .links import *
from .lists import *
from .status import *

View file

@ -1,141 +0,0 @@
""" using django model forms """
import datetime
from django import forms
from django.forms import widgets
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from django_celery_beat.models import IntervalSchedule
from bookwyrm import models
from .custom_form import CustomForm
# pylint: disable=missing-class-docstring
class ExpiryWidget(widgets.Select):
def value_from_datadict(self, data, files, name):
"""human-readable exiration time buckets"""
selected_string = super().value_from_datadict(data, files, name)
if selected_string == "day":
interval = datetime.timedelta(days=1)
elif selected_string == "week":
interval = datetime.timedelta(days=7)
elif selected_string == "month":
interval = datetime.timedelta(days=31) # Close enough?
elif selected_string == "forever":
return None
else:
return selected_string  # This will raise a validation error
return timezone.now() + interval
class CreateInviteForm(CustomForm):
class Meta:
model = models.SiteInvite
exclude = ["code", "user", "times_used", "invitees"]
widgets = {
"expiry": ExpiryWidget(
choices=[
("day", _("One Day")),
("week", _("One Week")),
("month", _("One Month")),
("forever", _("Does Not Expire")),
]
),
"use_limit": widgets.Select(
choices=[(i, _(f"{i} uses")) for i in [1, 5, 10, 25, 50, 100]]
+ [(None, _("Unlimited"))]
),
}
class SiteForm(CustomForm):
class Meta:
model = models.SiteSettings
exclude = ["admin_code", "install_mode"]
widgets = {
"instance_short_description": forms.TextInput(
attrs={"aria-describedby": "desc_instance_short_description"}
),
"require_confirm_email": forms.CheckboxInput(
attrs={"aria-describedby": "desc_require_confirm_email"}
),
"invite_request_text": forms.Textarea(
attrs={"aria-describedby": "desc_invite_request_text"}
),
}
class ThemeForm(CustomForm):
class Meta:
model = models.Theme
fields = ["name", "path"]
widgets = {
"name": forms.TextInput(attrs={"aria-describedby": "desc_name"}),
"path": forms.TextInput(
attrs={
"aria-describedby": "desc_path",
"placeholder": "css/themes/theme-name.scss",
}
),
}
class AnnouncementForm(CustomForm):
class Meta:
model = models.Announcement
exclude = ["remote_id"]
widgets = {
"preview": forms.TextInput(attrs={"aria-describedby": "desc_preview"}),
"content": forms.Textarea(attrs={"aria-describedby": "desc_content"}),
"event_date": forms.SelectDateWidget(
attrs={"aria-describedby": "desc_event_date"}
),
"start_date": forms.SelectDateWidget(
attrs={"aria-describedby": "desc_start_date"}
),
"end_date": forms.SelectDateWidget(
attrs={"aria-describedby": "desc_end_date"}
),
"active": forms.CheckboxInput(attrs={"aria-describedby": "desc_active"}),
}
class EmailBlocklistForm(CustomForm):
class Meta:
model = models.EmailBlocklist
fields = ["domain"]
widgets = {
"avatar": forms.TextInput(attrs={"aria-describedby": "desc_domain"}),
}
class IPBlocklistForm(CustomForm):
class Meta:
model = models.IPBlocklist
fields = ["address"]
class ServerForm(CustomForm):
class Meta:
model = models.FederatedServer
exclude = ["remote_id"]
class AutoModRuleForm(CustomForm):
class Meta:
model = models.AutoMod
fields = ["string_match", "flag_users", "flag_statuses", "created_by"]
class IntervalScheduleForm(CustomForm):
class Meta:
model = IntervalSchedule
fields = ["every", "period"]
widgets = {
"every": forms.NumberInput(attrs={"aria-describedby": "desc_every"}),
"period": forms.Select(attrs={"aria-describedby": "desc_period"}),
}

View file

@ -1,47 +0,0 @@
""" using django model forms """
from django import forms
from bookwyrm import models
from .custom_form import CustomForm
# pylint: disable=missing-class-docstring
class AuthorForm(CustomForm):
class Meta:
model = models.Author
fields = [
"last_edited_by",
"name",
"aliases",
"bio",
"wikipedia_link",
"born",
"died",
"openlibrary_key",
"inventaire_id",
"librarything_key",
"goodreads_key",
"isni",
]
widgets = {
"name": forms.TextInput(attrs={"aria-describedby": "desc_name"}),
"aliases": forms.TextInput(attrs={"aria-describedby": "desc_aliases"}),
"bio": forms.Textarea(attrs={"aria-describedby": "desc_bio"}),
"wikipedia_link": forms.TextInput(
attrs={"aria-describedby": "desc_wikipedia_link"}
),
"born": forms.SelectDateWidget(attrs={"aria-describedby": "desc_born"}),
"died": forms.SelectDateWidget(attrs={"aria-describedby": "desc_died"}),
"oepnlibrary_key": forms.TextInput(
attrs={"aria-describedby": "desc_oepnlibrary_key"}
),
"inventaire_id": forms.TextInput(
attrs={"aria-describedby": "desc_inventaire_id"}
),
"librarything_key": forms.TextInput(
attrs={"aria-describedby": "desc_librarything_key"}
),
"goodreads_key": forms.TextInput(
attrs={"aria-describedby": "desc_goodreads_key"}
),
}

View file

@ -1,104 +0,0 @@
""" using django model forms """
from django import forms
from bookwyrm import models
from bookwyrm.models.fields import ClearableFileInputWithWarning
from .custom_form import CustomForm
from .widgets import ArrayWidget, SelectDateWidget, Select
# pylint: disable=missing-class-docstring
class CoverForm(CustomForm):
class Meta:
model = models.Book
fields = ["cover"]
help_texts = {f: None for f in fields}
class EditionForm(CustomForm):
class Meta:
model = models.Edition
exclude = [
"remote_id",
"origin_id",
"created_date",
"updated_date",
"edition_rank",
"authors",
"parent_work",
"shelves",
"connector",
"search_vector",
"links",
"file_links",
]
widgets = {
"title": forms.TextInput(attrs={"aria-describedby": "desc_title"}),
"subtitle": forms.TextInput(attrs={"aria-describedby": "desc_subtitle"}),
"description": forms.Textarea(
attrs={"aria-describedby": "desc_description"}
),
"series": forms.TextInput(attrs={"aria-describedby": "desc_series"}),
"series_number": forms.TextInput(
attrs={"aria-describedby": "desc_series_number"}
),
"subjects": ArrayWidget(),
"languages": forms.TextInput(
attrs={"aria-describedby": "desc_languages_help desc_languages"}
),
"publishers": forms.TextInput(
attrs={"aria-describedby": "desc_publishers_help desc_publishers"}
),
"first_published_date": SelectDateWidget(
attrs={"aria-describedby": "desc_first_published_date"}
),
"published_date": SelectDateWidget(
attrs={"aria-describedby": "desc_published_date"}
),
"cover": ClearableFileInputWithWarning(
attrs={"aria-describedby": "desc_cover"}
),
"physical_format": Select(
attrs={"aria-describedby": "desc_physical_format"}
),
"physical_format_detail": forms.TextInput(
attrs={"aria-describedby": "desc_physical_format_detail"}
),
"pages": forms.NumberInput(attrs={"aria-describedby": "desc_pages"}),
"isbn_13": forms.TextInput(attrs={"aria-describedby": "desc_isbn_13"}),
"isbn_10": forms.TextInput(attrs={"aria-describedby": "desc_isbn_10"}),
"openlibrary_key": forms.TextInput(
attrs={"aria-describedby": "desc_openlibrary_key"}
),
"inventaire_id": forms.TextInput(
attrs={"aria-describedby": "desc_inventaire_id"}
),
"oclc_number": forms.TextInput(
attrs={"aria-describedby": "desc_oclc_number"}
),
"ASIN": forms.TextInput(attrs={"aria-describedby": "desc_ASIN"}),
}
class EditionFromWorkForm(CustomForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# make all fields hidden
for visible in self.visible_fields():
visible.field.widget = forms.HiddenInput()
class Meta:
model = models.Work
fields = [
"title",
"subtitle",
"authors",
"description",
"languages",
"series",
"series_number",
"subjects",
"subject_places",
"cover",
"first_published_date",
]

View file

@ -1,26 +0,0 @@
""" Overrides django's default form class """
from collections import defaultdict
from django.forms import ModelForm
from django.forms.widgets import Textarea
class CustomForm(ModelForm):
"""add css classes to the forms"""
def __init__(self, *args, **kwargs):
css_classes = defaultdict(lambda: "")
css_classes["text"] = "input"
css_classes["password"] = "input"
css_classes["email"] = "input"
css_classes["number"] = "input"
css_classes["checkbox"] = "checkbox"
css_classes["textarea"] = "textarea"
# pylint: disable=super-with-arguments
super(CustomForm, self).__init__(*args, **kwargs)
for visible in self.visible_fields():
if hasattr(visible.field.widget, "input_type"):
input_type = visible.field.widget.input_type
if isinstance(visible.field.widget, Textarea):
input_type = "textarea"
visible.field.widget.attrs["rows"] = 5
visible.field.widget.attrs["class"] = css_classes[input_type]

View file

@ -1,68 +0,0 @@
""" using django model forms """
from django import forms
from bookwyrm import models
from bookwyrm.models.fields import ClearableFileInputWithWarning
from .custom_form import CustomForm
# pylint: disable=missing-class-docstring
class EditUserForm(CustomForm):
class Meta:
model = models.User
fields = [
"avatar",
"name",
"email",
"summary",
"show_goal",
"show_suggested_users",
"manually_approves_followers",
"default_post_privacy",
"discoverable",
"hide_follows",
"preferred_timezone",
"preferred_language",
"theme",
]
help_texts = {f: None for f in fields}
widgets = {
"avatar": ClearableFileInputWithWarning(
attrs={"aria-describedby": "desc_avatar"}
),
"name": forms.TextInput(attrs={"aria-describedby": "desc_name"}),
"summary": forms.Textarea(attrs={"aria-describedby": "desc_summary"}),
"email": forms.EmailInput(attrs={"aria-describedby": "desc_email"}),
"discoverable": forms.CheckboxInput(
attrs={"aria-describedby": "desc_discoverable"}
),
}
class LimitedEditUserForm(CustomForm):
class Meta:
model = models.User
fields = [
"avatar",
"name",
"summary",
"manually_approves_followers",
"discoverable",
]
help_texts = {f: None for f in fields}
widgets = {
"avatar": ClearableFileInputWithWarning(
attrs={"aria-describedby": "desc_avatar"}
),
"name": forms.TextInput(attrs={"aria-describedby": "desc_name"}),
"summary": forms.Textarea(attrs={"aria-describedby": "desc_summary"}),
"discoverable": forms.CheckboxInput(
attrs={"aria-describedby": "desc_discoverable"}
),
}
class DeleteUserForm(CustomForm):
class Meta:
model = models.User
fields = ["password"]

View file

@ -1,64 +0,0 @@
""" using django model forms """
from django import forms
from django.forms import widgets
from django.utils.translation import gettext_lazy as _
from bookwyrm import models
from bookwyrm.models.user import FeedFilterChoices
from .custom_form import CustomForm
# pylint: disable=missing-class-docstring
class FeedStatusTypesForm(CustomForm):
class Meta:
model = models.User
fields = ["feed_status_types"]
help_texts = {f: None for f in fields}
widgets = {
"feed_status_types": widgets.CheckboxSelectMultiple(
choices=FeedFilterChoices,
),
}
class ImportForm(forms.Form):
csv_file = forms.FileField()
class ShelfForm(CustomForm):
class Meta:
model = models.Shelf
fields = ["user", "name", "privacy", "description"]
class GoalForm(CustomForm):
class Meta:
model = models.AnnualGoal
fields = ["user", "year", "goal", "privacy"]
class ReportForm(CustomForm):
class Meta:
model = models.Report
fields = ["user", "reporter", "status", "links", "note"]
class ReadThroughForm(CustomForm):
def clean(self):
"""don't let readthroughs end before they start"""
cleaned_data = super().clean()
start_date = cleaned_data.get("start_date")
finish_date = cleaned_data.get("finish_date")
if start_date and finish_date and start_date > finish_date:
self.add_error(
"finish_date", _("Reading finish date cannot be before start date.")
)
stopped_date = cleaned_data.get("stopped_date")
if start_date and stopped_date and start_date > stopped_date:
self.add_error(
"stopped_date", _("Reading stopped date cannot be before start date.")
)
class Meta:
model = models.ReadThrough
fields = ["user", "book", "start_date", "finish_date", "stopped_date"]

View file

@ -1,16 +0,0 @@
""" using django model forms """
from bookwyrm import models
from .custom_form import CustomForm
# pylint: disable=missing-class-docstring
class UserGroupForm(CustomForm):
class Meta:
model = models.User
fields = ["groups"]
class GroupForm(CustomForm):
class Meta:
model = models.Group
fields = ["user", "privacy", "name", "description"]

View file

@ -1,45 +0,0 @@
""" Forms for the landing pages """
from django.forms import PasswordInput
from django.utils.translation import gettext_lazy as _
from bookwyrm import models
from .custom_form import CustomForm
# pylint: disable=missing-class-docstring
class LoginForm(CustomForm):
class Meta:
model = models.User
fields = ["localname", "password"]
help_texts = {f: None for f in fields}
widgets = {
"password": PasswordInput(),
}
class RegisterForm(CustomForm):
class Meta:
model = models.User
fields = ["localname", "email", "password"]
help_texts = {f: None for f in fields}
widgets = {"password": PasswordInput()}
def clean(self):
"""Check if the username is taken"""
cleaned_data = super().clean()
localname = cleaned_data.get("localname").strip()
if models.User.objects.filter(localname=localname).first():
self.add_error("localname", _("User with this username already exists"))
class InviteRequestForm(CustomForm):
def clean(self):
"""make sure the email isn't in use by a registered user"""
cleaned_data = super().clean()
email = cleaned_data.get("email")
if email and models.User.objects.filter(email=email).exists():
self.add_error("email", _("A user with this email already exists."))
class Meta:
model = models.InviteRequest
fields = ["email", "answer"]

View file

@ -1,48 +0,0 @@
""" using django model forms """
from urllib.parse import urlparse
from django.utils.translation import gettext_lazy as _
from bookwyrm import models
from .custom_form import CustomForm
# pylint: disable=missing-class-docstring
class LinkDomainForm(CustomForm):
class Meta:
model = models.LinkDomain
fields = ["name"]
class FileLinkForm(CustomForm):
class Meta:
model = models.FileLink
fields = ["url", "filetype", "availability", "book", "added_by"]
def clean(self):
"""make sure the domain isn't blocked or pending"""
cleaned_data = super().clean()
url = cleaned_data.get("url")
filetype = cleaned_data.get("filetype")
book = cleaned_data.get("book")
domain = urlparse(url).netloc
if models.LinkDomain.objects.filter(domain=domain).exists():
status = models.LinkDomain.objects.get(domain=domain).status
if status == "blocked":
# pylint: disable=line-too-long
self.add_error(
"url",
_(
"This domain is blocked. Please contact your administrator if you think this is an error."
),
)
elif models.FileLink.objects.filter(
url=url, book=book, filetype=filetype
).exists():
# pylint: disable=line-too-long
self.add_error(
"url",
_(
"This link with file type has already been added for this book. If it is not visible, the domain is still pending."
),
)
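The domain check in FileLinkForm.clean relies on urlparse to pull the host out of the submitted link. A quick standalone example (the URL is made up):

from urllib.parse import urlparse

url = "https://files.example.com/books/novel.epub"  # made-up link for illustration
domain = urlparse(url).netloc
print(domain)  # files.example.com
# this is the value matched against LinkDomain.domain to decide blocked/pending status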

View file

@ -1,37 +0,0 @@
""" using django model forms """
from django import forms
from django.forms import ChoiceField
from django.utils.translation import gettext_lazy as _
from bookwyrm import models
from .custom_form import CustomForm
# pylint: disable=missing-class-docstring
class ListForm(CustomForm):
class Meta:
model = models.List
fields = ["user", "name", "description", "curation", "privacy", "group"]
class ListItemForm(CustomForm):
class Meta:
model = models.ListItem
fields = ["user", "book", "book_list", "notes"]
class SortListForm(forms.Form):
sort_by = ChoiceField(
choices=(
("order", _("List Order")),
("title", _("Book Title")),
("rating", _("Rating")),
),
label=_("Sort By"),
)
direction = ChoiceField(
choices=(
("ascending", _("Ascending")),
("descending", _("Descending")),
),
)

View file

@ -1,82 +0,0 @@
""" using django model forms """
from bookwyrm import models
from .custom_form import CustomForm
# pylint: disable=missing-class-docstring
class RatingForm(CustomForm):
class Meta:
model = models.ReviewRating
fields = ["user", "book", "rating", "privacy"]
class ReviewForm(CustomForm):
class Meta:
model = models.Review
fields = [
"user",
"book",
"name",
"content",
"rating",
"content_warning",
"sensitive",
"privacy",
]
class CommentForm(CustomForm):
class Meta:
model = models.Comment
fields = [
"user",
"book",
"content",
"content_warning",
"sensitive",
"privacy",
"progress",
"progress_mode",
"reading_status",
]
class QuotationForm(CustomForm):
class Meta:
model = models.Quotation
fields = [
"user",
"book",
"quote",
"content",
"content_warning",
"sensitive",
"privacy",
"position",
"position_mode",
]
class ReplyForm(CustomForm):
class Meta:
model = models.Status
fields = [
"user",
"content",
"content_warning",
"sensitive",
"reply_parent",
"privacy",
]
class StatusForm(CustomForm):
class Meta:
model = models.Status
fields = ["user", "content", "content_warning", "sensitive", "privacy"]
class DirectForm(CustomForm):
class Meta:
model = models.Status
fields = ["user", "content", "content_warning", "sensitive", "privacy"]

View file

@ -1,70 +0,0 @@
""" using django model forms """
from django import forms
class ArrayWidget(forms.widgets.TextInput):
"""Inputs for postgres array fields"""
# pylint: disable=unused-argument
# pylint: disable=no-self-use
def value_from_datadict(self, data, files, name):
"""get all values for this name"""
return [i for i in data.getlist(name) if i]
class Select(forms.Select):
"""custom template for select widget"""
template_name = "widgets/select.html"
class SelectDateWidget(forms.SelectDateWidget):
"""
A widget that splits date input into two <select> boxes and a numerical year.
"""
template_name = "widgets/addon_multiwidget.html"
select_widget = Select
def get_context(self, name, value, attrs):
"""sets individual widgets"""
context = super().get_context(name, value, attrs)
date_context = {}
year_name = self.year_field % name
date_context["year"] = forms.NumberInput().get_context(
name=year_name,
value=context["widget"]["value"]["year"],
attrs={
**context["widget"]["attrs"],
"id": f"id_{year_name}",
"class": "input",
},
)
month_choices = list(self.months.items())
if not self.is_required:
month_choices.insert(0, self.month_none_value)
month_name = self.month_field % name
date_context["month"] = self.select_widget(
attrs, choices=month_choices
).get_context(
name=month_name,
value=context["widget"]["value"]["month"],
attrs={**context["widget"]["attrs"], "id": f"id_{month_name}"},
)
day_choices = [(i, i) for i in range(1, 32)]
if not self.is_required:
day_choices.insert(0, self.day_none_value)
day_name = self.day_field % name
date_context["day"] = self.select_widget(
attrs,
choices=day_choices,
).get_context(
name=day_name,
value=context["widget"]["value"]["day"],
attrs={**context["widget"]["attrs"], "id": f"id_{day_name}"},
)
subwidgets = []
for field in self._parse_date_fmt():
subwidgets.append(date_context[field]["widget"])
context["widget"]["subwidgets"] = subwidgets
return context

View file

@ -1,113 +0,0 @@
"""Generators for all the different thumbnail sizes"""
from imagekit import ImageSpec, register
from imagekit.processors import ResizeToFit
class BookXSmallWebp(ImageSpec):
"""Handles XSmall size in Webp format"""
processors = [ResizeToFit(80, 80)]
format = "WEBP"
options = {"quality": 95}
class BookXSmallJpg(ImageSpec):
"""Handles XSmall size in Jpeg format"""
processors = [ResizeToFit(80, 80)]
format = "JPEG"
options = {"quality": 95}
class BookSmallWebp(ImageSpec):
"""Handles Small size in Webp format"""
processors = [ResizeToFit(100, 100)]
format = "WEBP"
options = {"quality": 95}
class BookSmallJpg(ImageSpec):
"""Handles Small size in Jpeg format"""
processors = [ResizeToFit(100, 100)]
format = "JPEG"
options = {"quality": 95}
class BookMediumWebp(ImageSpec):
"""Handles Medium size in Webp format"""
processors = [ResizeToFit(150, 150)]
format = "WEBP"
options = {"quality": 95}
class BookMediumJpg(ImageSpec):
"""Handles Medium size in Jpeg format"""
processors = [ResizeToFit(150, 150)]
format = "JPEG"
options = {"quality": 95}
class BookLargeWebp(ImageSpec):
"""Handles Large size in Webp format"""
processors = [ResizeToFit(200, 200)]
format = "WEBP"
options = {"quality": 95}
class BookLargeJpg(ImageSpec):
"""Handles Large size in Jpeg format"""
processors = [ResizeToFit(200, 200)]
format = "JPEG"
options = {"quality": 95}
class BookXLargeWebp(ImageSpec):
"""Handles XLarge size in Webp format"""
processors = [ResizeToFit(250, 250)]
format = "WEBP"
options = {"quality": 95}
class BookXLargeJpg(ImageSpec):
"""Handles XLarge size in Jpeg format"""
processors = [ResizeToFit(250, 250)]
format = "JPEG"
options = {"quality": 95}
class BookXxLargeWebp(ImageSpec):
"""Handles XxLarge size in Webp format"""
processors = [ResizeToFit(500, 500)]
format = "WEBP"
options = {"quality": 95}
class BookXxLargeJpg(ImageSpec):
"""Handles XxLarge size in Jpeg format"""
processors = [ResizeToFit(500, 500)]
format = "JPEG"
options = {"quality": 95}
register.generator("bw:book:xsmall:webp", BookXSmallWebp)
register.generator("bw:book:xsmall:jpg", BookXSmallJpg)
register.generator("bw:book:small:webp", BookSmallWebp)
register.generator("bw:book:small:jpg", BookSmallJpg)
register.generator("bw:book:medium:webp", BookMediumWebp)
register.generator("bw:book:medium:jpg", BookMediumJpg)
register.generator("bw:book:large:webp", BookLargeWebp)
register.generator("bw:book:large:jpg", BookLargeJpg)
register.generator("bw:book:xlarge:webp", BookXLargeWebp)
register.generator("bw:book:xlarge:jpg", BookXLargeJpg)
register.generator("bw:book:xxlarge:webp", BookXxLargeWebp)
register.generator("bw:book:xxlarge:jpg", BookXxLargeJpg)

View file

@ -1,8 +1,5 @@
""" import classes """ """ import classes """
from .importer import Importer from .importer import Importer
from .calibre_import import CalibreImporter
from .goodreads_import import GoodreadsImporter from .goodreads_import import GoodreadsImporter
from .librarything_import import LibrarythingImporter from .librarything_import import LibrarythingImporter
from .openlibrary_import import OpenLibraryImporter
from .storygraph_import import StorygraphImporter

View file

@ -1,28 +0,0 @@
""" handle reading a csv from calibre """
from bookwyrm.models import Shelf
from . import Importer
class CalibreImporter(Importer):
"""csv downloads from Calibre"""
service = "Calibre"
def __init__(self, *args, **kwargs):
# Add timestamp to row_mappings_guesses for date_added to avoid
# integrity error
row_mappings_guesses = []
for field, mapping in self.row_mappings_guesses:
if field in ("date_added",):
row_mappings_guesses.append((field, mapping + ["timestamp"]))
else:
row_mappings_guesses.append((field, mapping))
self.row_mappings_guesses = row_mappings_guesses
super().__init__(*args, **kwargs)
def get_shelf(self, normalized_row):
# Calibre export does not indicate which shelf to use. Go with a default one for now
return Shelf.TO_READ

View file

@ -3,7 +3,14 @@ from . import Importer
class GoodreadsImporter(Importer): class GoodreadsImporter(Importer):
"""Goodreads is the default importer, thus Importer follows its structure. """GoodReads is the default importer, thus Importer follows its structure.
For a more complete example of overriding see librarything_import.py""" For a more complete example of overriding see librarything_import.py"""
service = "Goodreads" service = "GoodReads"
def parse_fields(self, entry):
""" handle the specific fields in goodreads csvs """
entry.update({"import_source": self.service})
# add missing 'Date Started' field
entry.update({"Date Started": None})
return entry

View file

@ -1,184 +1,106 @@
""" handle reading a csv from an external service, defaults are from Goodreads """ """ handle reading a csv from an external service, defaults are from GoodReads """
import csv import csv
import logging import logging
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from bookwyrm import models from bookwyrm import models
from bookwyrm.models import ImportJob, ImportItem from bookwyrm.models import ImportJob, ImportItem
from bookwyrm.tasks import app, LOW from bookwyrm.tasks import app
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class Importer: class Importer:
"""Generic class for csv data import from an outside service""" """ Generic class for csv data import from an outside service """
service = "Import" service = "Unknown"
delimiter = "," delimiter = ","
encoding = "UTF-8" encoding = "UTF-8"
mandatory_fields = ["Title", "Author"]
# these are from Goodreads
row_mappings_guesses = [
("id", ["id", "book id"]),
("title", ["title"]),
("authors", ["author", "authors", "primary author"]),
("isbn_10", ["isbn10", "isbn"]),
("isbn_13", ["isbn13", "isbn", "isbns"]),
("shelf", ["shelf", "exclusive shelf", "read status", "bookshelf"]),
("review_name", ["review name"]),
("review_body", ["my review", "review"]),
("rating", ["my rating", "rating", "star rating"]),
("date_added", ["date added", "entry date", "added"]),
("date_started", ["date started", "started"]),
("date_finished", ["date finished", "last date read", "date read", "finished"]),
]
date_fields = ["date_added", "date_started", "date_finished"]
shelf_mapping_guesses = {
"to-read": ["to-read", "want to read"],
"read": ["read", "already read"],
"reading": ["currently-reading", "reading", "currently reading"],
}
def create_job(self, user, csv_file, include_reviews, privacy): def create_job(self, user, csv_file, include_reviews, privacy):
"""check over a csv and creates a database entry for the job""" """ check over a csv and creates a database entry for the job"""
csv_reader = csv.DictReader(csv_file, delimiter=self.delimiter)
rows = enumerate(list(csv_reader))
job = ImportJob.objects.create( job = ImportJob.objects.create(
user=user, user=user, include_reviews=include_reviews, privacy=privacy
include_reviews=include_reviews,
privacy=privacy,
mappings=self.create_row_mappings(csv_reader.fieldnames),
source=self.service,
) )
for index, entry in enumerate(
for index, entry in rows: list(csv.DictReader(csv_file, delimiter=self.delimiter))
self.create_item(job, index, entry) ):
if not all(x in entry for x in self.mandatory_fields):
raise ValueError("Author and title must be in data.")
entry = self.parse_fields(entry)
self.save_item(job, index, entry)
return job return job
-    def update_legacy_job(self, job):
-        """patch up a job that was in the old format"""
-        items = job.items
-        headers = list(items.first().data.keys())
-        job.mappings = self.create_row_mappings(headers)
-        job.updated_date = timezone.now()
-        job.save()
-        for item in items.all():
-            normalized = self.normalize_row(item.data, job.mappings)
-            normalized["shelf"] = self.get_shelf(normalized)
-            item.normalized_data = normalized
-            item.save()
-    def create_row_mappings(self, headers):
-        """guess what the headers mean"""
-        mappings = {}
-        for (key, guesses) in self.row_mappings_guesses:
-            value = [h for h in headers if h.lower() in guesses]
-            value = value[0] if len(value) else None
-            if value:
-                headers.remove(value)
-            mappings[key] = value
-        return mappings
-    def create_item(self, job, index, data):
-        """creates and saves an import item"""
-        normalized = self.normalize_row(data, job.mappings)
-        normalized["shelf"] = self.get_shelf(normalized)
-        ImportItem(job=job, index=index, data=data, normalized_data=normalized).save()
-    def get_shelf(self, normalized_row):
-        """determine which shelf to use"""
-        shelf_name = normalized_row.get("shelf")
-        if not shelf_name:
-            return None
-        shelf_name = shelf_name.lower()
-        shelf = [
-            s for (s, gs) in self.shelf_mapping_guesses.items() if shelf_name in gs
-        ]
-        return shelf[0] if shelf else None
-    def normalize_row(self, entry, mappings):  # pylint: disable=no-self-use
-        """use the dataclass to create the formatted row of data"""
-        return {k: entry.get(v) for k, v in mappings.items()}
+    def save_item(self, job, index, data):  # pylint: disable=no-self-use
+        """ creates and saves an import item """
+        ImportItem(job=job, index=index, data=data).save()
+    def parse_fields(self, entry):
+        """ updates csv data with additional info """
+        entry.update({"import_source": self.service})
+        return entry
    def create_retry_job(self, user, original_job, items):
-        """retry items that didn't import"""
+        """ retry items that didn't import """
        job = ImportJob.objects.create(
            user=user,
            include_reviews=original_job.include_reviews,
            privacy=original_job.privacy,
-            source=original_job.source,
-            # TODO: allow users to adjust mappings
-            mappings=original_job.mappings,
            retry=True,
        )
        for item in items:
-            # this will re-normalize the raw data
-            self.create_item(job, item.index, item.data)
+            self.save_item(job, item.index, item.data)
        return job
-    def start_import(self, job):  # pylint: disable=no-self-use
-        """initalizes a csv import job"""
-        result = start_import_task.delay(job.id)
+    def start_import(self, job):
+        """ initalizes a csv import job """
+        result = import_data.delay(self.service, job.id)
        job.task_id = result.id
        job.save()
-@app.task(queue="low_priority")
-def start_import_task(job_id):
-    """trigger the child tasks for each row"""
+@app.task
+def import_data(source, job_id):
+    """ does the actual lookup work in a celery task """
    job = ImportJob.objects.get(id=job_id)
-    # these are sub-tasks so that one big task doesn't use up all the memory in celery
-    for item in job.items.values_list("id", flat=True).all():
-        import_item_task.delay(item)
-@app.task(queue="low_priority")
-def import_item_task(item_id):
-    """resolve a row into a book"""
-    item = models.ImportItem.objects.get(id=item_id)
    try:
-        item.resolve()
-    except Exception as err:  # pylint: disable=broad-except
-        item.fail_reason = _("Error loading book")
-        item.save()
-        item.update_job()
-        raise err
-    if item.book:
-        # shelves book and handles reviews
-        handle_imported_book(item)
-    else:
-        item.fail_reason = _("Could not find a match for book")
-        item.save()
-    item.update_job()
+        for item in job.items.all():
+            try:
+                item.resolve()
+            except Exception as e:  # pylint: disable=broad-except
+                logger.exception(e)
+                item.fail_reason = "Error loading book"
+                item.save()
+                continue
+            if item.book:
+                item.save()
+                # shelves book and handles reviews
+                handle_imported_book(
+                    source, job.user, item, job.include_reviews, job.privacy
+                )
+            else:
+                item.fail_reason = "Could not find a match for book"
+                item.save()
+    finally:
+        job.complete = True
+        job.save()
-def handle_imported_book(item):
-    """process a csv and then post about it"""
-    job = item.job
-    user = job.user
+def handle_imported_book(source, user, item, include_reviews, privacy):
+    """ process a csv and then post about it """
    if isinstance(item.book, models.Work):
        item.book = item.book.default_edition
    if not item.book:
-        item.fail_reason = _("Error loading book")
-        item.save()
        return
-    if not isinstance(item.book, models.Edition):
-        item.book = item.book.edition
    existing_shelf = models.ShelfBook.objects.filter(book=item.book, user=user).exists()
    # shelve the book if it hasn't been shelved already
    if item.shelf and not existing_shelf:
        desired_shelf = models.Shelf.objects.get(identifier=item.shelf, user=user)
-        shelved_date = item.date_added or timezone.now()
-        models.ShelfBook(
-            book=item.book, shelf=desired_shelf, user=user, shelved_date=shelved_date
-        ).save(priority=LOW)
+        models.ShelfBook.objects.create(book=item.book, shelf=desired_shelf, user=user)
    for read in item.reads:
        # check for an existing readthrough with the same dates
@ -193,52 +115,25 @@ def handle_imported_book(item):
        read.user = user
        read.save()
-    if job.include_reviews and (item.rating or item.review) and not item.linked_review:
+    if include_reviews and (item.rating or item.review):
+        review_title = (
+            "Review of {!r} on {!r}".format(
+                item.book.title,
+                source,
+            )
+            if item.review
+            else ""
+        )
        # we don't know the publication date of the review,
        # but "now" is a bad guess
        published_date_guess = item.date_read or item.date_added
-        if item.review:
-            # pylint: disable=consider-using-f-string
-            review_title = "Review of {!r} on {!r}".format(
-                item.book.title,
-                job.source,
-            )
-            review = models.Review.objects.filter(
-                user=user,
-                book=item.book,
-                name=review_title,
-                rating=item.rating,
-                published_date=published_date_guess,
-            ).first()
-            if not review:
-                review = models.Review(
-                    user=user,
-                    book=item.book,
-                    name=review_title,
-                    content=item.review,
-                    rating=item.rating,
-                    published_date=published_date_guess,
-                    privacy=job.privacy,
-                )
-            review.save(software="bookwyrm", priority=LOW)
-        else:
-            # just a rating
-            review = models.ReviewRating.objects.filter(
-                user=user,
-                book=item.book,
-                published_date=published_date_guess,
-                rating=item.rating,
-            ).first()
-            if not review:
-                review = models.ReviewRating(
-                    user=user,
-                    book=item.book,
-                    rating=item.rating,
-                    published_date=published_date_guess,
-                    privacy=job.privacy,
-                )
-            review.save(software="bookwyrm", priority=LOW)
-        # only broadcast this review to other bookwyrm instances
-        item.linked_review = review
-    item.save()
+        models.Review.objects.create(
+            user=user,
+            book=item.book,
+            name=review_title,
+            content=item.review,
+            rating=item.rating,
+            published_date=published_date_guess,
+            privacy=privacy,
+        )

View file
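
The mappings-based flow above (create_row_mappings → normalize_row → get_shelf) replaces the per-service parse_fields hook. For orientation, a minimal standalone sketch of that header-guessing step; the guess lists and the sample row are illustrative placeholders, not the importer's real tables.

# Standalone sketch of the header-guessing approach used above; the guess lists
# here are invented examples, not the full set shipped with the importer.
ROW_MAPPINGS_GUESSES = [
    ("title", ["title", "bookname"]),
    ("authors", ["author", "authors", "primary author"]),
    ("isbn_13", ["isbn13", "isbn"]),
    ("shelf", ["shelf", "exclusive shelf", "read status"]),
]

def create_row_mappings(headers):
    """guess which CSV column feeds which normalized field"""
    mappings = {}
    for key, guesses in ROW_MAPPINGS_GUESSES:
        match = next((h for h in headers if h.lower() in guesses), None)
        if match:
            headers.remove(match)
        mappings[key] = match
    return mappings

def normalize_row(entry, mappings):
    """produce the normalized dict stored on ImportItem.normalized_data"""
    return {key: entry.get(column) for key, column in mappings.items()}

headers = ["Title", "Author", "ISBN13", "Exclusive Shelf"]
mappings = create_row_mappings(list(headers))
row = {
    "Title": "Parable of the Sower",
    "Author": "Octavia E. Butler",
    "ISBN13": "9780446675505",
    "Exclusive Shelf": "read",
}
print(normalize_row(row, mappings))
# {'title': 'Parable of the Sower', 'authors': 'Octavia E. Butler',
#  'isbn_13': '9780446675505', 'shelf': 'read'}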

@ -1,30 +1,42 @@
""" handle reading a tsv from librarything """ """ handle reading a csv from librarything """
import re import re
import math
from bookwyrm.models import Shelf
from . import Importer from . import Importer
class LibrarythingImporter(Importer): class LibrarythingImporter(Importer):
"""csv downloads from librarything""" """ csv downloads from librarything """
service = "LibraryThing" service = "LibraryThing"
delimiter = "\t" delimiter = "\t"
encoding = "ISO-8859-1" encoding = "ISO-8859-1"
# mandatory_fields : fields matching the book title and author
mandatory_fields = ["Title", "Primary Author"]
def normalize_row(self, entry, mappings): # pylint: disable=no-self-use def parse_fields(self, entry):
"""use the dataclass to create the formatted row of data""" """ custom parsing for librarything """
remove_brackets = lambda v: re.sub(r"\[|\]", "", v) if v else None data = {}
normalized = {k: remove_brackets(entry.get(v)) for k, v in mappings.items()} data["import_source"] = self.service
isbn_13 = normalized.get("isbn_13") data["Book Id"] = entry["Book Id"]
isbn_13 = isbn_13.split(", ") if isbn_13 else [] data["Title"] = entry["Title"]
normalized["isbn_13"] = isbn_13[1] if len(isbn_13) > 0 else None data["Author"] = entry["Primary Author"]
return normalized data["ISBN13"] = entry["ISBN"]
data["My Review"] = entry["Review"]
if entry["Rating"]:
data["My Rating"] = math.ceil(float(entry["Rating"]))
else:
data["My Rating"] = ""
data["Date Added"] = re.sub(r"\[|\]", "", entry["Entry Date"])
data["Date Started"] = re.sub(r"\[|\]", "", entry["Date Started"])
data["Date Read"] = re.sub(r"\[|\]", "", entry["Date Read"])
def get_shelf(self, normalized_row): data["Exclusive Shelf"] = None
if normalized_row["date_finished"]: if data["Date Read"]:
return Shelf.READ_FINISHED data["Exclusive Shelf"] = "read"
if normalized_row["date_started"]: elif data["Date Started"]:
return Shelf.READING data["Exclusive Shelf"] = "reading"
return Shelf.TO_READ else:
data["Exclusive Shelf"] = "to-read"
return data

View file
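
LibraryThing's tab-separated export wraps dates in square brackets and can list more than one ISBN in a single field, which is what the bracket-stripping and ISBN-13 selection above deal with. A small sketch with invented field values:

import re

# Sample values are made up for the example; real exports vary.
entry = {"Entry Date": "[2021-02-03]", "ISBN": "0446675505, 9780446675505"}

remove_brackets = lambda value: re.sub(r"\[|\]", "", value) if value else None
print(remove_brackets(entry["Entry Date"]))   # 2021-02-03

# prefer the second, 13-digit ISBN when the field lists both
isbns = entry["ISBN"].split(", ")
isbn_13 = isbns[1] if len(isbns) > 1 else None
print(isbn_13)                                # 9780446675505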

@ -1,13 +0,0 @@
""" handle reading a csv from openlibrary"""
from . import Importer
class OpenLibraryImporter(Importer):
"""csv downloads from OpenLibrary"""
service = "OpenLibrary"
def __init__(self, *args, **kwargs):
self.row_mappings_guesses.append(("openlibrary_key", ["edition id"]))
self.row_mappings_guesses.append(("openlibrary_work_key", ["work id"]))
super().__init__(*args, **kwargs)

View file

@ -1,8 +0,0 @@
""" handle reading a csv from storygraph"""
from . import Importer
class StorygraphImporter(Importer):
"""csv downloads from Storygraph"""
service = "Storygraph"

View file

@ -1,251 +0,0 @@
""" access the list streams stored in redis """
from django.dispatch import receiver
from django.db import transaction
from django.db.models import signals, Count, Q
from bookwyrm import models
from bookwyrm.redis_store import RedisStore
from bookwyrm.tasks import app, MEDIUM, HIGH
class ListsStream(RedisStore):
"""all the lists you can see"""
def stream_id(self, user): # pylint: disable=no-self-use
"""the redis key for this user's instance of this stream"""
if isinstance(user, int):
# allows the function to take an int or an obj
return f"{user}-lists"
return f"{user.id}-lists"
def get_rank(self, obj): # pylint: disable=no-self-use
"""lists are sorted by updated date"""
return obj.updated_date.timestamp()
def add_list(self, book_list):
"""add a list to users' feeds"""
# the pipeline contains all the add-to-stream activities
self.add_object_to_related_stores(book_list)
def add_user_lists(self, viewer, user):
"""add a user's lists to another user's feed"""
# only add the lists that the viewer should be able to see
lists = models.List.privacy_filter(viewer).filter(user=user)
self.bulk_add_objects_to_store(lists, self.stream_id(viewer))
def remove_user_lists(self, viewer, user, exclude_privacy=None):
"""remove a user's list from another user's feed"""
# remove all so that followers only lists are removed
lists = user.list_set
if exclude_privacy:
lists = lists.exclude(privacy=exclude_privacy)
self.bulk_remove_objects_from_store(lists.all(), self.stream_id(viewer))
def get_list_stream(self, user):
"""load the lists to be displayed"""
lists = self.get_store(self.stream_id(user))
return (
models.List.objects.filter(id__in=lists)
.annotate(item_count=Count("listitem", filter=Q(listitem__approved=True)))
# hide lists with no approved books
.filter(item_count__gt=0)
.select_related("user")
.prefetch_related("listitem_set")
.order_by("-updated_date")
.distinct()
)
def populate_lists(self, user):
"""go from zero to a timeline"""
self.populate_store(self.stream_id(user))
def get_audience(self, book_list): # pylint: disable=no-self-use
"""given a list, what users should see it"""
# everybody who could plausibly see this list
audience = models.User.objects.filter(
is_active=True,
local=True, # we only create feeds for users of this instance
).exclude( # not blocked
Q(id__in=book_list.user.blocks.all()) | Q(blocks=book_list.user)
)
group = book_list.group
# only visible to the poster and mentioned users
if book_list.privacy == "direct":
if group:
audience = audience.filter(
Q(id=book_list.user.id) # if the user is the post's author
| ~Q(groups=group.memberships) # if the user is in the group
)
else:
audience = audience.filter(
Q(id=book_list.user.id) # if the user is the post's author
)
# only visible to the poster's followers and tagged users
elif book_list.privacy == "followers":
if group:
audience = audience.filter(
Q(id=book_list.user.id) # if the user is the list's owner
| Q(following=book_list.user) # if the user is following the owner
# if a user is in the group
| Q(memberships__group__id=book_list.group.id)
)
else:
audience = audience.filter(
Q(id=book_list.user.id) # if the user is the list's owner
| Q(following=book_list.user) # if the user is following the owner
)
return audience.distinct()
def get_stores_for_object(self, obj):
return [self.stream_id(u) for u in self.get_audience(obj)]
def get_lists_for_user(self, user): # pylint: disable=no-self-use
"""given a user, what lists should they see on this stream"""
return models.List.privacy_filter(
user,
privacy_levels=["public", "followers"],
)
def get_objects_for_store(self, store):
user = models.User.objects.get(id=store.split("-")[0])
return self.get_lists_for_user(user)
@receiver(signals.post_save, sender=models.List)
# pylint: disable=unused-argument
def add_list_on_create(sender, instance, created, *args, **kwargs):
"""add newly created lists streamsstreams"""
if not created:
return
# when creating new things, gotta wait on the transaction
transaction.on_commit(lambda: add_list_on_create_command(instance.id))
@receiver(signals.post_delete, sender=models.List)
# pylint: disable=unused-argument
def remove_list_on_delete(sender, instance, *args, **kwargs):
"""remove deleted lists to streams"""
remove_list_task.delay(instance.id)
def add_list_on_create_command(instance_id):
"""runs this code only after the database commit completes"""
add_list_task.delay(instance_id)
@receiver(signals.post_save, sender=models.UserFollows)
# pylint: disable=unused-argument
def add_lists_on_follow(sender, instance, created, *args, **kwargs):
"""add a newly followed user's lists to feeds"""
if not created or not instance.user_subject.local:
return
add_user_lists_task.delay(instance.user_subject.id, instance.user_object.id)
@receiver(signals.post_delete, sender=models.UserFollows)
# pylint: disable=unused-argument
def remove_lists_on_unfollow(sender, instance, *args, **kwargs):
"""remove lists from a feed on unfollow"""
if not instance.user_subject.local:
return
# remove all but public lists
remove_user_lists_task.delay(
instance.user_subject.id, instance.user_object.id, exclude_privacy="public"
)
@receiver(signals.post_save, sender=models.UserBlocks)
# pylint: disable=unused-argument
def remove_lists_on_block(sender, instance, *args, **kwargs):
"""remove lists from all feeds on block"""
# blocks apply to all feeds
if instance.user_subject.local:
remove_user_lists_task.delay(instance.user_subject.id, instance.user_object.id)
# and in both directions
if instance.user_object.local:
remove_user_lists_task.delay(instance.user_object.id, instance.user_subject.id)
@receiver(signals.post_delete, sender=models.UserBlocks)
# pylint: disable=unused-argument
def add_lists_on_unblock(sender, instance, *args, **kwargs):
"""add lists back to all feeds on unblock"""
# make sure there isn't a block in the other direction
if models.UserBlocks.objects.filter(
user_subject=instance.user_object,
user_object=instance.user_subject,
).exists():
return
# add lists back to streams with lists from anyone
if instance.user_subject.local:
add_user_lists_task.delay(
instance.user_subject.id,
instance.user_object.id,
)
# add lists back to streams with lists from anyone
if instance.user_object.local:
add_user_lists_task.delay(
instance.user_object.id,
instance.user_subject.id,
)
@receiver(signals.post_save, sender=models.User)
# pylint: disable=unused-argument
def populate_lists_on_account_create(sender, instance, created, *args, **kwargs):
"""build a user's feeds when they join"""
if not created or not instance.local:
return
transaction.on_commit(lambda: add_list_on_account_create_command(instance.id))
def add_list_on_account_create_command(user_id):
"""wait for the transaction to complete"""
populate_lists_task.delay(user_id)
# ---- TASKS
@app.task(queue=MEDIUM)
def populate_lists_task(user_id):
"""background task for populating an empty list stream"""
user = models.User.objects.get(id=user_id)
ListsStream().populate_lists(user)
@app.task(queue=MEDIUM)
def remove_list_task(list_id):
"""remove a list from any stream it might be in"""
stores = models.User.objects.filter(local=True, is_active=True).values_list(
"id", flat=True
)
# delete for every store
stores = [ListsStream().stream_id(idx) for idx in stores]
ListsStream().remove_object_from_related_stores(list_id, stores=stores)
@app.task(queue=HIGH)
def add_list_task(list_id):
"""add a list to any stream it should be in"""
book_list = models.List.objects.get(id=list_id)
ListsStream().add_list(book_list)
@app.task(queue=MEDIUM)
def remove_user_lists_task(viewer_id, user_id, exclude_privacy=None):
"""remove all lists by a user from a viewer's stream"""
viewer = models.User.objects.get(id=viewer_id)
user = models.User.objects.get(id=user_id)
ListsStream().remove_user_lists(viewer, user, exclude_privacy=exclude_privacy)
@app.task(queue=MEDIUM)
def add_user_lists_task(viewer_id, user_id):
"""add all lists by a user to a viewer's stream"""
viewer = models.User.objects.get(id=viewer_id)
user = models.User.objects.get(id=user_id)
ListsStream().add_user_lists(viewer, user)

View file
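
ListsStream keeps one sorted set per user, keyed "<user_id>-lists" and scored by each list's updated_date, so recently updated lists surface first. A rough sketch of that storage pattern with plain redis-py; the connection details and IDs are placeholders:

import time
import redis

r = redis.Redis(host="localhost", port=6379, db=0)

def add_list_to_stream(user_id, list_id, updated_timestamp):
    """mirror of add_object_to_related_stores for a single user's store"""
    r.zadd(f"{user_id}-lists", {list_id: updated_timestamp})

def get_list_stream_ids(user_id, limit=30):
    """newest-first ids, the raw input to ListsStream.get_list_stream"""
    return r.zrevrange(f"{user_id}-lists", 0, limit - 1)

add_list_to_stream(1, 42, time.time())
print(get_list_stream_ids(1))  # [b'42']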

@ -1,23 +0,0 @@
""" Get your admin code to allow install """
from django.core.management.base import BaseCommand
from bookwyrm import models
def get_admin_code():
"""get that code"""
return models.SiteSettings.objects.get().admin_code
class Command(BaseCommand):
"""command-line options"""
help = "Gets admin code for configuring BookWyrm"
# pylint: disable=unused-argument
def handle(self, *args, **options):
"""execute init"""
self.stdout.write("*******************************************")
self.stdout.write("Use this code to create your admin account:")
self.stdout.write(get_admin_code())
self.stdout.write("*******************************************")

View file

@ -6,7 +6,7 @@ from bookwyrm import models
def update_related(canonical, obj):
-    """update all the models with fk to the object being removed"""
+    """ update all the models with fk to the object being removed """
    # move related models to canonical
    related_models = [
        (r.remote_field.name, r.related_model) for r in canonical._meta.related_objects
@ -24,7 +24,7 @@ def update_related(canonical, obj):
def copy_data(canonical, obj):
-    """try to get the most data possible"""
+    """ try to get the most data possible """
    for data_field in obj._meta.get_fields():
        if not hasattr(data_field, "activitypub_field"):
            continue
@ -38,7 +38,7 @@ def copy_data(canonical, obj):
def dedupe_model(model):
-    """combine duplicate editions and update related models"""
+    """ combine duplicate editions and update related models """
    fields = model._meta.get_fields()
    dedupe_fields = [
        f for f in fields if hasattr(f, "deduplication_field") and f.deduplication_field
@ -68,12 +68,12 @@ def dedupe_model(model):
class Command(BaseCommand):
-    """deduplicate allllll the book data models"""
+    """ deduplicate allllll the book data models """
    help = "merges duplicate book data"
    # pylint: disable=no-self-use,unused-argument
    def handle(self, *args, **options):
-        """run deduplications"""
+        """ run deduplications """
        dedupe_model(models.Edition)
        dedupe_model(models.Work)
        dedupe_model(models.Author)

View file

@ -5,23 +5,20 @@ import redis
from bookwyrm import settings
r = redis.Redis(
-    host=settings.REDIS_ACTIVITY_HOST,
-    port=settings.REDIS_ACTIVITY_PORT,
-    password=settings.REDIS_ACTIVITY_PASSWORD,
-    db=settings.REDIS_ACTIVITY_DB_INDEX,
+    host=settings.REDIS_ACTIVITY_HOST, port=settings.REDIS_ACTIVITY_PORT, db=0
)
def erase_streams():
-    """throw the whole redis away"""
+    """ throw the whole redis away """
    r.flushall()
class Command(BaseCommand):
-    """delete activity streams for all users"""
+    """ delete activity streams for all users """
    help = "Delete all the user streams"
    # pylint: disable=no-self-use,unused-argument
    def handle(self, *args, **options):
-        """flush all, baby"""
+        """ flush all, baby """
        erase_streams()

View file

@ -1,73 +0,0 @@
""" Generate preview images """
from django.core.management.base import BaseCommand
from bookwyrm import models, preview_images
# pylint: disable=line-too-long
class Command(BaseCommand):
"""Creates previews for existing objects"""
help = "Generate preview images"
# pylint: disable=no-self-use
def add_arguments(self, parser):
"""options for how the command is run"""
parser.add_argument(
"--all",
"-a",
action="store_true",
help="Generates images for ALL types: site, users and books. Can use a lot of computing power.",
)
# pylint: disable=no-self-use,unused-argument
def handle(self, *args, **options):
"""generate preview images"""
self.stdout.write(
" | Hello! I will be generating preview images for your instance."
)
if options["all"]:
self.stdout.write(
"🧑‍🎨 ⎨ This might take quite long if your instance has a lot of books and users."
)
self.stdout.write(" | ✧ Thank you for your patience ✧")
else:
self.stdout.write("🧑‍🎨 ⎨ I will only generate the instance preview image.")
self.stdout.write(" | ✧ Be right back! ✧")
# Site
self.stdout.write(" → Site preview image: ", ending="")
preview_images.generate_site_preview_image_task.delay()
self.stdout.write(" OK 🖼")
# pylint: disable=consider-using-f-string
if options["all"]:
# Users
users = models.User.objects.filter(
local=True,
is_active=True,
)
self.stdout.write(
" → User preview images ({}): ".format(len(users)), ending=""
)
for user in users:
preview_images.generate_user_preview_image_task.delay(user.id)
self.stdout.write(".", ending="")
self.stdout.write(" OK 🖼")
# Books
book_ids = (
models.Book.objects.select_subclasses()
.filter()
.values_list("id", flat=True)
)
self.stdout.write(
" → Book preview images ({}): ".format(len(book_ids)), ending=""
)
for book_id in book_ids:
preview_images.generate_edition_preview_image_task.delay(book_id)
self.stdout.write(".", ending="")
self.stdout.write(" OK 🖼")
self.stdout.write("🧑‍🎨 ⎨ I'm all done! ✧ Enjoy ✧")

View file

@ -1,25 +1,25 @@
""" What you need in the database to make it work """ from django.core.management.base import BaseCommand, CommandError
from django.core.management.base import BaseCommand
from django.contrib.auth.models import Group, Permission from django.contrib.auth.models import Group, Permission
from django.contrib.contenttypes.models import ContentType from django.contrib.contenttypes.models import ContentType
from bookwyrm import models from bookwyrm.models import Connector, FederatedServer, SiteSettings, User
from bookwyrm.settings import DOMAIN
def init_groups(): def init_groups():
"""permission levels"""
groups = ["admin", "moderator", "editor"] groups = ["admin", "moderator", "editor"]
for group in groups: for group in groups:
Group.objects.create(name=group) Group.objects.create(name=group)
def init_permissions(): def init_permissions():
"""permission types"""
permissions = [ permissions = [
{ {
"codename": "edit_instance_settings", "codename": "edit_instance_settings",
"name": "change the instance info", "name": "change the instance info",
"groups": ["admin"], "groups": [
"admin",
],
}, },
{ {
"codename": "set_user_group", "codename": "set_user_group",
@ -53,7 +53,7 @@ def init_permissions():
        },
    ]
-    content_type = ContentType.objects.get_for_model(models.User)
+    content_type = ContentType.objects.get_for_model(User)
    for permission in permissions:
        permission_obj = Permission.objects.create(
            codename=permission["codename"],
@ -64,12 +64,27 @@ def init_permissions():
        for group_name in permission["groups"]:
            Group.objects.get(name=group_name).permissions.add(permission_obj)
-# while the groups and permissions shouldn't be changed because the code
-# depends on them, what permissions go with what groups should be editable
def init_connectors():
-    """access book data sources"""
-    models.Connector.objects.create(
+    Connector.objects.create(
+        identifier=DOMAIN,
+        name="Local",
+        local=True,
+        connector_file="self_connector",
+        base_url="https://%s" % DOMAIN,
+        books_url="https://%s/book" % DOMAIN,
+        covers_url="https://%s/images/" % DOMAIN,
+        search_url="https://%s/search?q=" % DOMAIN,
+        isbn_search_url="https://%s/isbn/" % DOMAIN,
+        priority=1,
+    )
+    Connector.objects.create(
        identifier="bookwyrm.social",
-        name="Bookwyrm.social",
+        name="BookWyrm dot Social",
        connector_file="bookwyrm_connector",
        base_url="https://bookwyrm.social",
        books_url="https://bookwyrm.social/book",
@ -79,20 +94,7 @@ def init_connectors():
        priority=2,
    )
-    # pylint: disable=line-too-long
-    models.Connector.objects.create(
-        identifier="inventaire.io",
-        name="Inventaire",
-        connector_file="inventaire",
-        base_url="https://inventaire.io",
-        books_url="https://inventaire.io/api/entities",
-        covers_url="https://inventaire.io",
-        search_url="https://inventaire.io/api/search?types=works&types=works&search=",
-        isbn_search_url="https://inventaire.io/api/entities?action=by-uris&uris=isbn%3A",
-        priority=1,
-    )
-    models.Connector.objects.create(
+    Connector.objects.create(
        identifier="openlibrary.org",
        name="OpenLibrary",
        connector_file="openlibrary",
@ -101,71 +103,30 @@ def init_connectors():
covers_url="https://covers.openlibrary.org", covers_url="https://covers.openlibrary.org",
search_url="https://openlibrary.org/search?q=", search_url="https://openlibrary.org/search?q=",
isbn_search_url="https://openlibrary.org/api/books?jscmd=data&format=json&bibkeys=ISBN:", isbn_search_url="https://openlibrary.org/api/books?jscmd=data&format=json&bibkeys=ISBN:",
priority=1, priority=3,
) )
def init_federated_servers():
""" big no to nazis """
built_in_blocks = ["gab.ai", "gab.com"]
for server in built_in_blocks:
FederatedServer.objects.create(
server_name=server,
status="blocked",
)
def init_settings(): def init_settings():
"""info about the instance""" SiteSettings.objects.create()
models.SiteSettings.objects.create(
support_link="https://www.patreon.com/bookwyrm",
support_title="Patreon",
install_mode=True,
)
def init_link_domains():
"""safe book links"""
domains = [
("standardebooks.org", "Standard EBooks"),
("www.gutenberg.org", "Project Gutenberg"),
("archive.org", "Internet Archive"),
("openlibrary.org", "Open Library"),
("theanarchistlibrary.org", "The Anarchist Library"),
]
for domain, name in domains:
models.LinkDomain.objects.create(
domain=domain,
name=name,
status="approved",
)
# pylint: disable=no-self-use
# pylint: disable=unused-argument
class Command(BaseCommand): class Command(BaseCommand):
"""command-line options"""
help = "Initializes the database with starter data" help = "Initializes the database with starter data"
def add_arguments(self, parser):
"""specify which function to run"""
parser.add_argument(
"--limit",
default=None,
help="Limit init to specific table",
)
def handle(self, *args, **options): def handle(self, *args, **options):
"""execute init""" init_groups()
limit = options.get("limit") init_permissions()
tables = [ init_connectors()
"group", init_federated_servers()
"permission", init_settings()
"connector",
"settings",
"linkdomain",
]
if limit and limit not in tables:
raise Exception("Invalid table limit:", limit)
if not limit or limit == "group":
init_groups()
if not limit or limit == "permission":
init_permissions()
if not limit or limit == "connector":
init_connectors()
if not limit or limit == "settings":
init_settings()
if not limit or limit == "linkdomain":
init_link_domains()

View file
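
The --limit flag added above lets the seed data be rebuilt one table at a time instead of all at once. A sketch of calling it through Django's management API, assuming the command file is registered as "initdb" (the file name is not visible in this diff):

from django.core.management import call_command

# requires a configured Django settings module, as for any management command
call_command("initdb")                      # seed everything
call_command("initdb", limit="connector")   # only the Connector rows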

@ -1,54 +0,0 @@
""" Get your admin code to allow install """
from django.core.management.base import BaseCommand
from bookwyrm import models
from bookwyrm.settings import VERSION
# pylint: disable=no-self-use
class Command(BaseCommand):
"""command-line options"""
help = "What version is this?"
def add_arguments(self, parser):
"""specify which function to run"""
parser.add_argument(
"--current",
action="store_true",
help="Version stored in database",
)
parser.add_argument(
"--target",
action="store_true",
help="Version stored in settings",
)
parser.add_argument(
"--update",
action="store_true",
help="Update database version",
)
# pylint: disable=unused-argument
def handle(self, *args, **options):
"""execute init"""
site = models.SiteSettings.objects.get()
current = site.version or "0.0.1"
target = VERSION
if options.get("current"):
print(current)
return
if options.get("target"):
print(target)
return
if options.get("update"):
site.version = target
site.save()
return
if current != target:
print(f"{current}/{target}")
else:
print(current)

View file

@ -1,28 +0,0 @@
""" Re-create list streams """
from django.core.management.base import BaseCommand
from bookwyrm import lists_stream, models
def populate_lists_streams():
"""build all the lists streams for all the users"""
print("Populating lists streams")
users = models.User.objects.filter(
local=True,
is_active=True,
).order_by("-last_active_date")
print("This may take a long time! Please be patient.")
for user in users:
print(".", end="")
lists_stream.populate_lists_task.delay(user.id)
print("\nAll done, thank you for your patience!")
class Command(BaseCommand):
"""start all over with lists streams"""
help = "Populate list streams for all users"
# pylint: disable=no-self-use,unused-argument
def handle(self, *args, **options):
"""run feed builder"""
populate_lists_streams()

View file

@ -1,39 +1,30 @@
""" Re-create user streams """ """ Re-create user streams """
from django.core.management.base import BaseCommand from django.core.management.base import BaseCommand
from bookwyrm import activitystreams, lists_stream, models import redis
from bookwyrm import activitystreams, models, settings
r = redis.Redis(
host=settings.REDIS_ACTIVITY_HOST, port=settings.REDIS_ACTIVITY_PORT, db=0
)
def populate_streams(stream=None): def populate_streams():
"""build all the streams for all the users""" """ build all the streams for all the users """
streams = [stream] if stream else activitystreams.streams.keys()
print("Populating streams", streams)
users = models.User.objects.filter( users = models.User.objects.filter(
local=True, local=True,
is_active=True, is_active=True,
).order_by("-last_active_date") )
print("This may take a long time! Please be patient.")
for user in users: for user in users:
print(".", end="") for stream in activitystreams.streams.values():
lists_stream.populate_lists_task.delay(user.id) stream.populate_streams(user)
for stream_key in streams:
print(".", end="")
activitystreams.populate_stream_task.delay(stream_key, user.id)
class Command(BaseCommand): class Command(BaseCommand):
"""start all over with user streams""" """ start all over with user streams """
help = "Populate streams for all users" help = "Populate streams for all users"
def add_arguments(self, parser):
parser.add_argument(
"--stream",
default=None,
help="Specifies which time of stream to populate",
)
# pylint: disable=no-self-use,unused-argument # pylint: disable=no-self-use,unused-argument
def handle(self, *args, **options): def handle(self, *args, **options):
"""run feed builder""" """ run feed builder """
stream = options.get("stream") populate_streams()
populate_streams(stream=stream)

View file
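
The rewritten command no longer rebuilds streams inline; it fans the work out as one Celery task per stream type and user. A self-contained sketch of that dispatch pattern; the broker URL, task name, and stream keys are stand-ins rather than BookWyrm's own module:

from celery import Celery

app = Celery("tasks", broker="redis://localhost:6379/0")

@app.task
def populate_stream_task(stream_key, user_id):
    """rebuild one stream type for one user (stub body for the sketch)"""
    print(f"rebuilding {stream_key} stream for user {user_id}")

def populate_streams(user_ids, stream=None):
    """fan the rebuild out as one small task per (stream, user) pair"""
    stream_keys = [stream] if stream else ["home", "local", "books"]
    for user_id in user_ids:
        for key in stream_keys:
            populate_stream_task.delay(key, user_id)

populate_streams([1, 2, 3], stream="home")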

@ -1,25 +0,0 @@
""" Populate suggested users """
from django.core.management.base import BaseCommand
from bookwyrm import models
from bookwyrm.suggested_users import rerank_suggestions_task
def populate_suggestions():
"""build all the streams for all the users"""
users = models.User.objects.filter(
local=True,
is_active=True,
).values_list("id", flat=True)
for user in users:
rerank_suggestions_task.delay(user)
class Command(BaseCommand):
"""start all over with user suggestions"""
help = "Populate suggested users for all users"
# pylint: disable=no-self-use,unused-argument
def handle(self, *args, **options):
"""run builder"""
populate_suggestions()

View file

@ -5,7 +5,7 @@ from bookwyrm import models
def remove_editions():
-    """combine duplicate editions and update related models"""
+    """ combine duplicate editions and update related models """
    # not in use
    filters = {
        "%s__isnull" % r.name: True for r in models.Edition._meta.related_objects
@ -33,10 +33,10 @@ def remove_editions():
class Command(BaseCommand):
-    """deduplicate allllll the book data models"""
+    """ deduplicate allllll the book data models """
    help = "merges duplicate book data"
    # pylint: disable=no-self-use,unused-argument
    def handle(self, *args, **options):
-        """run deduplications"""
+        """ run deduplications """
        remove_editions()

View file

@ -1,3 +0,0 @@
""" look at all this nice middleware! """
from .timezone_middleware import TimezoneMiddleware
from .ip_middleware import IPBlocklistMiddleware

View file

@ -1,16 +0,0 @@
""" Block IP addresses """
from django.http import Http404
from bookwyrm import models
class IPBlocklistMiddleware:
"""check incoming traffic against an IP block-list"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
address = request.META.get("REMOTE_ADDR")
if models.IPBlocklist.objects.filter(address=address).exists():
raise Http404()
return self.get_response(request)

View file
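
For the block-list check to run, the class has to be listed in Django's MIDDLEWARE setting. A sketch, assuming the package path implied by the imports above:

# settings.py fragment; the bookwyrm.middleware path is assumed from the
# package __init__ shown above
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    # ... the rest of the stock Django middleware ...
    "bookwyrm.middleware.TimezoneMiddleware",
    "bookwyrm.middleware.IPBlocklistMiddleware",
]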

@ -8,7 +8,7 @@ from psycopg2.extras import execute_values
def convert_review_rating(app_registry, schema_editor):
-    """take rating type Reviews and convert them to ReviewRatings"""
+    """ take rating type Reviews and convert them to ReviewRatings """
    db_alias = schema_editor.connection.alias
    reviews = (
@ -29,7 +29,7 @@ VALUES %s""",
def unconvert_review_rating(app_registry, schema_editor):
-    """undo the conversion from ratings back to reviews"""
+    """ undo the conversion from ratings back to reviews"""
    # All we need to do to revert this is drop the table, which Django will do
    # on its own, as long as we have a valid reverse function. So, this is a
    # no-op function so Django will do its thing

View file
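
The forward migration above batches its inserts with psycopg2's execute_values helper, which expands a single "VALUES %s" placeholder into many rows. A minimal illustration of that call shape; the DSN, table, and values are invented for the example:

import psycopg2
from psycopg2.extras import execute_values

connection = psycopg2.connect("dbname=bookwyrm user=bookwyrm")  # placeholder DSN
with connection.cursor() as cursor:
    execute_values(
        cursor,
        "INSERT INTO example_reviewrating (review_ptr_id) VALUES %s",
        [(101,), (102,), (103,)],  # one tuple per row to insert
    )
connection.commit()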

@ -1,27 +0,0 @@
# Generated by Django 3.0.7 on 2021-02-14 00:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0045_auto_20210210_2114"),
]
operations = [
migrations.AddField(
model_name="user",
name="default_post_privacy",
field=models.CharField(
choices=[
("public", "Public"),
("unlisted", "Unlisted"),
("followers", "Followers"),
("direct", "Direct"),
],
default="public",
max_length=255,
),
),
]

View file

@ -1,30 +0,0 @@
# Generated by Django 3.1.6 on 2021-04-06 17:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0061_auto_20210402_1435"),
]
operations = [
migrations.RemoveConstraint(
model_name="connector",
name="connector_file_valid",
),
migrations.AlterField(
model_name="connector",
name="connector_file",
field=models.CharField(
choices=[
("openlibrary", "Openlibrary"),
("inventaire", "Inventaire"),
("self_connector", "Self Connector"),
("bookwyrm_connector", "Bookwyrm Connector"),
],
max_length=255,
),
),
]

View file

@ -1,63 +0,0 @@
# Generated by Django 3.1.6 on 2021-04-07 00:45
import bookwyrm.models.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0062_auto_20210406_1731"),
]
operations = [
migrations.AddField(
model_name="author",
name="bnf_id",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AddField(
model_name="author",
name="gutenberg_id",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AddField(
model_name="author",
name="inventaire_id",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AddField(
model_name="author",
name="isni",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AddField(
model_name="author",
name="viaf_id",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AddField(
model_name="book",
name="bnf_id",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AddField(
model_name="book",
name="inventaire_id",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
]

View file

@ -1,35 +0,0 @@
# Generated by Django 3.1.8 on 2021-04-23 01:21
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0069_auto_20210422_1604"),
]
operations = [
migrations.AlterUniqueTogether(
name="usertag",
unique_together=None,
),
migrations.RemoveField(
model_name="usertag",
name="book",
),
migrations.RemoveField(
model_name="usertag",
name="tag",
),
migrations.RemoveField(
model_name="usertag",
name="user",
),
migrations.DeleteModel(
name="Tag",
),
migrations.DeleteModel(
name="UserTag",
),
]

View file

@ -1,13 +0,0 @@
# Generated by Django 3.2 on 2021-04-26 21:32
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0063_auto_20210407_0045"),
("bookwyrm", "0070_auto_20210423_0121"),
]
operations = []

View file

@ -1,17 +0,0 @@
# Generated by Django 3.2 on 2021-04-28 22:16
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0071_merge_0063_auto_20210407_0045_0070_auto_20210423_0121"),
]
operations = [
migrations.RemoveField(
model_name="work",
name="default_edition",
),
]

View file

@ -1,18 +0,0 @@
# Generated by Django 3.2 on 2021-04-30 17:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0072_remove_work_default_edition"),
]
operations = [
migrations.AddField(
model_name="sitesettings",
name="footer_item",
field=models.TextField(blank=True, null=True),
),
]

View file

@ -1,48 +0,0 @@
# Generated by Django 3.2 on 2021-05-11 18:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0073_sitesettings_footer_item"),
]
operations = [
migrations.RemoveField(
model_name="connector",
name="max_query_count",
),
migrations.RemoveField(
model_name="connector",
name="politeness_delay",
),
migrations.RemoveField(
model_name="connector",
name="query_count",
),
migrations.RemoveField(
model_name="connector",
name="query_count_expiry",
),
migrations.AddField(
model_name="connector",
name="active",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name="connector",
name="deactivation_reason",
field=models.CharField(
blank=True,
choices=[
("self_deletion", "Self Deletion"),
("moderator_deletion", "Moderator Deletion"),
("domain_block", "Domain Block"),
],
max_length=255,
null=True,
),
),
]

View file

@ -1,56 +0,0 @@
# Generated by Django 3.2 on 2021-05-20 19:34
import bookwyrm.models.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0074_auto_20210511_1829"),
]
operations = [
migrations.CreateModel(
name="Announcement",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("created_date", models.DateTimeField(auto_now_add=True)),
("updated_date", models.DateTimeField(auto_now=True)),
(
"remote_id",
bookwyrm.models.fields.RemoteIdField(
max_length=255,
null=True,
validators=[bookwyrm.models.fields.validate_remote_id],
),
),
("preview", models.CharField(max_length=255)),
("content", models.TextField(blank=True, null=True)),
("event_date", models.DateTimeField(blank=True, null=True)),
("start_date", models.DateTimeField(blank=True, null=True)),
("end_date", models.DateTimeField(blank=True, null=True)),
("active", models.BooleanField(default=True)),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"abstract": False,
},
),
]

View file

@ -1,32 +0,0 @@
# Generated by Django 3.2 on 2021-05-26 12:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0075_announcement"),
]
operations = [
migrations.AddField(
model_name="book",
name="preview_image",
field=models.ImageField(
blank=True, null=True, upload_to="previews/covers/"
),
),
migrations.AddField(
model_name="sitesettings",
name="preview_image",
field=models.ImageField(blank=True, null=True, upload_to="previews/logos/"),
),
migrations.AddField(
model_name="user",
name="preview_image",
field=models.ImageField(
blank=True, null=True, upload_to="previews/avatars/"
),
),
]

View file

@ -1,126 +0,0 @@
# Generated by Django 3.2.4 on 2021-06-23 21:55
import django.contrib.postgres.indexes
import django.contrib.postgres.search
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0076_preview_images"),
]
operations = [
migrations.AddField(
model_name="author",
name="search_vector",
field=django.contrib.postgres.search.SearchVectorField(null=True),
),
migrations.AddField(
model_name="book",
name="search_vector",
field=django.contrib.postgres.search.SearchVectorField(null=True),
),
migrations.AddIndex(
model_name="author",
index=django.contrib.postgres.indexes.GinIndex(
fields=["search_vector"], name="bookwyrm_au_search__b050a8_gin"
),
),
migrations.AddIndex(
model_name="book",
index=django.contrib.postgres.indexes.GinIndex(
fields=["search_vector"], name="bookwyrm_bo_search__51beb3_gin"
),
),
migrations.RunSQL(
sql="""
CREATE FUNCTION book_trigger() RETURNS trigger AS $$
begin
new.search_vector :=
coalesce(
NULLIF(setweight(to_tsvector('english', coalesce(new.title, '')), 'A'), ''),
setweight(to_tsvector('simple', coalesce(new.title, '')), 'A')
) ||
setweight(to_tsvector('english', coalesce(new.subtitle, '')), 'B') ||
(SELECT setweight(to_tsvector('simple', coalesce(array_to_string(array_agg(bookwyrm_author.name), ' '), '')), 'C')
FROM bookwyrm_book
LEFT OUTER JOIN bookwyrm_book_authors
ON bookwyrm_book.id = bookwyrm_book_authors.book_id
LEFT OUTER JOIN bookwyrm_author
ON bookwyrm_book_authors.author_id = bookwyrm_author.id
WHERE bookwyrm_book.id = new.id
) ||
setweight(to_tsvector('english', coalesce(new.series, '')), 'D');
return new;
end
$$ LANGUAGE plpgsql;
CREATE TRIGGER search_vector_trigger
BEFORE INSERT OR UPDATE OF title, subtitle, series, search_vector
ON bookwyrm_book
FOR EACH ROW EXECUTE FUNCTION book_trigger();
UPDATE bookwyrm_book SET search_vector = NULL;
""",
reverse_sql="""
DROP TRIGGER IF EXISTS search_vector_trigger
ON bookwyrm_book;
DROP FUNCTION IF EXISTS book_trigger;
""",
),
# when an author is edited
migrations.RunSQL(
sql="""
CREATE FUNCTION author_trigger() RETURNS trigger AS $$
begin
WITH book AS (
SELECT bookwyrm_book.id as row_id
FROM bookwyrm_author
LEFT OUTER JOIN bookwyrm_book_authors
ON bookwyrm_book_authors.id = new.id
LEFT OUTER JOIN bookwyrm_book
ON bookwyrm_book.id = bookwyrm_book_authors.book_id
)
UPDATE bookwyrm_book SET search_vector = ''
FROM book
WHERE id = book.row_id;
return new;
end
$$ LANGUAGE plpgsql;
CREATE TRIGGER author_search_vector_trigger
AFTER UPDATE OF name
ON bookwyrm_author
FOR EACH ROW EXECUTE FUNCTION author_trigger();
""",
reverse_sql="""
DROP TRIGGER IF EXISTS author_search_vector_trigger
ON bookwyrm_author;
DROP FUNCTION IF EXISTS author_trigger;
""",
),
# when an author is added to or removed from a book
migrations.RunSQL(
sql="""
CREATE FUNCTION book_authors_trigger() RETURNS trigger AS $$
begin
UPDATE bookwyrm_book SET search_vector = ''
WHERE id = coalesce(new.book_id, old.book_id);
return new;
end
$$ LANGUAGE plpgsql;
CREATE TRIGGER book_authors_search_vector_trigger
AFTER INSERT OR DELETE
ON bookwyrm_book_authors
FOR EACH ROW EXECUTE FUNCTION book_authors_trigger();
""",
reverse_sql="""
DROP TRIGGER IF EXISTS book_authors_search_vector_trigger
ON bookwyrm_book_authors;
DROP FUNCTION IF EXISTS book_authors_trigger;
""",
),
]

View file
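
With the triggers above keeping search_vector current, book search can be expressed through Django's postgres full-text helpers. A sketch of such a query; the model import path is assumed:

from django.contrib.postgres.search import SearchQuery, SearchRank
from django.db.models import F
from bookwyrm.models import Book  # assumed import path

query = SearchQuery("parable sower")
results = (
    Book.objects.filter(search_vector=query)          # uses the GIN index
    .annotate(rank=SearchRank(F("search_vector"), query))
    .order_by("-rank")
)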

@ -1,34 +0,0 @@
# Generated by Django 3.2.4 on 2021-07-03 08:25
from django.db import migrations, models
import django.utils.timezone
def copy_created_date(app_registry, schema_editor):
db_alias = schema_editor.connection.alias
ShelfBook = app_registry.get_model("bookwyrm", "ShelfBook")
ShelfBook.objects.all().update(shelved_date=models.F("created_date"))
def do_nothing(app_registry, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0077_auto_20210623_2155"),
]
operations = [
migrations.AlterModelOptions(
name="shelfbook",
options={"ordering": ("-shelved_date",)},
),
migrations.AddField(
model_name="shelfbook",
name="shelved_date",
field=models.DateTimeField(default=django.utils.timezone.now),
),
migrations.RunPython(copy_created_date, reverse_code=do_nothing),
]

View file

@ -1,13 +0,0 @@
# Generated by Django 3.2.4 on 2021-08-04 17:46
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0046_user_default_post_privacy"),
("bookwyrm", "0078_add_shelved_date"),
]
operations = []

View file

@ -1,17 +0,0 @@
# Generated by Django 3.2.4 on 2021-08-05 00:00
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0079_merge_20210804_1746"),
]
operations = [
migrations.AlterModelOptions(
name="shelfbook",
options={"ordering": ("-shelved_date", "-created_date", "-updated_date")},
),
]

View file

@ -1,19 +0,0 @@
# Generated by Django 3.2.4 on 2021-08-06 02:51
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0080_alter_shelfbook_options"),
]
operations = [
migrations.AlterField(
model_name="user",
name="last_active_date",
field=models.DateTimeField(default=django.utils.timezone.now),
),
]

View file

@ -1,56 +0,0 @@
# Generated by Django 3.2.4 on 2021-08-06 23:24
import bookwyrm.models.base_model
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0081_alter_user_last_active_date"),
]
operations = [
migrations.AddField(
model_name="sitesettings",
name="require_confirm_email",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name="user",
name="confirmation_code",
field=models.CharField(
default=bookwyrm.models.base_model.new_access_code, max_length=32
),
),
migrations.AlterField(
model_name="connector",
name="deactivation_reason",
field=models.CharField(
blank=True,
choices=[
("pending", "Pending"),
("self_deletion", "Self Deletion"),
("moderator_deletion", "Moderator Deletion"),
("domain_block", "Domain Block"),
],
max_length=255,
null=True,
),
),
migrations.AlterField(
model_name="user",
name="deactivation_reason",
field=models.CharField(
blank=True,
choices=[
("pending", "Pending"),
("self_deletion", "Self Deletion"),
("moderator_deletion", "Moderator Deletion"),
("domain_block", "Domain Block"),
],
max_length=255,
null=True,
),
),
]

View file

@ -1,56 +0,0 @@
# Generated by Django 3.2.4 on 2021-08-16 20:22
import bookwyrm.models.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0082_auto_20210806_2324"),
]
operations = [
migrations.AddField(
model_name="comment",
name="reading_status",
field=bookwyrm.models.fields.CharField(
blank=True,
choices=[
("to-read", "Toread"),
("reading", "Reading"),
("read", "Read"),
],
max_length=255,
null=True,
),
),
migrations.AddField(
model_name="quotation",
name="reading_status",
field=bookwyrm.models.fields.CharField(
blank=True,
choices=[
("to-read", "Toread"),
("reading", "Reading"),
("read", "Read"),
],
max_length=255,
null=True,
),
),
migrations.AddField(
model_name="review",
name="reading_status",
field=bookwyrm.models.fields.CharField(
blank=True,
choices=[
("to-read", "Toread"),
("reading", "Reading"),
("read", "Read"),
],
max_length=255,
null=True,
),
),
]

View file

@ -1,56 +0,0 @@
# Generated by Django 3.2.4 on 2021-08-17 19:16
import bookwyrm.models.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0083_auto_20210816_2022"),
]
operations = [
migrations.AlterField(
model_name="comment",
name="reading_status",
field=bookwyrm.models.fields.CharField(
blank=True,
choices=[
("to-read", "To-Read"),
("reading", "Reading"),
("read", "Read"),
],
max_length=255,
null=True,
),
),
migrations.AlterField(
model_name="quotation",
name="reading_status",
field=bookwyrm.models.fields.CharField(
blank=True,
choices=[
("to-read", "To-Read"),
("reading", "Reading"),
("read", "Read"),
],
max_length=255,
null=True,
),
),
migrations.AlterField(
model_name="review",
name="reading_status",
field=bookwyrm.models.fields.CharField(
blank=True,
choices=[
("to-read", "To-Read"),
("reading", "Reading"),
("read", "Read"),
],
max_length=255,
null=True,
),
),
]

View file

@ -1,20 +0,0 @@
# Generated by Django 3.2.4 on 2021-08-23 18:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0084_auto_20210817_1916"),
]
operations = [
migrations.AddField(
model_name="user",
name="saved_lists",
field=models.ManyToManyField(
related_name="saved_lists", to="bookwyrm.List"
),
),
]

View file

@ -1,40 +0,0 @@
# Generated by Django 3.2.4 on 2021-08-27 17:27
from django.db import migrations, models
import django.db.models.expressions
def normalize_readthrough_dates(app_registry, schema_editor):
"""Find any invalid dates and reset them"""
db_alias = schema_editor.connection.alias
app_registry.get_model("bookwyrm", "ReadThrough").objects.using(db_alias).filter(
start_date__gt=models.F("finish_date")
).update(start_date=models.F("finish_date"))
def reverse_func(apps, schema_editor):
"""nothing to do here"""
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0085_user_saved_lists"),
]
operations = [
migrations.RunPython(normalize_readthrough_dates, reverse_func),
migrations.AlterModelOptions(
name="readthrough",
options={"ordering": ("-start_date",)},
),
migrations.AddConstraint(
model_name="readthrough",
constraint=models.CheckConstraint(
check=models.Q(
("finish_date__gte", django.db.models.expressions.F("start_date"))
),
name="chronology",
),
),
]
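
The "chronology" constraint added above makes the database itself reject readthroughs that finish before they start. A sketch of the resulting behaviour; the user and edition arguments are assumed to be existing objects:

from datetime import timedelta

from django.db.utils import IntegrityError
from django.utils import timezone

from bookwyrm.models import ReadThrough  # assumed import path

def create_backwards_readthrough(user, edition):
    """attempt an end-before-start readthrough; the constraint rejects it"""
    now = timezone.now()
    try:
        ReadThrough.objects.create(
            user=user,
            book=edition,
            start_date=now,
            finish_date=now - timedelta(days=1),  # violates finish_date >= start_date
        )
    except IntegrityError:
        print("rejected by the chronology constraint")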

Some files were not shown because too many files have changed in this diff.