Merge branch 'main' into markdown-import
https://github.com/bookwyrm-social/bookwyrm.git, commit f4a4b59a14
43 changed files with 634 additions and 216 deletions
@@ -2,6 +2,8 @@
from dataclasses import dataclass, fields, MISSING
from json import JSONEncoder
import logging
from typing import Optional, Union, TypeVar, overload, Any

import requests

from django.apps import apps

@@ -10,12 +12,15 @@ from django.utils.http import http_date

from bookwyrm import models
from bookwyrm.connectors import ConnectorException, get_data
from bookwyrm.models import base_model
from bookwyrm.signatures import make_signature
from bookwyrm.settings import DOMAIN, INSTANCE_ACTOR_USERNAME
from bookwyrm.tasks import app, MISC

logger = logging.getLogger(__name__)

TBookWyrmModel = TypeVar("TBookWyrmModel", bound=base_model.BookWyrmModel)


class ActivitySerializerError(ValueError):
    """routine problems serializing activitypub json"""

@@ -65,7 +70,11 @@ class ActivityObject:
    id: str
    type: str

    def __init__(self, activity_objects=None, **kwargs):
    def __init__(
        self,
        activity_objects: Optional[list[str, base_model.BookWyrmModel]] = None,
        **kwargs: dict[str, Any],
    ):
        """this lets you pass in an object with fields that aren't in the
        dataclass, which it ignores. Any field in the dataclass is required or
        has a default value"""

@@ -101,13 +110,13 @@ class ActivityObject:
    # pylint: disable=too-many-locals,too-many-branches,too-many-arguments
    def to_model(
        self,
        model=None,
        instance=None,
        allow_create=True,
        save=True,
        overwrite=True,
        allow_external_connections=True,
    ):
        model: Optional[type[TBookWyrmModel]] = None,
        instance: Optional[TBookWyrmModel] = None,
        allow_create: bool = True,
        save: bool = True,
        overwrite: bool = True,
        allow_external_connections: bool = True,
    ) -> Optional[TBookWyrmModel]:
        """convert from an activity to a model instance. Args:
        model: the django model that this object is being converted to
            (will guess if not known)

@@ -296,14 +305,40 @@ def get_model_from_type(activity_type):


# pylint: disable=too-many-arguments
@overload
def resolve_remote_id(
    remote_id,
    model=None,
    refresh=False,
    save=True,
    get_activity=False,
    allow_external_connections=True,
):
    remote_id: str,
    model: type[TBookWyrmModel],
    refresh: bool = False,
    save: bool = True,
    get_activity: bool = False,
    allow_external_connections: bool = True,
) -> TBookWyrmModel:
    ...


# pylint: disable=too-many-arguments
@overload
def resolve_remote_id(
    remote_id: str,
    model: Optional[str] = None,
    refresh: bool = False,
    save: bool = True,
    get_activity: bool = False,
    allow_external_connections: bool = True,
) -> base_model.BookWyrmModel:
    ...


# pylint: disable=too-many-arguments
def resolve_remote_id(
    remote_id: str,
    model: Optional[Union[str, type[base_model.BookWyrmModel]]] = None,
    refresh: bool = False,
    save: bool = True,
    get_activity: bool = False,
    allow_external_connections: bool = True,
) -> base_model.BookWyrmModel:
    """take a remote_id and return an instance, creating if necessary. Args:
    remote_id: the unique url for looking up the object in the db or by http
    model: a string or object representing the model that corresponds to the object
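A minimal usage sketch (an editor's illustration, not part of this diff; the remote ids are hypothetical) of how the new overloads narrow the return type:

    # a concrete model class matches the first @overload and narrows the result
    # to that model; omitting the model falls through to the second @overload,
    # which only promises a base_model.BookWyrmModel
    edition = resolve_remote_id("https://example.net/book/1", model=models.Edition)
    # reveal_type(edition) -> models.Edition
    something = resolve_remote_id("https://example.net/user/mouse")
    # reveal_type(something) -> base_model.BookWyrmModel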
@@ -1,6 +1,6 @@
""" book and author data """
from dataclasses import dataclass, field
from typing import List
from typing import Optional

from .base_activity import ActivityObject
from .image import Document

@@ -11,19 +11,19 @@ from .image import Document
class BookData(ActivityObject):
    """shared fields for all book data and authors"""

    openlibraryKey: str = None
    inventaireId: str = None
    librarythingKey: str = None
    goodreadsKey: str = None
    bnfId: str = None
    viaf: str = None
    wikidata: str = None
    asin: str = None
    aasin: str = None
    isfdb: str = None
    lastEditedBy: str = None
    links: List[str] = field(default_factory=lambda: [])
    fileLinks: List[str] = field(default_factory=lambda: [])
    openlibraryKey: Optional[str] = None
    inventaireId: Optional[str] = None
    librarythingKey: Optional[str] = None
    goodreadsKey: Optional[str] = None
    bnfId: Optional[str] = None
    viaf: Optional[str] = None
    wikidata: Optional[str] = None
    asin: Optional[str] = None
    aasin: Optional[str] = None
    isfdb: Optional[str] = None
    lastEditedBy: Optional[str] = None
    links: list[str] = field(default_factory=list)
    fileLinks: list[str] = field(default_factory=list)


# pylint: disable=invalid-name

@@ -35,17 +35,17 @@ class Book(BookData):
    sortTitle: str = None
    subtitle: str = None
    description: str = ""
    languages: List[str] = field(default_factory=lambda: [])
    languages: list[str] = field(default_factory=list)
    series: str = ""
    seriesNumber: str = ""
    subjects: List[str] = field(default_factory=lambda: [])
    subjectPlaces: List[str] = field(default_factory=lambda: [])
    subjects: list[str] = field(default_factory=list)
    subjectPlaces: list[str] = field(default_factory=list)

    authors: List[str] = field(default_factory=lambda: [])
    authors: list[str] = field(default_factory=list)
    firstPublishedDate: str = ""
    publishedDate: str = ""

    cover: Document = None
    cover: Optional[Document] = None
    type: str = "Book"


@@ -58,10 +58,10 @@ class Edition(Book):
    isbn10: str = ""
    isbn13: str = ""
    oclcNumber: str = ""
    pages: int = None
    pages: Optional[int] = None
    physicalFormat: str = ""
    physicalFormatDetail: str = ""
    publishers: List[str] = field(default_factory=lambda: [])
    publishers: list[str] = field(default_factory=list)
    editionRank: int = 0

    type: str = "Edition"

@@ -73,7 +73,7 @@ class Work(Book):
    """work instance of a book object"""

    lccn: str = ""
    editions: List[str] = field(default_factory=lambda: [])
    editions: list[str] = field(default_factory=list)
    type: str = "Work"


@@ -83,12 +83,12 @@ class Author(BookData):
    """author of a book"""

    name: str
    isni: str = None
    viafId: str = None
    gutenbergId: str = None
    born: str = None
    died: str = None
    aliases: List[str] = field(default_factory=lambda: [])
    isni: Optional[str] = None
    viafId: Optional[str] = None
    gutenbergId: Optional[str] = None
    born: Optional[str] = None
    died: Optional[str] = None
    aliases: list[str] = field(default_factory=list)
    bio: str = ""
    wikipediaLink: str = ""
    type: str = "Author"
@@ -329,10 +329,9 @@ def add_status_on_create(sender, instance, created, *args, **kwargs):
        remove_status_task.delay(instance.id)
        return

    # To avoid creating a zillion unnecessary tasks caused by re-saving the model,
    # check if it's actually ready to send before we go. We're trusting this was
    # set correctly by the inbox or view
    if not instance.ready:
    # We don't want to create multiple add_status_tasks for each status, and because
    # the transactions are atomic, on_commit won't run until the status is ready to add.
    if not created:
        return

    # when creating new things, gotta wait on the transaction

@@ -343,6 +342,10 @@ def add_status_on_create(sender, instance, created, *args, **kwargs):

def add_status_on_create_command(sender, instance, created):
    """runs this code only after the database commit completes"""
    # boosts trigger 'saves" twice, so don't bother duplicating the task
    if sender == models.Boost and not created:
        return

    priority = STREAMS
    # check if this is an old status, de-prioritize if so
    # (this will happen if federation is very slow, or, more expectedly, on csv import)
@@ -1,22 +1,53 @@
""" using a bookwyrm instance as a source of book data """
from __future__ import annotations
from dataclasses import asdict, dataclass
from functools import reduce
import operator
from typing import Optional, Union, Any, Literal, overload

from django.contrib.postgres.search import SearchRank, SearchQuery
from django.db.models import F, Q
from django.db.models.query import QuerySet

from bookwyrm import models
from bookwyrm import connectors
from bookwyrm.settings import MEDIA_FULL_URL


@overload
def search(
    query: str,
    *,
    min_confidence: float = 0,
    filters: Optional[list[Any]] = None,
    return_first: Literal[False],
) -> QuerySet[models.Edition]:
    ...


@overload
def search(
    query: str,
    *,
    min_confidence: float = 0,
    filters: Optional[list[Any]] = None,
    return_first: Literal[True],
) -> Optional[models.Edition]:
    ...


# pylint: disable=arguments-differ
def search(query, min_confidence=0, filters=None, return_first=False):
def search(
    query: str,
    *,
    min_confidence: float = 0,
    filters: Optional[list[Any]] = None,
    return_first: bool = False,
) -> Union[Optional[models.Edition], QuerySet[models.Edition]]:
    """search your local database"""
    filters = filters or []
    if not query:
        return []
        return None if return_first else []
    query = query.strip()

    results = None

@@ -66,7 +97,9 @@ def format_search_result(search_result):
    ).json()


def search_identifiers(query, *filters, return_first=False):
def search_identifiers(
    query, *filters, return_first=False
) -> Union[Optional[models.Edition], QuerySet[models.Edition]]:
    """tries remote_id, isbn; defined as dedupe fields on the model"""
    if connectors.maybe_isbn(query):
        # Oh did you think the 'S' in ISBN stood for 'standard'?

@@ -87,7 +120,9 @@ def search_identifiers(query, *filters, return_first=False):
    return results


def search_title_author(query, min_confidence, *filters, return_first=False):
def search_title_author(
    query, min_confidence, *filters, return_first=False
) -> QuerySet[models.Edition]:
    """searches for title and author"""
    query = SearchQuery(query, config="simple") | SearchQuery(query, config="english")
    results = (

@@ -122,11 +157,11 @@ class SearchResult:
    title: str
    key: str
    connector: object
    view_link: str = None
    author: str = None
    year: str = None
    cover: str = None
    confidence: int = 1
    view_link: Optional[str] = None
    author: Optional[str] = None
    year: Optional[str] = None
    cover: Optional[str] = None
    confidence: float = 1.0

    def __repr__(self):
        # pylint: disable=consider-using-f-string
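For illustration only (not part of this commit; the query strings are made up), the keyword-only signature and Literal overloads are meant to be called like this:

    # return_first=False (the default) is typed as a QuerySet of Editions,
    # return_first=True as a single Edition or None; the bare "*" forces
    # min_confidence, filters, and return_first to be passed by keyword
    editions = search("parable of the sower", min_confidence=0.1, return_first=False)
    top_hit = search("9780756404079", return_first=True)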
@@ -1,5 +1,7 @@
""" functionality outline for a book data connector """
from __future__ import annotations
from abc import ABC, abstractmethod
from typing import Optional, TypedDict, Any, Callable, Union, Iterator
from urllib.parse import quote_plus
import imghdr
import logging

@@ -16,33 +18,38 @@ from bookwyrm import activitypub, models, settings
from bookwyrm.settings import USER_AGENT
from .connector_manager import load_more_data, ConnectorException, raise_not_valid_url
from .format_mappings import format_mappings

from ..book_search import SearchResult

logger = logging.getLogger(__name__)

JsonDict = dict[str, Any]


class ConnectorResults(TypedDict):
    """TypedDict for results returned by connector"""

    connector: AbstractMinimalConnector
    results: list[SearchResult]


class AbstractMinimalConnector(ABC):
    """just the bare bones, for other bookwyrm instances"""

    def __init__(self, identifier):
    def __init__(self, identifier: str):
        # load connector settings
        info = models.Connector.objects.get(identifier=identifier)
        self.connector = info

        # the things in the connector model to copy over
        self_fields = [
            "base_url",
            "books_url",
            "covers_url",
            "search_url",
            "isbn_search_url",
            "name",
            "identifier",
        ]
        for field in self_fields:
            setattr(self, field, getattr(info, field))
        self.base_url = info.base_url
        self.books_url = info.books_url
        self.covers_url = info.covers_url
        self.search_url = info.search_url
        self.isbn_search_url = info.isbn_search_url
        self.name = info.name
        self.identifier = info.identifier

    def get_search_url(self, query):
    def get_search_url(self, query: str) -> str:
        """format the query url"""
        # Check if the query resembles an ISBN
        if maybe_isbn(query) and self.isbn_search_url and self.isbn_search_url != "":
@@ -54,13 +61,21 @@ class AbstractMinimalConnector(ABC):
        # searched as free text. This, instead, only searches isbn if it's isbn-y
        return f"{self.search_url}{quote_plus(query)}"

    def process_search_response(self, query, data, min_confidence):
    def process_search_response(
        self, query: str, data: Any, min_confidence: float
    ) -> list[SearchResult]:
        """Format the search results based on the format of the query"""
        if maybe_isbn(query):
            return list(self.parse_isbn_search_data(data))[:10]
        return list(self.parse_search_data(data, min_confidence))[:10]

    async def get_results(self, session, url, min_confidence, query):
    async def get_results(
        self,
        session: aiohttp.ClientSession,
        url: str,
        min_confidence: float,
        query: str,
    ) -> Optional[ConnectorResults]:
        """try this specific connector"""
        # pylint: disable=line-too-long
        headers = {
|
|||
async with session.get(url, headers=headers, params=params) as response:
|
||||
if not response.ok:
|
||||
logger.info("Unable to connect to %s: %s", url, response.reason)
|
||||
return
|
||||
return None
|
||||
|
||||
try:
|
||||
raw_data = await response.json()
|
||||
except aiohttp.client_exceptions.ContentTypeError as err:
|
||||
logger.exception(err)
|
||||
return
|
||||
return None
|
||||
|
||||
return {
|
||||
"connector": self,
|
||||
"results": self.process_search_response(
|
||||
return ConnectorResults(
|
||||
connector=self,
|
||||
results=self.process_search_response(
|
||||
query, raw_data, min_confidence
|
||||
),
|
||||
}
|
||||
)
|
||||
except asyncio.TimeoutError:
|
||||
logger.info("Connection timed out for url: %s", url)
|
||||
except aiohttp.ClientError as err:
|
||||
logger.info(err)
|
||||
return None
|
||||
|
||||
@abstractmethod
|
||||
def get_or_create_book(self, remote_id):
|
||||
def get_or_create_book(self, remote_id: str) -> Optional[models.Book]:
|
||||
"""pull up a book record by whatever means possible"""
|
||||
|
||||
@abstractmethod
|
||||
def parse_search_data(self, data, min_confidence):
|
||||
def parse_search_data(
|
||||
self, data: Any, min_confidence: float
|
||||
) -> Iterator[SearchResult]:
|
||||
"""turn the result json from a search into a list"""
|
||||
|
||||
@abstractmethod
|
||||
def parse_isbn_search_data(self, data):
|
||||
def parse_isbn_search_data(self, data: Any) -> Iterator[SearchResult]:
|
||||
"""turn the result json from a search into a list"""
|
||||
|
||||
|
||||
class AbstractConnector(AbstractMinimalConnector):
|
||||
"""generic book data connector"""
|
||||
|
||||
def __init__(self, identifier):
|
||||
generated_remote_link_field = ""
|
||||
|
||||
def __init__(self, identifier: str):
|
||||
super().__init__(identifier)
|
||||
# fields we want to look for in book data to copy over
|
||||
# title we handle separately.
|
||||
self.book_mappings = []
|
||||
self.book_mappings: list[Mapping] = []
|
||||
self.author_mappings: list[Mapping] = []
|
||||
|
||||
def get_or_create_book(self, remote_id):
|
||||
def get_or_create_book(self, remote_id: str) -> Optional[models.Book]:
|
||||
"""translate arbitrary json into an Activitypub dataclass"""
|
||||
# first, check if we have the origin_id saved
|
||||
existing = models.Edition.find_existing_by_remote_id(
|
||||
remote_id
|
||||
) or models.Work.find_existing_by_remote_id(remote_id)
|
||||
if existing:
|
||||
if hasattr(existing, "default_edition"):
|
||||
if hasattr(existing, "default_edition") and isinstance(
|
||||
existing.default_edition, models.Edition
|
||||
):
|
||||
return existing.default_edition
|
||||
return existing
|
||||
|
||||
|
@@ -154,6 +177,9 @@ class AbstractConnector(AbstractMinimalConnector):
        )
        # this will dedupe automatically
        work = work_activity.to_model(model=models.Work, overwrite=False)
        if not work:
            return None

        for author in self.get_authors_from_data(work_data):
            work.authors.add(author)

@@ -161,12 +187,21 @@ class AbstractConnector(AbstractMinimalConnector):
        load_more_data.delay(self.connector.id, work.id)
        return edition

    def get_book_data(self, remote_id):  # pylint: disable=no-self-use
    def get_book_data(self, remote_id: str) -> JsonDict:  # pylint: disable=no-self-use
        """this allows connectors to override the default behavior"""
        return get_data(remote_id)

    def create_edition_from_data(self, work, edition_data, instance=None):
    def create_edition_from_data(
        self,
        work: models.Work,
        edition_data: Union[str, JsonDict],
        instance: Optional[models.Edition] = None,
    ) -> Optional[models.Edition]:
        """if we already have the work, we're ready"""
        if isinstance(edition_data, str):
            # We don't expect a string here
            return None

        mapped_data = dict_from_mappings(edition_data, self.book_mappings)
        mapped_data["work"] = work.remote_id
        edition_activity = activitypub.Edition(**mapped_data)

@@ -174,6 +209,9 @@ class AbstractConnector(AbstractMinimalConnector):
            model=models.Edition, overwrite=False, instance=instance
        )

        if not edition:
            return None

        # if we're updating an existing instance, we don't need to load authors
        if instance:
            return edition
@@ -190,7 +228,9 @@ class AbstractConnector(AbstractMinimalConnector):

        return edition

    def get_or_create_author(self, remote_id, instance=None):
    def get_or_create_author(
        self, remote_id: str, instance: Optional[models.Author] = None
    ) -> Optional[models.Author]:
        """load that author"""
        if not instance:
            existing = models.Author.find_existing_by_remote_id(remote_id)

@@ -210,46 +250,51 @@ class AbstractConnector(AbstractMinimalConnector):
            model=models.Author, overwrite=False, instance=instance
        )

    def get_remote_id_from_model(self, obj):
    def get_remote_id_from_model(self, obj: models.BookDataModel) -> Optional[str]:
        """given the data stored, how can we look this up"""
        return getattr(obj, getattr(self, "generated_remote_link_field"))
        remote_id: Optional[str] = getattr(obj, self.generated_remote_link_field)
        return remote_id

    def update_author_from_remote(self, obj):
    def update_author_from_remote(self, obj: models.Author) -> Optional[models.Author]:
        """load the remote data from this connector and add it to an existing author"""
        remote_id = self.get_remote_id_from_model(obj)
        if not remote_id:
            return None
        return self.get_or_create_author(remote_id, instance=obj)

    def update_book_from_remote(self, obj):
    def update_book_from_remote(self, obj: models.Edition) -> Optional[models.Edition]:
        """load the remote data from this connector and add it to an existing book"""
        remote_id = self.get_remote_id_from_model(obj)
        if not remote_id:
            return None
        data = self.get_book_data(remote_id)
        return self.create_edition_from_data(obj.parent_work, data, instance=obj)

    @abstractmethod
    def is_work_data(self, data):
    def is_work_data(self, data: JsonDict) -> bool:
        """differentiate works and editions"""

    @abstractmethod
    def get_edition_from_work_data(self, data):
    def get_edition_from_work_data(self, data: JsonDict) -> JsonDict:
        """every work needs at least one edition"""

    @abstractmethod
    def get_work_from_edition_data(self, data):
    def get_work_from_edition_data(self, data: JsonDict) -> JsonDict:
        """every edition needs a work"""

    @abstractmethod
    def get_authors_from_data(self, data):
    def get_authors_from_data(self, data: JsonDict) -> Iterator[models.Author]:
        """load author data"""

    @abstractmethod
    def expand_book_data(self, book):
    def expand_book_data(self, book: models.Book) -> None:
        """get more info on a book"""


def dict_from_mappings(data, mappings):
def dict_from_mappings(data: JsonDict, mappings: list[Mapping]) -> JsonDict:
    """create a dict in Activitypub format, using mappings supplies by
    the subclass"""
    result = {}
    result: JsonDict = {}
    for mapping in mappings:
        # sometimes there are multiple mappings for one field, don't
        # overwrite earlier writes in that case
@@ -259,7 +304,11 @@ def dict_from_mappings(data, mappings):
    return result


def get_data(url, params=None, timeout=settings.QUERY_TIMEOUT):
def get_data(
    url: str,
    params: Optional[dict[str, str]] = None,
    timeout: int = settings.QUERY_TIMEOUT,
) -> JsonDict:
    """wrapper for request.get"""
    # check if the url is blocked
    raise_not_valid_url(url)

@@ -292,10 +341,15 @@ def get_data(url, params=None, timeout=settings.QUERY_TIMEOUT):
        logger.info(err)
        raise ConnectorException(err)

    if not isinstance(data, dict):
        raise ConnectorException("Unexpected data format")

    return data


def get_image(url, timeout=10):
def get_image(
    url: str, timeout: int = 10
) -> Union[tuple[ContentFile[bytes], str], tuple[None, None]]:
    """wrapper for requesting an image"""
    raise_not_valid_url(url)
    try:
@@ -325,14 +379,19 @@ def get_image(url, timeout=10):
class Mapping:
    """associate a local database field with a field in an external dataset"""

    def __init__(self, local_field, remote_field=None, formatter=None):
    def __init__(
        self,
        local_field: str,
        remote_field: Optional[str] = None,
        formatter: Optional[Callable[[Any], Any]] = None,
    ):
        noop = lambda x: x

        self.local_field = local_field
        self.remote_field = remote_field or local_field
        self.formatter = formatter or noop

    def get_value(self, data):
    def get_value(self, data: JsonDict) -> Optional[Any]:
        """pull a field from incoming json and return the formatted version"""
        value = data.get(self.remote_field)
        if not value:

@@ -343,7 +402,7 @@ class Mapping:
        return None


def infer_physical_format(format_text):
def infer_physical_format(format_text: str) -> Optional[str]:
    """try to figure out what the standardized format is from the free value"""
    format_text = format_text.lower()
    if format_text in format_mappings:

@@ -356,7 +415,7 @@ def infer_physical_format(format_text):
    return matches[0]


def unique_physical_format(format_text):
def unique_physical_format(format_text: str) -> Optional[str]:
    """only store the format if it isn't directly in the format mappings"""
    format_text = format_text.lower()
    if format_text in format_mappings:

@@ -365,7 +424,7 @@ def unique_physical_format(format_text):
    return format_text


def maybe_isbn(query):
def maybe_isbn(query: str) -> bool:
    """check if a query looks like an isbn"""
    isbn = re.sub(r"[\W_]", "", query)  # removes filler characters
    # ISBNs must be numeric except an ISBN10 checkdigit can be 'X'
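A rough sketch of how a Mapping is meant to be used (an editor's illustration; the field name and formatter are hypothetical, and it assumes the unchanged tail of get_value applies self.formatter):

    # map the remote "isbn_13" key onto the local isbn_13 field, normalising it
    # on the way through; get_value() returns the formatted value, or None when
    # the remote field is missing
    strip_dashes = lambda value: value.replace("-", "")
    mapping = Mapping("isbn_13", formatter=strip_dashes)
    mapping.get_value({"isbn_13": "978-0-7564-0407-9"})  # -> "9780756404079"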
@@ -1,4 +1,7 @@
""" using another bookwyrm instance as a source of book data """
from __future__ import annotations
from typing import Any, Iterator

from bookwyrm import activitypub, models
from bookwyrm.book_search import SearchResult
from .abstract_connector import AbstractMinimalConnector

@@ -7,15 +10,19 @@ from .abstract_connector import AbstractMinimalConnector
class Connector(AbstractMinimalConnector):
    """this is basically just for search"""

    def get_or_create_book(self, remote_id):
    def get_or_create_book(self, remote_id: str) -> models.Edition:
        return activitypub.resolve_remote_id(remote_id, model=models.Edition)

    def parse_search_data(self, data, min_confidence):
    def parse_search_data(
        self, data: list[dict[str, Any]], min_confidence: float
    ) -> Iterator[SearchResult]:
        for search_result in data:
            search_result["connector"] = self
            yield SearchResult(**search_result)

    def parse_isbn_search_data(self, data):
    def parse_isbn_search_data(
        self, data: list[dict[str, Any]]
    ) -> Iterator[SearchResult]:
        for search_result in data:
            search_result["connector"] = self
            yield SearchResult(**search_result)
@@ -1,8 +1,11 @@
""" interface with whatever connectors the app has """
from __future__ import annotations
import asyncio
import importlib
import ipaddress
import logging
from asyncio import Future
from typing import Iterator, Any, Optional, Union, overload, Literal
from urllib.parse import urlparse

import aiohttp

@@ -12,6 +15,8 @@ from django.db.models import signals
from requests import HTTPError

from bookwyrm import book_search, models
from bookwyrm.book_search import SearchResult
from bookwyrm.connectors import abstract_connector
from bookwyrm.settings import SEARCH_TIMEOUT
from bookwyrm.tasks import app, CONNECTORS


@@ -22,11 +27,15 @@ class ConnectorException(HTTPError):
    """when the connector can't do what was asked"""


async def async_connector_search(query, items, min_confidence):
async def async_connector_search(
    query: str,
    items: list[tuple[str, abstract_connector.AbstractConnector]],
    min_confidence: float,
) -> list[Optional[abstract_connector.ConnectorResults]]:
    """Try a number of requests simultaneously"""
    timeout = aiohttp.ClientTimeout(total=SEARCH_TIMEOUT)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        tasks = []
        tasks: list[Future[Optional[abstract_connector.ConnectorResults]]] = []
        for url, connector in items:
            tasks.append(
                asyncio.ensure_future(
|
|||
)
|
||||
|
||||
results = await asyncio.gather(*tasks)
|
||||
return results
|
||||
return list(results)
|
||||
|
||||
|
||||
def search(query, min_confidence=0.1, return_first=False):
|
||||
@overload
|
||||
def search(
|
||||
query: str, *, min_confidence: float = 0.1, return_first: Literal[False]
|
||||
) -> list[abstract_connector.ConnectorResults]:
|
||||
...
|
||||
|
||||
|
||||
@overload
|
||||
def search(
|
||||
query: str, *, min_confidence: float = 0.1, return_first: Literal[True]
|
||||
) -> Optional[SearchResult]:
|
||||
...
|
||||
|
||||
|
||||
def search(
|
||||
query: str, *, min_confidence: float = 0.1, return_first: bool = False
|
||||
) -> Union[list[abstract_connector.ConnectorResults], Optional[SearchResult]]:
|
||||
"""find books based on arbitrary keywords"""
|
||||
if not query:
|
||||
return []
|
||||
results = []
|
||||
return None if return_first else []
|
||||
|
||||
items = []
|
||||
for connector in get_connectors():
|
||||
|
@@ -57,8 +81,12 @@ def search(query, min_confidence=0.1, return_first=False):
        items.append((url, connector))

    # load as many results as we can
    results = asyncio.run(async_connector_search(query, items, min_confidence))
    results = [r for r in results if r]
    # failed requests will return None, so filter those out
    results = [
        r
        for r in asyncio.run(async_connector_search(query, items, min_confidence))
        if r
    ]

    if return_first:
        # find the best result from all the responses and return that

@@ -66,11 +94,12 @@ def search(query, min_confidence=0.1, return_first=False):
        all_results = sorted(all_results, key=lambda r: r.confidence, reverse=True)
        return all_results[0] if all_results else None

    # failed requests will return None, so filter those out
    return results


def first_search_result(query, min_confidence=0.1):
def first_search_result(
    query: str, min_confidence: float = 0.1
) -> Union[models.Edition, SearchResult, None]:
    """search until you find a result that fits"""
    # try local search first
    result = book_search.search(query, min_confidence=min_confidence, return_first=True)

@@ -80,13 +109,13 @@ def first_search_result(query, min_confidence=0.1):
    return search(query, min_confidence=min_confidence, return_first=True) or None


def get_connectors():
def get_connectors() -> Iterator[abstract_connector.AbstractConnector]:
    """load all connectors"""
    for info in models.Connector.objects.filter(active=True).order_by("priority").all():
        yield load_connector(info)


def get_or_create_connector(remote_id):
def get_or_create_connector(remote_id: str) -> abstract_connector.AbstractConnector:
    """get the connector related to the object's server"""
    url = urlparse(remote_id)
    identifier = url.netloc
@@ -110,7 +139,7 @@ def get_or_create_connector(remote_id):


@app.task(queue=CONNECTORS)
def load_more_data(connector_id, book_id):
def load_more_data(connector_id: str, book_id: str) -> None:
    """background the work of getting all 10,000 editions of LoTR"""
    connector_info = models.Connector.objects.get(id=connector_id)
    connector = load_connector(connector_info)

@@ -119,7 +148,9 @@ def load_more_data(connector_id, book_id):


@app.task(queue=CONNECTORS)
def create_edition_task(connector_id, work_id, data):
def create_edition_task(
    connector_id: int, work_id: int, data: Union[str, abstract_connector.JsonDict]
) -> None:
    """separate task for each of the 10,000 editions of LoTR"""
    connector_info = models.Connector.objects.get(id=connector_id)
    connector = load_connector(connector_info)

@@ -127,23 +158,31 @@ def create_edition_task(connector_id, work_id, data):
    connector.create_edition_from_data(work, data)


def load_connector(connector_info):
def load_connector(
    connector_info: models.Connector,
) -> abstract_connector.AbstractConnector:
    """instantiate the connector class"""
    connector = importlib.import_module(
        f"bookwyrm.connectors.{connector_info.connector_file}"
    )
    return connector.Connector(connector_info.identifier)
    return connector.Connector(connector_info.identifier)  # type: ignore[no-any-return]


@receiver(signals.post_save, sender="bookwyrm.FederatedServer")
# pylint: disable=unused-argument
def create_connector(sender, instance, created, *args, **kwargs):
def create_connector(
    sender: Any,
    instance: models.FederatedServer,
    created: Any,
    *args: Any,
    **kwargs: Any,
) -> None:
    """create a connector to an external bookwyrm server"""
    if instance.application_type == "bookwyrm":
        get_or_create_connector(f"https://{instance.server_name}")


def raise_not_valid_url(url):
def raise_not_valid_url(url: str) -> None:
    """do some basic reality checks on the url"""
    parsed = urlparse(url)
    if not parsed.scheme in ["http", "https"]:
|
|||
""" inventaire data connector """
|
||||
import re
|
||||
from typing import Any, Union, Optional, Iterator, Iterable
|
||||
|
||||
from bookwyrm import models
|
||||
from bookwyrm.book_search import SearchResult
|
||||
from .abstract_connector import AbstractConnector, Mapping
|
||||
from .abstract_connector import AbstractConnector, Mapping, JsonDict
|
||||
from .abstract_connector import get_data
|
||||
from .connector_manager import ConnectorException, create_edition_task
|
||||
|
||||
|
@ -13,7 +14,7 @@ class Connector(AbstractConnector):
|
|||
|
||||
generated_remote_link_field = "inventaire_id"
|
||||
|
||||
def __init__(self, identifier):
|
||||
def __init__(self, identifier: str):
|
||||
super().__init__(identifier)
|
||||
|
||||
get_first = lambda a: a[0]
|
||||
|
@ -60,13 +61,13 @@ class Connector(AbstractConnector):
|
|||
Mapping("died", remote_field="wdt:P570", formatter=get_first),
|
||||
] + shared_mappings
|
||||
|
||||
def get_remote_id(self, value):
|
||||
def get_remote_id(self, value: str) -> str:
|
||||
"""convert an id/uri into a url"""
|
||||
return f"{self.books_url}?action=by-uris&uris={value}"
|
||||
|
||||
def get_book_data(self, remote_id):
|
||||
def get_book_data(self, remote_id: str) -> JsonDict:
|
||||
data = get_data(remote_id)
|
||||
extracted = list(data.get("entities").values())
|
||||
extracted = list(data.get("entities", {}).values())
|
||||
try:
|
||||
data = extracted[0]
|
||||
except (KeyError, IndexError):
|
||||
|
@@ -74,10 +75,16 @@ class Connector(AbstractConnector):
        # flatten the data so that images, uri, and claims are on the same level
        return {
            **data.get("claims", {}),
            **{k: data.get(k) for k in ["uri", "image", "labels", "sitelinks", "type"]},
            **{
                k: data.get(k)
                for k in ["uri", "image", "labels", "sitelinks", "type"]
                if k in data
            },
        }

    def parse_search_data(self, data, min_confidence):
    def parse_search_data(
        self, data: JsonDict, min_confidence: float
    ) -> Iterator[SearchResult]:
        for search_result in data.get("results", []):
            images = search_result.get("image")
            cover = f"{self.covers_url}/img/entities/{images[0]}" if images else None

@@ -96,7 +103,7 @@ class Connector(AbstractConnector):
                connector=self,
            )

    def parse_isbn_search_data(self, data):
    def parse_isbn_search_data(self, data: JsonDict) -> Iterator[SearchResult]:
        """got some data"""
        results = data.get("entities")
        if not results:
@@ -114,35 +121,44 @@ class Connector(AbstractConnector):
                connector=self,
            )

    def is_work_data(self, data):
    def is_work_data(self, data: JsonDict) -> bool:
        return data.get("type") == "work"

    def load_edition_data(self, work_uri):
    def load_edition_data(self, work_uri: str) -> JsonDict:
        """get a list of editions for a work"""
        # pylint: disable=line-too-long
        url = f"{self.books_url}?action=reverse-claims&property=wdt:P629&value={work_uri}&sort=true"
        return get_data(url)

    def get_edition_from_work_data(self, data):
        data = self.load_edition_data(data.get("uri"))
    def get_edition_from_work_data(self, data: JsonDict) -> JsonDict:
        work_uri = data.get("uri")
        if not work_uri:
            raise ConnectorException("Invalid URI")
        data = self.load_edition_data(work_uri)
        try:
            uri = data.get("uris", [])[0]
        except IndexError:
            raise ConnectorException("Invalid book data")
        return self.get_book_data(self.get_remote_id(uri))

    def get_work_from_edition_data(self, data):
        uri = data.get("wdt:P629", [None])[0]
    def get_work_from_edition_data(self, data: JsonDict) -> JsonDict:
        try:
            uri = data.get("wdt:P629", [])[0]
        except IndexError:
            raise ConnectorException("Invalid book data")

        if not uri:
            raise ConnectorException("Invalid book data")
        return self.get_book_data(self.get_remote_id(uri))

    def get_authors_from_data(self, data):
    def get_authors_from_data(self, data: JsonDict) -> Iterator[models.Author]:
        authors = data.get("wdt:P50", [])
        for author in authors:
            yield self.get_or_create_author(self.get_remote_id(author))
            model = self.get_or_create_author(self.get_remote_id(author))
            if model:
                yield model

    def expand_book_data(self, book):
    def expand_book_data(self, book: models.Book) -> None:
        work = book
        # go from the edition to the work, if necessary
        if isinstance(book, models.Edition):
@@ -154,11 +170,16 @@ class Connector(AbstractConnector):
            # who knows, man
            return

        for edition_uri in edition_options.get("uris"):
        for edition_uri in edition_options.get("uris", []):
            remote_id = self.get_remote_id(edition_uri)
            create_edition_task.delay(self.connector.id, work.id, remote_id)

    def create_edition_from_data(self, work, edition_data, instance=None):
    def create_edition_from_data(
        self,
        work: models.Work,
        edition_data: Union[str, JsonDict],
        instance: Optional[models.Edition] = None,
    ) -> Optional[models.Edition]:
        """pass in the url as data and then call the version in abstract connector"""
        if isinstance(edition_data, str):
            try:
@@ -168,22 +189,26 @@ class Connector(AbstractConnector):
                return None
        return super().create_edition_from_data(work, edition_data, instance=instance)

    def get_cover_url(self, cover_blob, *_):
    def get_cover_url(
        self, cover_blob: Union[list[JsonDict], JsonDict], *_: Any
    ) -> Optional[str]:
        """format the relative cover url into an absolute one:
        {"url": "/img/entities/e794783f01b9d4f897a1ea9820b96e00d346994f"}
        """
        # covers may or may not be a list
        if isinstance(cover_blob, list) and len(cover_blob) > 0:
        if isinstance(cover_blob, list):
            if len(cover_blob) == 0:
                return None
            cover_blob = cover_blob[0]
        cover_id = cover_blob.get("url")
        if not cover_id:
        if not isinstance(cover_id, str):
            return None
        # cover may or may not be an absolute url already
        if re.match(r"^http", cover_id):
            return cover_id
        return f"{self.covers_url}{cover_id}"

    def resolve_keys(self, keys):
    def resolve_keys(self, keys: Iterable[str]) -> list[str]:
        """cool, it's "wd:Q3156592" now what the heck does that mean"""
        results = []
        for uri in keys:
@@ -191,10 +216,10 @@ class Connector(AbstractConnector):
                data = self.get_book_data(self.get_remote_id(uri))
            except ConnectorException:
                continue
            results.append(get_language_code(data.get("labels")))
            results.append(get_language_code(data.get("labels", {})))
        return results

    def get_description(self, links):
    def get_description(self, links: JsonDict) -> str:
        """grab an extracted excerpt from wikipedia"""
        link = links.get("enwiki")
        if not link:

@@ -204,15 +229,15 @@ class Connector(AbstractConnector):
            data = get_data(url)
        except ConnectorException:
            return ""
        return data.get("extract")
        return data.get("extract", "")

    def get_remote_id_from_model(self, obj):
    def get_remote_id_from_model(self, obj: models.BookDataModel) -> str:
        """use get_remote_id to figure out the link from a model obj"""
        remote_id_value = obj.inventaire_id
        return self.get_remote_id(remote_id_value)


def get_language_code(options, code="en"):
def get_language_code(options: JsonDict, code: str = "en") -> Any:
    """when there are a bunch of translation but we need a single field"""
    result = options.get(code)
    if result:
@@ -1,11 +1,12 @@
""" openlibrary data connector """
import re
from typing import Any, Optional, Union, Iterator, Iterable

from markdown import markdown

from bookwyrm import models
from bookwyrm.book_search import SearchResult
from .abstract_connector import AbstractConnector, Mapping
from .abstract_connector import AbstractConnector, Mapping, JsonDict
from .abstract_connector import get_data, infer_physical_format, unique_physical_format
from .connector_manager import ConnectorException, create_edition_task
from .openlibrary_languages import languages

@@ -16,7 +17,7 @@ class Connector(AbstractConnector):

    generated_remote_link_field = "openlibrary_link"

    def __init__(self, identifier):
    def __init__(self, identifier: str):
        super().__init__(identifier)

        get_first = lambda a, *args: a[0]

@@ -96,14 +97,14 @@ class Connector(AbstractConnector):
            Mapping("inventaire_id", remote_field="links", formatter=get_inventaire_id),
        ]

    def get_book_data(self, remote_id):
    def get_book_data(self, remote_id: str) -> JsonDict:
        data = get_data(remote_id)
        if data.get("type", {}).get("key") == "/type/redirect":
            remote_id = self.base_url + data.get("location")
            remote_id = self.base_url + data.get("location", "")
            return get_data(remote_id)
        return data

    def get_remote_id_from_data(self, data):
    def get_remote_id_from_data(self, data: JsonDict) -> str:
        """format a url from an openlibrary id field"""
        try:
            key = data["key"]
@@ -111,10 +112,10 @@ class Connector(AbstractConnector):
            raise ConnectorException("Invalid book data")
        return f"{self.books_url}{key}"

    def is_work_data(self, data):
    def is_work_data(self, data: JsonDict) -> bool:
        return bool(re.match(r"^[\/\w]+OL\d+W$", data["key"]))

    def get_edition_from_work_data(self, data):
    def get_edition_from_work_data(self, data: JsonDict) -> JsonDict:
        try:
            key = data["key"]
        except KeyError:

@@ -126,7 +127,7 @@ class Connector(AbstractConnector):
            raise ConnectorException("No editions for work")
        return edition

    def get_work_from_edition_data(self, data):
    def get_work_from_edition_data(self, data: JsonDict) -> JsonDict:
        try:
            key = data["works"][0]["key"]
        except (IndexError, KeyError):

@@ -134,7 +135,7 @@ class Connector(AbstractConnector):
        url = f"{self.books_url}{key}"
        return self.get_book_data(url)

    def get_authors_from_data(self, data):
    def get_authors_from_data(self, data: JsonDict) -> Iterator[models.Author]:
        """parse author json and load or create authors"""
        for author_blob in data.get("authors", []):
            author_blob = author_blob.get("author", author_blob)

@@ -146,7 +147,7 @@ class Connector(AbstractConnector):
                continue
            yield author

    def get_cover_url(self, cover_blob, size="L"):
    def get_cover_url(self, cover_blob: list[str], size: str = "L") -> Optional[str]:
        """ask openlibrary for the cover"""
        if not cover_blob:
            return None
@@ -154,8 +155,10 @@ class Connector(AbstractConnector):
        image_name = f"{cover_id}-{size}.jpg"
        return f"{self.covers_url}/b/id/{image_name}"

    def parse_search_data(self, data, min_confidence):
        for idx, search_result in enumerate(data.get("docs")):
    def parse_search_data(
        self, data: JsonDict, min_confidence: float
    ) -> Iterator[SearchResult]:
        for idx, search_result in enumerate(data.get("docs", [])):
            # build the remote id from the openlibrary key
            key = self.books_url + search_result["key"]
            author = search_result.get("author_name") or ["Unknown"]

@@ -176,7 +179,7 @@ class Connector(AbstractConnector):
                confidence=confidence,
            )

    def parse_isbn_search_data(self, data):
    def parse_isbn_search_data(self, data: JsonDict) -> Iterator[SearchResult]:
        for search_result in list(data.values()):
            # build the remote id from the openlibrary key
            key = self.books_url + search_result["key"]

@@ -190,12 +193,12 @@ class Connector(AbstractConnector):
                year=search_result.get("publish_date"),
            )

    def load_edition_data(self, olkey):
    def load_edition_data(self, olkey: str) -> JsonDict:
        """query openlibrary for editions of a work"""
        url = f"{self.books_url}/works/{olkey}/editions"
        return self.get_book_data(url)

    def expand_book_data(self, book):
    def expand_book_data(self, book: models.Book) -> None:
        work = book
        # go from the edition to the work, if necessary
        if isinstance(book, models.Edition):
@@ -208,14 +211,14 @@ class Connector(AbstractConnector):
            # who knows, man
            return

        for edition_data in edition_options.get("entries"):
        for edition_data in edition_options.get("entries", []):
            # does this edition have ANY interesting data?
            if ignore_edition(edition_data):
                continue
            create_edition_task.delay(self.connector.id, work.id, edition_data)


def ignore_edition(edition_data):
def ignore_edition(edition_data: JsonDict) -> bool:
    """don't load a million editions that have no metadata"""
    # an isbn, we love to see it
    if edition_data.get("isbn_13") or edition_data.get("isbn_10"):

@@ -234,7 +237,7 @@ def ignore_edition(edition_data):
    return True


def get_description(description_blob):
def get_description(description_blob: Union[JsonDict, str]) -> Optional[str]:
    """descriptions can be a string or a dict"""
    if isinstance(description_blob, dict):
        description = markdown(description_blob.get("value"))

@@ -252,12 +255,12 @@ def get_description(description_blob):
    return description


def get_openlibrary_key(key):
def get_openlibrary_key(key: str) -> str:
    """convert /books/OL27320736M into OL27320736M"""
    return key.split("/")[-1]


def get_languages(language_blob):
def get_languages(language_blob: Iterable[JsonDict]) -> list[Optional[str]]:
    """/language/eng -> English"""
    langs = []
    for lang in language_blob:
@@ -265,14 +268,14 @@ def get_languages(language_blob):
    return langs


def get_dict_field(blob, field_name):
def get_dict_field(blob: Optional[JsonDict], field_name: str) -> Optional[Any]:
    """extract the isni from the remote id data for the author"""
    if not blob or not isinstance(blob, dict):
        return None
    return blob.get(field_name)


def get_wikipedia_link(links):
def get_wikipedia_link(links: list[Any]) -> Optional[str]:
    """extract wikipedia links"""
    if not isinstance(links, list):
        return None

@@ -285,7 +288,7 @@ def get_wikipedia_link(links):
    return None


def get_inventaire_id(links):
def get_inventaire_id(links: list[Any]) -> Optional[str]:
    """extract and format inventaire ids"""
    if not isinstance(links, list):
        return None

@@ -295,11 +298,13 @@ def get_inventaire_id(links):
            continue
        if link.get("title") == "inventaire.io":
            iv_link = link.get("url")
            if not isinstance(iv_link, str):
                return None
            return iv_link.split("/")[-1]
    return None


def pick_default_edition(options):
def pick_default_edition(options: list[JsonDict]) -> Optional[JsonDict]:
    """favor physical copies with covers in english"""
    if not options:
        return None
@@ -111,6 +111,7 @@ class EditionFromWorkForm(CustomForm):
        model = models.Work
        fields = [
            "title",
            "sort_title",
            "subtitle",
            "authors",
            "description",
bookwyrm/management/commands/repair_editions.py (new file, 21 lines)
@@ -0,0 +1,21 @@
""" Repair editions with missing works """
from django.core.management.base import BaseCommand
from bookwyrm import models


class Command(BaseCommand):
    """command-line options"""

    help = "Repairs an edition that is in a broken state"

    # pylint: disable=unused-argument
    def handle(self, *args, **options):
        """Find and repair broken editions"""
        # Find broken editions
        editions = models.Edition.objects.filter(parent_work__isnull=True)
        self.stdout.write(f"Repairing {editions.count()} edition(s):")

        # Do repair
        for edition in editions:
            edition.repair()
            self.stdout.write(".", ending="")
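Presumably this registers like any other Django management command (the command name follows from the module name), so it could be invoked roughly as follows; a sketch, not shown in this diff:

    # equivalent to running: python manage.py repair_editions
    from django.core.management import call_command

    call_command("repair_editions")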
@@ -6,8 +6,9 @@ from functools import reduce
import json
import operator
import logging
from typing import List
from typing import Any, Optional
from uuid import uuid4
from typing_extensions import Self

import aiohttp
from Crypto.PublicKey import RSA

@@ -85,7 +86,7 @@ class ActivitypubMixin:
        super().__init__(*args, **kwargs)

    @classmethod
    def find_existing_by_remote_id(cls, remote_id):
    def find_existing_by_remote_id(cls, remote_id: str) -> Self:
        """look up a remote id in the db"""
        return cls.find_existing({"id": remote_id})

@@ -137,7 +138,7 @@ class ActivitypubMixin:
            queue=queue,
        )

    def get_recipients(self, software=None) -> List[str]:
    def get_recipients(self, software=None) -> list[str]:
        """figure out which inbox urls to post to"""
        # first we have to figure out who should receive this activity
        privacy = self.privacy if hasattr(self, "privacy") else "public"

@@ -198,7 +199,14 @@ class ActivitypubMixin:
class ObjectMixin(ActivitypubMixin):
    """add this mixin for object models that are AP serializable"""

    def save(self, *args, created=None, software=None, priority=BROADCAST, **kwargs):
    def save(
        self,
        *args: Any,
        created: Optional[bool] = None,
        software: Any = None,
        priority: str = BROADCAST,
        **kwargs: Any,
    ) -> None:
        """broadcast created/updated/deleted objects as appropriate"""
        broadcast = kwargs.get("broadcast", True)
        # this bonus kwarg would cause an error in the base save method

@@ -507,14 +515,14 @@ def unfurl_related_field(related_field, sort_field=None):


@app.task(queue=BROADCAST)
def broadcast_task(sender_id: int, activity: str, recipients: List[str]):
def broadcast_task(sender_id: int, activity: str, recipients: list[str]):
    """the celery task for broadcast"""
    user_model = apps.get_model("bookwyrm.User", require_ready=True)
    sender = user_model.objects.select_related("key_pair").get(id=sender_id)
    asyncio.run(async_broadcast(recipients, sender, activity))


async def async_broadcast(recipients: List[str], sender, data: str):
async def async_broadcast(recipients: list[str], sender, data: str):
    """Send all the broadcasts simultaneously"""
    timeout = aiohttp.ClientTimeout(total=10)
    async with aiohttp.ClientSession(timeout=timeout) as session:
@@ -1,6 +1,7 @@
""" database schema for books and shelves """
from itertools import chain
import re
from typing import Any

from django.contrib.postgres.search import SearchVectorField
from django.contrib.postgres.indexes import GinIndex

@@ -90,7 +91,7 @@ class BookDataModel(ObjectMixin, BookWyrmModel):

        abstract = True

    def save(self, *args, **kwargs):
    def save(self, *args: Any, **kwargs: Any) -> None:
        """ensure that the remote_id is within this instance"""
        if self.id:
            self.remote_id = self.get_remote_id()

@@ -204,7 +205,7 @@ class Book(BookDataModel):
            text += f" ({self.edition_info})"
        return text

    def save(self, *args, **kwargs):
    def save(self, *args: Any, **kwargs: Any) -> None:
        """can't be abstract for query reasons, but you shouldn't USE it"""
        if not isinstance(self, Edition) and not isinstance(self, Work):
            raise ValueError("Books should be added as Editions or Works")

@@ -343,7 +344,7 @@ class Edition(Book):
        # max rank is 9
        return rank

    def save(self, *args, **kwargs):
    def save(self, *args: Any, **kwargs: Any) -> None:
        """set some fields on the edition object"""
        # calculate isbn 10/13
        if self.isbn_13 and self.isbn_13[:3] == "978" and not self.isbn_10:

@@ -380,6 +381,19 @@ class Edition(Book):

        return super().save(*args, **kwargs)

    @transaction.atomic
    def repair(self):
        """If an edition is in a bad state (missing a work), let's fix that"""
        # made sure it actually NEEDS reapir
        if self.parent_work:
            return

        new_work = Work.objects.create(title=self.title)
        new_work.authors.set(self.authors.all())

        self.parent_work = new_work
        self.save(update_fields=["parent_work"], broadcast=False)

    @classmethod
    def viewer_aware_objects(cls, viewer):
        """annotate a book query with metadata related to the user"""
@@ -61,7 +61,7 @@ class FederatedServer(BookWyrmModel):
        ).update(active=True, deactivation_reason=None)

    @classmethod
    def is_blocked(cls, url):
    def is_blocked(cls, url: str) -> bool:
        """look up if a domain is blocked"""
        url = urlparse(url)
        domain = url.netloc
@@ -368,10 +368,16 @@ class TagField(ManyToManyField):
            activity_type = item.__class__.__name__
            if activity_type == "User":
                activity_type = "Mention"

            if activity_type == "Hashtag":
                name = item.name
            else:
                name = f"@{getattr(item, item.name_field)}"

            tags.append(
                activitypub.Link(
                    href=item.remote_id,
                    name=f"@{getattr(item, item.name_field)}",
                    name=name,
                    type=activity_type,
                )
            )
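For illustration (an editor's sketch with made-up values, and assuming the Hashtag model's name already carries the leading "#"), the resulting tag objects would differ roughly like this:

    # a mention keeps the "@username" form, a hashtag now uses its own name
    mention = activitypub.Link(
        href="https://example.net/user/mouse", name="@mouse@example.net", type="Mention"
    )
    hashtag = activitypub.Link(
        href="https://example.net/hashtag/1", name="#bookclub", type="Hashtag"
    )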
@@ -28,3 +28,31 @@
.vertical-copy button {
  width: 100%;
}

.copy-tooltip {
  overflow: visible;
  visibility: hidden;
  width: 140px;
  background-color: #555;
  color: #fff;
  text-align: center;
  border-radius: 6px;
  padding: 5px;
  position: absolute;
  z-index: 1;
  margin-left: -30px;
  margin-top: -45px;
  opacity: 0;
  transition: opacity 0.3s;
}

.copy-tooltip::after {
  content: "";
  position: absolute;
  top: 100%;
  left: 50%;
  margin-left: -60px;
  border-width: 5px;
  border-style: solid;
  border-color: #555 transparent transparent transparent;
}
Binary file not shown.
@@ -39,6 +39,7 @@
[icomoon SVG font: glyph path data omitted. The hunk adds a new "copy" glyph alongside the existing graphic-heart, graphic-paperplane, graphic-banknote, barcode, spinner, and search glyphs.]
Before Size: 34 KiB / After Size: 34 KiB
Binary file not shown.
Binary file not shown.
bookwyrm/static/css/vendor/icons.css (vendored, 13)
@@ -1,10 +1,10 @@
@font-face {
font-family: 'icomoon';
src: url('../fonts/icomoon.eot?r7jc98');
src: url('../fonts/icomoon.eot?r7jc98#iefix') format('embedded-opentype'),
url('../fonts/icomoon.ttf?r7jc98') format('truetype'),
url('../fonts/icomoon.woff?r7jc98') format('woff'),
url('../fonts/icomoon.svg?r7jc98#icomoon') format('svg');
src: url('../fonts/icomoon.eot?nr4nq7');
src: url('../fonts/icomoon.eot?nr4nq7#iefix') format('embedded-opentype'),
url('../fonts/icomoon.ttf?nr4nq7') format('truetype'),
url('../fonts/icomoon.woff?nr4nq7') format('woff'),
url('../fonts/icomoon.svg?nr4nq7#icomoon') format('svg');
font-weight: normal;
font-style: normal;
font-display: block;
@@ -122,6 +122,9 @@
.icon-graphic-banknote:before {
content: "\e920";
}
.icon-copy:before {
content: "\e92c";
}
.icon-search:before {
content: "\e986";
}
@@ -65,6 +65,9 @@ let BookWyrm = new (class {
.querySelectorAll('input[type="file"]')
.forEach(bookwyrm.disableIfTooLarge.bind(bookwyrm));
document.querySelectorAll("[data-copytext]").forEach(bookwyrm.copyText.bind(bookwyrm));
document
.querySelectorAll("[data-copywithtooltip]")
.forEach(bookwyrm.copyWithTooltip.bind(bookwyrm));
document
.querySelectorAll(".modal.is-active")
.forEach(bookwyrm.handleActiveModal.bind(bookwyrm));
@@ -524,6 +527,21 @@ let BookWyrm = new (class {
textareaEl.parentNode.appendChild(copyButtonEl);
}

copyWithTooltip(copyButtonEl) {
const text = document.getElementById(copyButtonEl.dataset.contentId).innerHTML;
const tooltipEl = document.getElementById(copyButtonEl.dataset.tooltipId);

copyButtonEl.addEventListener("click", () => {
navigator.clipboard.writeText(text);
tooltipEl.style.visibility = "visible";
tooltipEl.style.opacity = 1;
setTimeout(function () {
tooltipEl.style.visibility = "hidden";
tooltipEl.style.opacity = 0;
}, 3000);
});
}

/**
 * Handle the details dropdown component.
 *
@@ -4,42 +4,50 @@
{% if book.isbn_13 or book.oclc_number or book.asin or book.aasin or book.isfdb %}
<dl>
{% if book.isbn_13 %}
<div class="is-flex">
<div class="is-flex is-flex-wrap-wrap">
<dt class="mr-1">{% trans "ISBN:" %}</dt>
<dd itemprop="isbn">{{ book.isbn_13 }}</dd>
<dd itemprop="isbn" class="mr-1" id="isbn_content">{{ book.isbn_13 }}</dd>
<div>
<button class="button is-small" data-copywithtooltip data-content-id="isbn_content" data-tooltip-id="isbn_tooltip">
<span class="icon icon-copy" title="{% trans "Copy ISBN" %}">
<span class="is-sr-only">{% trans "Copy ISBN" %}</span>
</span>
</button>
<span class="copy-tooltip" id="isbn_tooltip">{% trans "Copied ISBN!" %}</span>
</div>
</div>
{% endif %}

{% if book.oclc_number %}
<div class="is-flex">
<div class="is-flex is-flex-wrap-wrap">
<dt class="mr-1">{% trans "OCLC Number:" %}</dt>
<dd>{{ book.oclc_number }}</dd>
</div>
{% endif %}

{% if book.asin %}
<div class="is-flex">
<div class="is-flex is-flex-wrap-wrap">
<dt class="mr-1">{% trans "ASIN:" %}</dt>
<dd>{{ book.asin }}</dd>
</div>
{% endif %}

{% if book.aasin %}
<div class="is-flex">
<div class="is-flex is-flex-wrap-wrap">
<dt class="mr-1">{% trans "Audible ASIN:" %}</dt>
<dd>{{ book.aasin }}</dd>
</div>
{% endif %}

{% if book.isfdb %}
<div class="is-flex">
<div class="is-flex is-flex-wrap-wrap">
<dt class="mr-1">{% trans "ISFDB ID:" %}</dt>
<dd>{{ book.isfdb }}</dd>
</div>
{% endif %}

{% if book.goodreads_key %}
<div class="is-flex">
<div class="is-flex is-flex-wrap-wrap">
<dt class="mr-1">{% trans "Goodreads:" %}</dt>
<dd>{{ book.goodreads_key }}</dd>
</div>
@@ -111,11 +111,11 @@
{% endif %}
{% endfor %}
</div>
{% else %}
{% elif add_author %}
<p class="column is-half">{% blocktrans with name=add_author %}Creating a new author: {{ name }}{% endblocktrans %}</p>
{% endif %}

{% if not book %}
{% if not book.parent_work %}
<div class="column is-half">
<fieldset>
<legend class="title is-5 mb-1">
@@ -10,7 +10,9 @@
{% csrf_token %}

<input type="hidden" name="last_edited_by" value="{{ request.user.id }}">
{% if form.parent_work %}
<input type="hidden" name="parent_work" value="{% firstof book.parent_work.id form.parent_work %}">
{% endif %}

<div class="columns">
<div class="column is-half">
@@ -58,6 +58,7 @@
<form action="{% url 'create-book-data' %}" method="POST" name="add-edition-form">
{% csrf_token %}
{{ work_form.title }}
{{ work_form.sort_title }}
{{ work_form.subtitle }}
{{ work_form.authors }}
{{ work_form.description }}
@@ -4,7 +4,7 @@
{% block filter %}
<div class="control">
<label class="label" for="id_search">{% trans "Search editions" %}</label>
<input type="text" class="input" name="q" value="{{ request.GET.q|default:'' }}" id="id_search">
<input type="text" class="input" name="q" value="{{ request.GET.q|default:'' }}" id="id_search" spellcheck="false">
</div>
{% endblock %}
@@ -6,7 +6,7 @@
<h2 class="title is-4">{% trans "What are you reading?" %}</h2>
<form class="field has-addons" method="get" action="{% url 'get-started-books' %}">
<div class="control">
<input type="text" name="query" value="{{ request.GET.query }}" class="input" placeholder="{% trans 'Search for a book' %}" aria-label="{% trans 'Search for a book' %}">
<input type="text" name="query" value="{{ request.GET.query }}" class="input" placeholder="{% trans 'Search for a book' %}" aria-label="{% trans 'Search for a book' %}" spellcheck="false">
{% if request.GET.query and not book_results %}
<p class="help">{% blocktrans with query=request.GET.query %}No books found for "{{ query }}"{% endblocktrans %}. {% blocktrans %}You can add books when you start using {{ site_name }}.{% endblocktrans %}</p>
{% endif %}
@@ -8,7 +8,7 @@
<p class="subtitle is-6">{% trans "You can follow users on other BookWyrm instances and federated services like Mastodon." %}</p>
<form class="field has-addons" method="get" action="{% url 'get-started-users' %}">
<div class="control">
<input type="text" name="query" value="{{ request.GET.query }}" class="input" placeholder="{% trans 'Search for a user' %}" aria-label="{% trans 'Search for a user' %}">
<input type="text" name="query" value="{{ request.GET.query }}" class="input" placeholder="{% trans 'Search for a user' %}" aria-label="{% trans 'Search for a user' %}" spellcheck="false">
{% if request.GET.query and no_results %}
<p class="help">{% blocktrans with query=request.GET.query %}No users found for "{{ query }}"{% endblocktrans %}</p>
{% endif %}
@@ -8,7 +8,7 @@
<div class="block">
<form class="field has-addons" method="get" action="{% url 'group-find-users' group.id %}">
<div class="control">
<input type="text" name="user_query" value="{{ request.GET.user_query }}" class="input" placeholder="{% trans 'Search to add a user' %}" aria-label="{% trans 'Search to add a user' %}">
<input type="text" name="user_query" value="{{ request.GET.user_query }}" class="input" placeholder="{% trans 'Search to add a user' %}" aria-label="{% trans 'Search to add a user' %}" spellcheck="false">
</div>
<div class="control" id="tour-group-member-search">
<button class="button" type="submit">
@@ -38,7 +38,7 @@
{% else %}
{% trans "Search for a book" as search_placeholder %}
{% endif %}
<input aria-label="{{ search_placeholder }}" id="tour-search" class="input" type="text" name="q" placeholder="{{ search_placeholder }}" value="{{ query }}">
<input aria-label="{{ search_placeholder }}" id="tour-search" class="input" type="text" name="q" placeholder="{{ search_placeholder }}" value="{{ query }}" spellcheck="false">
</div>
<div class="control">
<button class="button" type="submit">
@@ -210,7 +210,7 @@
<form name="search" action="{% url 'list' list_id=list.id slug=list.name|slugify %}" method="GET" class="block">
<div class="field has-addons">
<div class="control">
<input aria-label="{% trans 'Search for a book' %}" class="input" type="text" name="q" placeholder="{% trans 'Search for a book' %}" value="{{ query }}">
<input aria-label="{% trans 'Search for a book' %}" class="input" type="text" name="q" placeholder="{% trans 'Search for a book' %}" value="{{ query }}" spellcheck="false">
</div>
<div class="control">
<button class="button" type="submit">
@@ -14,7 +14,7 @@
<form class="block" action="{% url 'search' %}" method="GET">
<div class="field has-addons">
<div class="control">
<input type="text" class="input" name="q" value="{{ query }}" aria-label="{% trans 'Search query' %}" id="tour-search-page-input">
<input type="text" class="input" name="q" value="{{ query }}" aria-label="{% trans 'Search query' %}" id="tour-search-page-input" spellcheck="false">
</div>
<div class="control">
<div class="select" aria-label="{% trans 'Search type' %}">
@@ -15,7 +15,11 @@
<div class="control">
<button class="button is-link" type="submit">
<span class="icon icon-spinner" aria-hidden="true"></span>
{% if draft %}
<span>{% trans "Update" %}</span>
{% else %}
<span>{% trans "Post" %}</span>
{% endif %}
</button>
</div>
</div>
@@ -12,6 +12,10 @@ register = template.Library()
@register.filter(name="rating")
def get_rating(book, user):
"""get the overall rating of a book"""
# this shouldn't happen, but it CAN
if not book.parent_work:
return None

return cache.get_or_set(
f"book-rating-{book.parent_work.id}",
lambda u, b: models.Review.objects.filter(
@@ -280,3 +280,13 @@ class Openlibrary(TestCase):
self.assertFalse(ignore_edition({"languages": "languages/fr"}))
self.assertTrue(ignore_edition({"languages": "languages/eng"}))
self.assertTrue(ignore_edition({"format": "paperback"}))

def test_remote_id_from_model(self):
"""figure out a url from an id"""
obj = models.Author.objects.create(
name="George Elliott", openlibrary_key="OL453734A"
)
self.assertEqual(
self.connector.get_remote_id_from_model(obj),
"https://openlibrary.org/authors/OL453734A",
)
@@ -24,8 +24,7 @@ class Book(TestCase):
title="Example Work", remote_id="https://example.com/book/1"
)
self.first_edition = models.Edition.objects.create(
title="Example Edition",
parent_work=self.work,
title="Example Edition", parent_work=self.work
)
self.second_edition = models.Edition.objects.create(
title="Another Example Edition",
@@ -143,3 +142,15 @@ class Book(TestCase):
for article in articles
)
self.assertTrue(all(book.sort_title == "test edition" for book in books))

def test_repair_edition(self):
"""Fix editions with no works"""
edition = models.Edition.objects.create(title="test")
edition.authors.set([models.Author.objects.create(name="Author Name")])
self.assertIsNone(edition.parent_work)

edition.repair()
edition.refresh_from_db()

self.assertEqual(edition.parent_work.title, "test")
self.assertEqual(edition.parent_work.authors.count(), 1)
@@ -135,6 +135,41 @@ class Status(TestCase):
self.assertEqual(activity["content"], "<p>test content</p>")
self.assertEqual(activity["sensitive"], False)

def test_status_with_hashtag_to_activity(self, *_):
"""status with hashtag with a "pure" serializer"""
tag = models.Hashtag.objects.create(name="#content")
status = models.Status.objects.create(
content="test #content", user=self.local_user
)
status.mention_hashtags.add(tag)

activity = status.to_activity(pure=True)
self.assertEqual(activity["id"], status.remote_id)
self.assertEqual(activity["type"], "Note")
self.assertEqual(activity["content"], "<p>test #content</p>")
self.assertEqual(activity["sensitive"], False)
self.assertEqual(activity["tag"][0]["type"], "Hashtag")
self.assertEqual(activity["tag"][0]["name"], "#content")
self.assertEqual(
activity["tag"][0]["href"], f"https://{settings.DOMAIN}/hashtag/{tag.id}"
)

def test_status_with_mention_to_activity(self, *_):
"""status with mention with a "pure" serializer"""
status = models.Status.objects.create(
content="test @rat@rat.com", user=self.local_user
)
status.mention_users.add(self.remote_user)

activity = status.to_activity(pure=True)
self.assertEqual(activity["id"], status.remote_id)
self.assertEqual(activity["type"], "Note")
self.assertEqual(activity["content"], "<p>test @rat@rat.com</p>")
self.assertEqual(activity["sensitive"], False)
self.assertEqual(activity["tag"][0]["type"], "Mention")
self.assertEqual(activity["tag"][0]["name"], f"@{self.remote_user.username}")
self.assertEqual(activity["tag"][0]["href"], self.remote_user.remote_id)

def test_status_to_activity_tombstone(self, *_):
"""subclass of the base model version with a "pure" serializer"""
status = models.Status.objects.create(
@@ -71,6 +71,12 @@ class RatingTags(TestCase):
)
self.assertEqual(rating_tags.get_rating(self.book, self.local_user), 5)

def test_get_rating_broken_edition(self, *_):
"""Don't have a server error if an edition is missing a work"""
broken_book = models.Edition.objects.create(title="Test")
broken_book.parent_work = None
self.assertIsNone(rating_tags.get_rating(broken_book, self.local_user))

def test_get_user_rating(self, *_):
"""get a user's most recent rating of a book"""
with patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"):
@@ -62,7 +62,7 @@ class StatusTransactions(TransactionTestCase):
with patch("bookwyrm.activitystreams.add_status_task.apply_async") as mock:
view(request, "comment")

self.assertEqual(mock.call_count, 2)
self.assertEqual(mock.call_count, 1)


@patch("bookwyrm.suggested_users.rerank_suggestions_task.delay")
@@ -428,6 +428,14 @@ http://www.fish.com/"""
f'(<a href="{url}">www.fish.com/</a>)',
)

def test_format_links_punctuation(self, *_):
"""don’t take trailing punctuation into account pls"""
url = "http://www.fish.com/"
self.assertEqual(
views.status.format_links(f"{url}."),
f'<a href="{url}">www.fish.com/</a>.',
)

def test_format_links_special_chars(self, *_):
"""find and format urls into a tags"""
url = "https://archive.org/details/dli.granth.72113/page/n25/mode/2up"
@@ -157,6 +157,7 @@ def add_authors(request, data):
"""helper for adding authors"""
add_author = [author for author in request.POST.getlist("add_author") if author]
if not add_author:
data["add_author"] = []
return data

data["add_author"] = add_author
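For illustration only (not part of this diff): the new line matters when the form posts only blank author fields, so the template always finds an add_author key. A minimal sketch of the guard, with raw standing in for request.POST.getlist("add_author"):

raw = ["", ""]                   # stand-in for request.POST.getlist("add_author")
add_author = [author for author in raw if author]
assert add_author == []          # so the helper now stores [] and returns early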
@@ -6,6 +6,7 @@ from urllib.parse import urlparse
from django.contrib.auth.decorators import login_required
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
from django.db import transaction
from django.db.models import Q
from django.http import HttpResponse, HttpResponseBadRequest, Http404
from django.shortcuts import get_object_or_404, redirect
@@ -56,6 +57,7 @@ class CreateStatus(View):
return TemplateResponse(request, "compose.html", data)

# pylint: disable=too-many-branches
@transaction.atomic
def post(self, request, status_type, existing_status_id=None):
"""create status of whatever type"""
created = not existing_status_id
@@ -83,7 +85,6 @@ class CreateStatus(View):
return redirect_to_referer(request)

status = form.save(request, commit=False)
status.ready = False
# save the plain, unformatted version of the status for future editing
status.raw_content = status.content
if hasattr(status, "quote"):
@@ -123,7 +124,6 @@ class CreateStatus(View):
if hasattr(status, "quote"):
status.quote = to_markdown(status.quote)

status.ready = True
status.save(created=created)

# update a readthrough, if needed
@@ -305,6 +305,11 @@ def format_links(content):
formatted_content += potential_link[0]
potential_link = potential_link[1:-1]

ends_with_punctuation = _ends_with_punctuation(potential_link)
if ends_with_punctuation:
punctuation_glyph = potential_link[-1]
potential_link = potential_link[0:-1]

try:
# raises an error on anything that's not a valid link
validator(potential_link)
@@ -324,6 +329,9 @@ def format_links(content):
if wrapped:
formatted_content += wrapper_close

if ends_with_punctuation:
formatted_content += punctuation_glyph

return formatted_content

@@ -336,6 +344,15 @@ def _wrapped(text):
return False


def _ends_with_punctuation(text):
"""check if a line of text ends with a punctuation glyph"""
glyphs = [".", ",", ";", ":", "!", "?", "”", "’", '"', "»"]
for glyph in glyphs:
if text[-1] == glyph:
return True
return False


def to_markdown(content):
"""catch links and convert to markdown"""
content = format_links(content)
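For illustration only (not part of this diff): the effect of the punctuation handling above, mirrored by the new test_format_links_punctuation test, is that a trailing period is kept outside the generated anchor.

from bookwyrm import views

views.status.format_links("http://www.fish.com/.")
# -> '<a href="http://www.fish.com/">www.fish.com/</a>.'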
mypy.ini (3)

@@ -10,6 +10,9 @@ django_settings_module = "bookwyrm.settings"
ignore_errors = True
implicit_reexport = True

[mypy-bookwyrm.connectors.*]
ignore_errors = False

[mypy-celerywyrm.*]
ignore_errors = False