""" functionality outline for a book data connector """
2020-03-07 20:22:28 +00:00
from abc import ABC, abstractmethod
2022-02-02 05:18:25 +00:00
import imghdr
2022-02-03 23:11:01 +00:00
import ipaddress
2020-12-30 17:14:07 +00:00
import logging
2022-02-03 23:11:01 +00:00
from urllib.parse import urlparse
2020-03-07 20:22:28 +00:00
2022-02-02 05:18:25 +00:00
from django.core.files.base import ContentFile
2020-05-10 19:56:59 +00:00
from django.db import transaction
import requests
2021-06-20 16:23:57 +00:00
from requests.exceptions import RequestException
2020-05-10 19:56:59 +00:00
from bookwyrm import activitypub, models, settings
2021-01-02 16:14:28 +00:00
from .connector_manager import load_more_data, ConnectorException
2021-09-29 19:21:19 +00:00
from .format_mappings import format_mappings
2020-03-07 20:22:28 +00:00
2020-12-30 17:14:07 +00:00
logger = logging.getLogger(__name__)
2021-03-08 16:49:10 +00:00
2020-11-29 02:56:28 +00:00
class AbstractMinimalConnector(ABC):
    """just the bare bones, for other bookwyrm instances"""

    def __init__(self, identifier):
        # load connector settings
        info = models.Connector.objects.get(identifier=identifier)
        self.connector = info

        # the fields in the connector model to copy over
        self_fields = [
            "base_url",
            "books_url",
            "covers_url",
            "search_url",
            "isbn_search_url",
            "name",
            "identifier",
        ]
        for field in self_fields:
            setattr(self, field, getattr(info, field))

    def search(self, query, min_confidence=None, timeout=settings.QUERY_TIMEOUT):
        """free text search"""
        params = {}
        if min_confidence:
            params["min_confidence"] = min_confidence
        data = self.get_search_data(
            f"{self.search_url}{query}",
            params=params,
            timeout=timeout,
        )
        results = []
        for doc in self.parse_search_data(data)[:10]:
            results.append(self.format_search_result(doc))
        return results

    def isbn_search(self, query, timeout=settings.QUERY_TIMEOUT):
        """isbn search"""
        params = {}
        data = self.get_search_data(
            f"{self.isbn_search_url}{query}",
            params=params,
            timeout=timeout,
        )
        results = []
        # this shouldn't return multiple results, but just in case
        for doc in self.parse_isbn_search_data(data)[:10]:
            results.append(self.format_isbn_search_result(doc))
        return results

    def get_search_data(self, remote_id, **kwargs):  # pylint: disable=no-self-use
        """this allows connectors to override the default behavior"""
        return get_data(remote_id, **kwargs)

    @abstractmethod
    def get_or_create_book(self, remote_id):
        """pull up a book record by whatever means possible"""

    @abstractmethod
    def parse_search_data(self, data):
        """turn the result json from a search into a list"""

    @abstractmethod
    def format_search_result(self, search_result):
        """create a SearchResult obj from json"""

    @abstractmethod
    def parse_isbn_search_data(self, data):
        """turn the result json from a search into a list"""

    @abstractmethod
    def format_isbn_search_result(self, search_result):
        """create a SearchResult obj from json"""
class AbstractConnector(AbstractMinimalConnector):
    """generic book data connector"""

    def __init__(self, identifier):
        super().__init__(identifier)
        # fields we want to look for in book data to copy over;
        # title we handle separately.
        self.book_mappings = []

    def get_or_create_book(self, remote_id):
        """translate arbitrary json into an Activitypub dataclass"""
        # first, check if we have the origin_id saved
        existing = models.Edition.find_existing_by_remote_id(
            remote_id
        ) or models.Work.find_existing_by_remote_id(remote_id)
        if existing:
            if hasattr(existing, "default_edition"):
                return existing.default_edition
            return existing

        # load the json data from the remote data source
        data = self.get_book_data(remote_id)
        if self.is_work_data(data):
            try:
                edition_data = self.get_edition_from_work_data(data)
            except (KeyError, ConnectorException):
                # hack: re-use the work data as the edition data
                # this is why remote ids aren't necessarily unique
                edition_data = data
            work_data = data
        else:
            edition_data = data
            try:
                work_data = self.get_work_from_edition_data(data)
            except (KeyError, ConnectorException) as err:
                logger.info(err)
                work_data = data

        if not work_data or not edition_data:
            raise ConnectorException(f"Unable to load book data: {remote_id}")

        with transaction.atomic():
            # create activitypub object
            work_activity = activitypub.Work(
                **dict_from_mappings(work_data, self.book_mappings)
            )
            # this will dedupe automatically
            work = work_activity.to_model(model=models.Work, overwrite=False)
            for author in self.get_authors_from_data(work_data):
                work.authors.add(author)

            edition = self.create_edition_from_data(work, edition_data)
        load_more_data.delay(self.connector.id, work.id)
        return edition

    def get_book_data(self, remote_id):  # pylint: disable=no-self-use
        """this allows connectors to override the default behavior"""
        return get_data(remote_id)

    def create_edition_from_data(self, work, edition_data, instance=None):
        """if we already have the work, we're ready"""
        mapped_data = dict_from_mappings(edition_data, self.book_mappings)
        mapped_data["work"] = work.remote_id
        edition_activity = activitypub.Edition(**mapped_data)
        edition = edition_activity.to_model(
            model=models.Edition, overwrite=False, instance=instance
        )

        # if we're updating an existing instance, we don't need to load authors
        if instance:
            return edition

        if not edition.connector:
            edition.connector = self.connector
            edition.save(broadcast=False, update_fields=["connector"])

        for author in self.get_authors_from_data(edition_data):
            edition.authors.add(author)
        # use the authors from the work if none are found for the edition
        if not edition.authors.exists() and work.authors.exists():
            edition.authors.set(work.authors.all())

        return edition

    def get_or_create_author(self, remote_id, instance=None):
        """load that author"""
        if not instance:
            existing = models.Author.find_existing_by_remote_id(remote_id)
            if existing:
                return existing

        data = self.get_book_data(remote_id)

        # author_mappings is provided by the subclass
        mapped_data = dict_from_mappings(data, self.author_mappings)
        try:
            activity = activitypub.Author(**mapped_data)
        except activitypub.ActivitySerializerError:
            return None

        # this will dedupe
        return activity.to_model(
            model=models.Author, overwrite=False, instance=instance
        )

    def get_remote_id_from_model(self, obj):
        """given the data stored, how can we look this up"""
        return getattr(obj, self.generated_remote_link_field)

    def update_author_from_remote(self, obj):
        """load the remote data from this connector and add it to an existing author"""
        remote_id = self.get_remote_id_from_model(obj)
        return self.get_or_create_author(remote_id, instance=obj)

    def update_book_from_remote(self, obj):
        """load the remote data from this connector and add it to an existing book"""
        remote_id = self.get_remote_id_from_model(obj)
        data = self.get_book_data(remote_id)
        return self.create_edition_from_data(obj.parent_work, data, instance=obj)

    @abstractmethod
    def is_work_data(self, data):
        """differentiate works and editions"""

    @abstractmethod
    def get_edition_from_work_data(self, data):
        """every work needs at least one edition"""

    @abstractmethod
    def get_work_from_edition_data(self, data):
        """every edition needs a work"""

    @abstractmethod
    def get_authors_from_data(self, data):
        """load author data"""

    @abstractmethod
    def expand_book_data(self, book):
        """get more info on a book"""


def dict_from_mappings(data, mappings):
    """create a dict in Activitypub format, using mappings supplied by
    the subclass"""
    result = {}
    for mapping in mappings:
        # sometimes there are multiple mappings for one field; don't
        # overwrite earlier writes in that case
        if mapping.local_field in result and result[mapping.local_field]:
            continue
        result[mapping.local_field] = mapping.get_value(data)
    return result
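
# An illustrative call to dict_from_mappings (the data and mappings here are
# assumptions for demonstration):
#
#     mappings = [
#         Mapping("title"),
#         Mapping("isbn13", remote_field="isbn_13"),
#     ]
#     dict_from_mappings({"title": "Example Book", "isbn_13": "9781234567897"}, mappings)
#     # -> {"title": "Example Book", "isbn13": "9781234567897"}
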
def get_data(url, params=None, timeout=10):
    """wrapper for requests.get"""
    # check if the url is blocked
    raise_not_valid_url(url)
    if models.FederatedServer.is_blocked(url):
        raise ConnectorException(f"Attempting to load data from blocked url: {url}")

    try:
        resp = requests.get(
            url,
            params=params,
            headers={  # pylint: disable=line-too-long
                "Accept": (
                    'application/json, application/activity+json, application/ld+json; profile="https://www.w3.org/ns/activitystreams"; charset=utf-8'
                ),
                "User-Agent": settings.USER_AGENT,
            },
            timeout=timeout,
        )
    except RequestException as err:
        logger.info(err)
        raise ConnectorException(err)

    if not resp.ok:
        raise ConnectorException(f"Unable to load data: {url} returned {resp.status_code}")

    try:
        data = resp.json()
    except ValueError as err:
        logger.info(err)
        raise ConnectorException(err)

    return data
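
# A typical call, with an assumed example URL:
#
#     data = get_data("https://openlibrary.org/works/OL123W.json", timeout=5)
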
def get_image(url, timeout=10):
    """wrapper for requesting an image"""
    raise_not_valid_url(url)
    try:
        resp = requests.get(
            url,
            headers={
                "User-Agent": settings.USER_AGENT,
            },
            timeout=timeout,
        )
    except RequestException as err:
        logger.info(err)
        return None, None

    if not resp.ok:
        return None, None

    image_content = ContentFile(resp.content)
    extension = imghdr.what(None, image_content.read())
    if not extension:
        logger.info("File requested was not an image: %s", url)
        return None, None

    return image_content, extension
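
# Illustrative usage for saving a cover image (the URL, model, and field
# names are assumptions):
#
#     image_content, extension = get_image("https://example.net/covers/book.jpg")
#     if image_content:
#         edition.cover.save(f"{edition.id}.{extension}", image_content)
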
def raise_not_valid_url(url):
    """do some basic reality checks on the url"""
    parsed = urlparse(url)
    if parsed.scheme not in ["http", "https"]:
        raise ConnectorException(f"Invalid scheme: {url}")

    try:
        # check the hostname rather than the full netloc, so a port
        # doesn't hide an IP address
        ipaddress.ip_address(parsed.hostname)
        raise ConnectorException(f"Provided url is an IP address: {url}")
    except ValueError:
        # it's not an IP address, which is good
        pass
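
# Illustrative behavior (the example URLs are assumptions):
#
#     raise_not_valid_url("https://openlibrary.org/works/OL123W")  # passes
#     raise_not_valid_url("ftp://example.net/file")    # raises: bad scheme
#     raise_not_valid_url("https://203.0.113.7/book")  # raises: bare IP address
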
class Mapping:
    """associate a local database field with a field in an external dataset"""

    def __init__(self, local_field, remote_field=None, formatter=None):
        noop = lambda x: x
        self.local_field = local_field
        self.remote_field = remote_field or local_field
        self.formatter = formatter or noop

    def get_value(self, data):
        """pull a field from incoming json and return the formatted version"""
        value = data.get(self.remote_field)
        if not value:
            return None
        try:
            return self.formatter(value)
        except:  # pylint: disable=bare-except
            return None
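
# Illustrative usage (the field names and formatter are assumptions):
#
#     mapping = Mapping("publishedDate", remote_field="publish_date", formatter=str.strip)
#     mapping.get_value({"publish_date": " 1993-10-01 "})  # -> "1993-10-01"
#     mapping.get_value({})                                # -> None
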
def infer_physical_format(format_text):
    """try to figure out what the standardized format is from the free value"""
    format_text = format_text.lower()
    if format_text in format_mappings:
        # try a direct match
        return format_mappings[format_text]

    # failing that, try a substring match
    matches = [v for k, v in format_mappings.items() if k in format_text]
    if not matches:
        return None
    return matches[0]
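
# Illustrative examples, assuming format_mappings contains an entry keyed
# "paperback":
#
#     infer_physical_format("Paperback")              # direct match
#     infer_physical_format("Mass Market Paperback")  # substring match
#     infer_physical_format("unknown binding")        # -> None
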
def unique_physical_format(format_text):
    """only store the format if it isn't directly in the format mappings"""
    format_text = format_text.lower()
    if format_text in format_mappings:
        # it's a direct match, so saving it would be redundant
        return None
    return format_text