''' openlibrary data connector '''
import re

import requests
from django.core.files.base import ContentFile

from bookwyrm import models
from .abstract_connector import AbstractConnector, SearchResult, Mapping
from .abstract_connector import ConnectorException
from .abstract_connector import get_date, get_data
from .openlibrary_languages import languages


class Connector(AbstractConnector):
    ''' instantiate a connector for OL '''
    def __init__(self, identifier):
        super().__init__(identifier)

        get_first = lambda a: a[0]
        self.key_mappings = [
            Mapping('isbn_13', model=models.Edition, formatter=get_first),
            Mapping('isbn_10', model=models.Edition, formatter=get_first),
            Mapping('lccn', model=models.Work, formatter=get_first),
            Mapping(
                'oclc_number',
                remote_field='oclc_numbers',
                model=models.Edition,
                formatter=get_first
            ),
            Mapping(
                'openlibrary_key',
                remote_field='key',
                formatter=get_openlibrary_key
            ),
            Mapping('goodreads_key'),
            Mapping('asin'),
        ]

        self.book_mappings = self.key_mappings + [
            Mapping('sort_title'),
            Mapping('subtitle'),
            Mapping('description', formatter=get_description),
            Mapping('languages', formatter=get_languages),
            Mapping('series', formatter=get_first),
            Mapping('series_number'),
            Mapping('subjects'),
            Mapping('subject_places'),
            Mapping(
                'first_published_date',
                remote_field='first_publish_date',
                formatter=get_date
            ),
            Mapping(
                'published_date',
                remote_field='publish_date',
                formatter=get_date
            ),
            Mapping(
                'pages',
                model=models.Edition,
                remote_field='number_of_pages'
            ),
            Mapping('physical_format', model=models.Edition),
            Mapping('publishers'),
        ]

        self.author_mappings = [
            Mapping('born', remote_field='birth_date', formatter=get_date),
            Mapping('died', remote_field='death_date', formatter=get_date),
            Mapping('bio', formatter=get_description),
        ]

    def get_remote_id_from_data(self, data):
        ''' format a url from an openlibrary key '''
        try:
            key = data['key']
        except KeyError:
            raise ConnectorException('Invalid book data')
        return '%s/%s' % (self.books_url, key)

    def is_work_data(self, data):
        ''' check if a blob of json data is a work (rather than an edition) '''
        return bool(re.match(r'^[\/\w]+OL\d+W$', data['key']))
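
    # An illustration of the key formats being distinguished (example values, not
    # taken from a live response): a work key like '/works/OL1234567W' matches
    # because it ends in W, while an edition key like '/books/OL27320736M' does not.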

    def get_edition_from_work_data(self, data):
        ''' given a work, pull out a default edition to use '''
        try:
            key = data['key']
        except KeyError:
            raise ConnectorException('Invalid book data')
        url = '%s/%s/editions' % (self.books_url, key)
        data = get_data(url)
        return pick_default_edition(data['entries'])

    def get_work_from_edition_date(self, data):
        ''' given an edition, load the work it belongs to '''
        try:
            key = data['works'][0]['key']
        except (IndexError, KeyError):
            raise ConnectorException('No work found for edition')
        url = '%s/%s' % (self.books_url, key)
        return get_data(url)

    def get_authors_from_data(self, data):
        ''' parse author json and load or create authors '''
        for author_blob in data.get('authors', []):
            author_blob = author_blob.get('author', author_blob)
            # this id is "/authors/OL1234567A" and we want just "OL1234567A"
            author_id = author_blob['key'].split('/')[-1]
            yield self.get_or_create_author(author_id)

    def get_cover_from_data(self, data):
        ''' ask openlibrary for the cover '''
        if not data.get('covers'):
            return None

        cover_id = data.get('covers')[0]
        image_name = '%s-M.jpg' % cover_id
        url = '%s/b/id/%s' % (self.covers_url, image_name)
        response = requests.get(url)
        if not response.ok:
            response.raise_for_status()
        image_content = ContentFile(response.content)
        return [image_name, image_content]

    def parse_search_data(self, data):
        ''' pull the list of result docs out of a search response '''
        return data.get('docs')

    def format_search_result(self, search_result):
        ''' build a SearchResult from an openlibrary search doc '''
        # build the remote id from the openlibrary key
        key = self.books_url + search_result['key']
        author = search_result.get('author_name') or ['Unknown']
        return SearchResult(
            title=search_result.get('title'),
            key=key,
            author=', '.join(author),
            year=search_result.get('first_publish_year'),
        )
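
    # The fields read above come from an openlibrary search doc; a hypothetical doc
    # (shape assumed from the lookups above, values invented) would look like:
    # {
    #     'key': '/works/OL1234567W',
    #     'title': 'Example Title',
    #     'author_name': ['Example Author'],
    #     'first_publish_year': 1999,
    # }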

    def load_edition_data(self, olkey):
        ''' query openlibrary for editions of a work '''
        url = '%s/works/%s/editions.json' % (self.books_url, olkey)
        return get_data(url)

    def expand_book_data(self, book):
        ''' query for and create the other editions of a work '''
        work = book
        # go from the edition to the work, if necessary
        if isinstance(book, models.Edition):
            work = book.parent_work

        # we can mass download edition data from OL to avoid repeatedly querying
        edition_options = self.load_edition_data(work.openlibrary_key)
        for edition_data in edition_options.get('entries'):
            olkey = edition_data.get('key').split('/')[-1]
            # make sure the edition isn't already in the database
            if models.Edition.objects.filter(openlibrary_key=olkey).count():
                continue

            # creates and populates the book from the data
            edition = self.create_book(olkey, edition_data, models.Edition)
            # ensures that the edition is associated with the work
            edition.parent_work = work
            edition.save()
            # get author data from the work if it's missing from the edition
            # (a related manager is always truthy, so check .exists() instead)
            if not edition.authors.exists() and work.authors.exists():
                edition.authors.set(work.authors.all())

    def get_or_create_author(self, olkey):
        ''' load that author '''
        if not re.match(r'^OL\d+A$', olkey):
            raise ValueError('Invalid OpenLibrary author ID')
        author = models.Author.objects.filter(openlibrary_key=olkey).first()
        if author:
            return author

        url = '%s/authors/%s.json' % (self.base_url, olkey)
        data = get_data(url)

        author = models.Author(openlibrary_key=olkey)
        author = self.update_from_mappings(author, data, self.author_mappings)
        name = data.get('name')
        # TODO this is making some BOLD assumption
        if name:
            author.last_name = name.split(' ')[-1]
            author.first_name = ' '.join(name.split(' ')[:-1])
        author.save()

        return author
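
    # Illustration of the name-splitting assumption flagged by the TODO above (a
    # hypothetical name, not from openlibrary data): 'Ursula K. Le Guin' would be
    # stored as first_name='Ursula K. Le' and last_name='Guin'.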


def get_description(description_blob):
    ''' descriptions can be a string or a dict '''
    if isinstance(description_blob, dict):
        return description_blob.get('value')
    return description_blob
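
# The dict form is assumed to look something like {'type': '/type/text', 'value': '...'},
# which is why only the 'value' field is kept; a plain string passes through unchanged.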


def get_openlibrary_key(key):
    ''' convert /books/OL27320736M into OL27320736M '''
    return key.split('/')[-1]


def get_languages(language_blob):
    ''' /languages/eng -> English '''
    langs = []
    for lang in language_blob:
        langs.append(
            languages.get(lang.get('key', ''), None)
        )
    return langs
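
# For example (a made-up blob in the shape the loop above expects), the input
# [{'key': '/languages/eng'}, {'key': '/languages/fre'}] would return
# ['English', 'French'], assuming both keys appear in the openlibrary_languages table.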


def pick_default_edition(options):
    ''' favor physical copies with covers in english '''
    if not options:
        return None
    if len(options) == 1:
        return options[0]

    # each filter is only applied if it leaves at least one candidate; otherwise
    # the unfiltered list is kept and the next criterion is tried
    options = [e for e in options if e.get('cover')] or options
    options = [e for e in options
               if '/languages/eng' in str(e.get('languages'))] or options
    formats = ['paperback', 'hardcover', 'mass market paperback']
    options = [e for e in options
               if str(e.get('physical_format')).lower() in formats] or options
    options = [e for e in options if e.get('isbn_13')] or options
    options = [e for e in options if e.get('ocaid')] or options
    return options[0]
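

# A minimal usage sketch (illustration only, not part of the connector). The identifier
# passed to Connector() is assumed to match an existing models.Connector row, since the
# urls used above (base_url, books_url, covers_url) are set up by AbstractConnector
# rather than in this file:
#
#     connector = Connector('openlibrary.org')
#     author = connector.get_or_create_author('OL1234567A')
#     for doc in connector.parse_search_data(search_response_json):
#         result = connector.format_search_result(doc)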