Load author data with fedireads connector

Mouse Reeve 2020-05-09 12:09:40 -07:00
parent bb01834a31
commit 093945e7fb
9 changed files with 73 additions and 25 deletions

View file

@@ -1,6 +1,6 @@
''' bring activitypub functions into the namespace '''
from .actor import get_actor
from .book import get_book
from .book import get_book, get_author
from .create import get_create, get_update
from .follow import get_following, get_followers
from .follow import get_follow_request, get_unfollow, get_accept, get_reject

View file

@@ -36,7 +36,7 @@ def get_book(book, recursive=True):
'name': book.title,
'url': book.absolute_id,
'authors': [get_author(a) for a in book.authors.all()],
'authors': [a.absolute_id for a in book.authors.all()],
'first_published_date': book.first_published_date.isoformat() if \
book.first_published_date else None,
'published_date': book.published_date.isoformat() if \
@@ -68,7 +68,21 @@ def get_book(book, recursive=True):
def get_author(author):
''' serialize an author '''
return {
'name': author.name,
fields = [
'name',
'born',
'died',
'aliases',
'bio',
'openlibrary_key',
'wikipedia_link',
]
activity = {
'@context': 'https://www.w3.org/ns/activitystreams',
'url': author.absolute_id,
'type': 'Person',
}
for field in fields:
if hasattr(author, field):
activity[field] = author.__getattribute__(field)
return activity
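
For reference, a minimal sketch of the dict this serializer now builds, using a hypothetical author record; the host in the url and all field values are placeholders, not taken from this commit:

# illustrative only: what get_author() would return for a hypothetical author
example_author_activity = {
    '@context': 'https://www.w3.org/ns/activitystreams',
    'url': 'https://example.net/author/1',   # author.absolute_id on an assumed host
    'type': 'Person',
    'name': 'Example Author',
    'born': None,
    'died': None,
    'aliases': [],
    'bio': '',
    'openlibrary_key': 'OL0000000A',
    'wikipedia_link': '',
}

Assuming every name in the fields list is a field on the author model, the hasattr check passes for each of them, so unset values come through as None or empty strings rather than being dropped from the activity.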

View file

@@ -1,4 +1,6 @@
''' select and call a connector for whatever book task needs doing '''
from requests import HTTPError
import importlib
from urllib.parse import urlparse
@@ -96,7 +98,10 @@ def search(query):
dedup_slug = lambda r: '%s/%s/%s' % (r.title, r.author, r.year)
result_index = set()
for connector in get_connectors():
result_set = connector.search(query)
try:
result_set = connector.search(query)
except HTTPError:
continue
result_set = [r for r in result_set \
if dedup_slug(r) not in result_index]
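
The new try/except lets a search keep going when one remote connector raises an HTTPError, and the dedup_slug index then drops results that repeat a title/author/year combination already returned by an earlier connector. A minimal sketch of that dedup step, with a hypothetical namedtuple standing in for the connector result objects (which are not part of this diff):

from collections import namedtuple

# hypothetical stand-in for a connector search result
FakeResult = namedtuple('FakeResult', ['title', 'author', 'year'])

dedup_slug = lambda r: '%s/%s/%s' % (r.title, r.author, r.year)

results = [
    FakeResult('The Dispossessed', 'Ursula K. Le Guin', 1974),
    FakeResult('The Dispossessed', 'Ursula K. Le Guin', 1974),  # same edition via a second connector
]
result_index = set()
deduped = []
for result in results:
    slug = dedup_slug(result)
    if slug in result_index:
        continue  # already seen this title/author/year from an earlier connector
    result_index.add(slug)
    deduped.append(result)
# deduped now holds a single entry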

View file

@@ -79,9 +79,30 @@ class AbstractConnector(ABC):
''' simple function to save data to a book '''
update_from_mappings(book, data, self.book_mappings)
book.save()
authors = self.get_authors_from_data(data)
for author in authors:
book.authors.add(author)
if authors:
book.author_text = ', '.join(a.name for a in authors)
book.save()
cover = self.get_cover_from_data(data)
if cover:
book.cover.save(*cover, save=True)
return book
@abstractmethod
def get_authors_from_data(self, data):
''' load author data '''
@abstractmethod
def get_cover_from_data(self, data):
''' load cover '''
@abstractmethod
def parse_search_data(self, data):
''' turn the result json from a search into a list '''
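
The three abstract methods spell out the contract every concrete connector now has to provide. A stripped-down sketch of that contract with a placeholder implementation, kept separate from the real AbstractConnector (whose other responsibilities are not shown in this diff):

from abc import ABC, abstractmethod

class MiniConnector(ABC):
    ''' mirror of the contract added to AbstractConnector in this commit '''
    @abstractmethod
    def get_authors_from_data(self, data):
        ''' load author data '''

    @abstractmethod
    def get_cover_from_data(self, data):
        ''' load cover '''

    @abstractmethod
    def parse_search_data(self, data):
        ''' turn the result json from a search into a list '''

class ExampleConnector(MiniConnector):
    ''' hypothetical connector with placeholder behavior '''
    def get_authors_from_data(self, data):
        return []      # no author records to create

    def get_cover_from_data(self, data):
        return None    # no cover available

    def parse_search_data(self, data):
        return data    # assume the payload is already a list of results

connector = ExampleConnector()  # instantiable because all abstract methods are implemented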

View file

@@ -80,6 +80,18 @@ class Connector(AbstractConnector):
return edition
def get_cover_from_data(self, data):
return None
def get_authors_from_data(self, data):
authors = []
for author_url in data.get('authors', []):
authors.append(self.get_or_create_author(author_url))
return authors
def update_book(self, book, data=None):
''' add remote data to a local book '''
if not data:
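
The fedireads connector expects the incoming book data to list its authors by URL. A sketch of that payload shape, using a hypothetical remote instance; get_or_create_author resolves each URL to a local author record, and its implementation is outside this diff:

# hypothetical book payload received from another fedireads instance
data = {
    'title': 'The Dispossessed',
    'authors': [
        'https://example.net/author/1',
        'https://example.net/author/2',
    ],
}
# get_authors_from_data(data) calls self.get_or_create_author() once per URL
# and returns the resulting author model instances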

View file

@@ -108,22 +108,6 @@ class Connector(AbstractConnector):
return edition
def update_book_from_data(self, book, data):
''' update a book model instance from ol data '''
# populate the simple data fields
super().update_book_from_data(book, data)
authors = self.get_authors_from_data(data)
for author in authors:
book.authors.add(author)
if authors:
book.author_text = ', '.join(a.name for a in authors)
if data.get('covers'):
book.cover.save(*self.get_cover(data['covers'][0]), save=True)
return book
def update_book(self, book, data=None):
''' load new data '''
if not book.sync and not book.sync_cover:
@@ -133,7 +117,7 @@ class Connector(AbstractConnector):
data = self.load_book_data(book.openlibrary_key)
if book.sync_cover and data.get('covers'):
book.cover.save(*self.get_cover(data['covers'][0]), save=True)
book.cover.save(*self.get_cover_from_data(data), save=True)
if book.sync:
book = self.update_book_from_data(book, data)
return book
@@ -217,9 +201,12 @@ class Connector(AbstractConnector):
return author
def get_cover(self, cover_id):
def get_cover_from_data(self, data):
''' ask openlibrary for the cover '''
# TODO: get medium and small versions
if not data.get('covers'):
return None
cover_id = data.get('covers')[0]
image_name = '%s-M.jpg' % cover_id
url = '%s/b/id/%s' % (self.covers_url, image_name)
response = requests.get(url)
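
The cover lookup is now driven by the raw edition data: the first id in the covers list is turned into a medium-size image URL. A worked example, assuming the usual OpenLibrary covers host for covers_url (the real value comes from the connector's configuration, not from this diff):

covers_url = 'https://covers.openlibrary.org'  # assumed; configured per connector
data = {'covers': [8739161]}                   # hypothetical edition data

cover_id = data.get('covers')[0]
image_name = '%s-M.jpg' % cover_id             # medium size only; the TODO above notes other sizes
url = '%s/b/id/%s' % (covers_url, image_name)
# url == 'https://covers.openlibrary.org/b/id/8739161-M.jpg'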

View file

@@ -39,6 +39,12 @@ class Connector(AbstractConnector):
return search_results
def get_authors_from_data(self, data):
return None
def get_cover_from_data(self, data):
return None
def parse_search_data(self, data):
return data

View file

@@ -63,7 +63,7 @@ urlpatterns = [
re_path(r'%s/edit/?$' % book_path, views.edit_book_page),
re_path(r'^editions/(?P<work_id>\d+)/?$', views.editions_page),
re_path(r'^author/(?P<author_identifier>[\w\-]+)/?$', views.author_page),
re_path(r'^author/(?P<author_id>[\w\-]+)(.json)?/?$', views.author_page),
re_path(r'^tag/(?P<tag_id>.+)/?$', views.tag_page),
re_path(r'^shelf/%s/(?P<shelf_identifier>[\w-]+)(.json)?/?$' % username_regex, views.shelf_page),
re_path(r'^shelf/%s/(?P<shelf_identifier>[\w-]+)(.json)?/?$' % localname_regex, views.shelf_page),
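
The reworked author route serves both the HTML page and an optional .json form from the same view. A quick check of what the new pattern matches, copied from the line above; the paths are hypothetical:

import re

author_pattern = re.compile(r'^author/(?P<author_id>[\w\-]+)(.json)?/?$')

print(author_pattern.match('author/42').group('author_id'))       # '42'
print(author_pattern.match('author/42.json').group('author_id'))  # '42' (JSON form)
print(author_pattern.match('author/42/extra'))                    # None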

View file

@@ -507,6 +507,9 @@ def author_page(request, author_id):
except ValueError:
return HttpResponseNotFound()
if is_api_request(request):
return JsonResponse(activitypub.get_author(author))
books = models.Work.objects.filter(authors=author)
data = {
'author': author,
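
With the API branch in place, the same author URL either returns the ActivityPub document from activitypub.get_author() or falls through to the rendered page built from the author's works. A rough sketch using Django's test client; exactly what is_api_request inspects (Accept header, .json suffix) is not shown in this diff, so the header below is an assumption:

from django.test import Client

client = Client()

# assumed: an ActivityPub-style Accept header (or the .json suffix) marks an API request
api_response = client.get('/author/1.json', HTTP_ACCEPT='application/activity+json')
# -> JsonResponse(activitypub.get_author(author))

html_response = client.get('/author/1/')
# -> the regular author page, built from the works filtered by this author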