Merge branch 'main' into production

Mouse Reeve 2021-04-26 14:07:57 -07:00
commit 3d2a56090c
161 changed files with 11780 additions and 4491 deletions

View file

@@ -67,10 +67,12 @@ def search(query, min_confidence=0.1):
return results
def local_search(query, min_confidence=0.1, raw=False):
def local_search(query, min_confidence=0.1, raw=False, filters=None):
"""only look at local search results"""
connector = load_connector(models.Connector.objects.get(local=True))
return connector.search(query, min_confidence=min_confidence, raw=raw)
return connector.search(
query, min_confidence=min_confidence, raw=raw, filters=filters
)
def isbn_local_search(query, raw=False):
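
The new filters keyword is threaded straight through to the local connector's .filter() call; the list view later in this diff uses it to exclude editions already on a list. A minimal sketch of the call shape, assuming the repo's import layout (the helper name below is illustrative, not part of the commit):

# Hedged sketch: passing Django Q objects through the new filters argument.
from django.db.models import Q

from bookwyrm.connectors import connector_manager

def suggest_books_for_list(query, book_list):
    """local search that skips editions already on the list (illustrative)"""
    return connector_manager.local_search(
        query,
        raw=True,
        filters=[~Q(parent_work__editions__in=book_list.books.all())],
    )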

View file

@@ -13,15 +13,16 @@ class Connector(AbstractConnector):
"""instantiate a connector"""
# pylint: disable=arguments-differ
def search(self, query, min_confidence=0.1, raw=False):
def search(self, query, min_confidence=0.1, raw=False, filters=None):
"""search your local database"""
filters = filters or []
if not query:
return []
# first, try searching unique identifiers
results = search_identifiers(query)
results = search_identifiers(query, *filters)
if not results:
# then try searching title/author
results = search_title_author(query, min_confidence)
results = search_title_author(query, min_confidence, *filters)
search_results = []
for result in results:
if raw:
@@ -98,15 +99,15 @@ class Connector(AbstractConnector):
pass
def search_identifiers(query):
def search_identifiers(query, *filters):
"""tries remote_id, isbn; defined as dedupe fields on the model"""
filters = [
or_filters = [
{f.name: query}
for f in models.Edition._meta.get_fields()
if hasattr(f, "deduplication_field") and f.deduplication_field
]
results = models.Edition.objects.filter(
reduce(operator.or_, (Q(**f) for f in filters))
*filters, reduce(operator.or_, (Q(**f) for f in or_filters))
).distinct()
# when there are multiple editions of the same work, pick the default.
@@ -114,7 +115,7 @@ def search_identifiers(query):
return results.filter(parent_work__default_edition__id=F("id")) or results
def search_title_author(query, min_confidence):
def search_title_author(query, min_confidence, *filters):
"""searches for title and author"""
vector = (
SearchVector("title", weight="A")
@@ -126,7 +127,7 @@ def search_title_author(query, min_confidence):
results = (
models.Edition.objects.annotate(search=vector)
.annotate(rank=SearchRank(vector, query))
.filter(rank__gt=min_confidence)
.filter(*filters, rank__gt=min_confidence)
.order_by("-rank")
)
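
For readers unfamiliar with the idiom above: renaming the list to or_filters keeps the OR-ed identifier lookups separate from the new positional *filters, which Django ANDs with the other .filter() arguments. A standalone sketch, with illustrative field names and values:

# Hedged sketch of the reduce/Q pattern in search_identifiers() above.
import operator
from functools import reduce

from django.db.models import Q

# one lookup per deduplication field (values made up for illustration)
or_filters = [{"isbn_13": "9781250313195"}, {"remote_id": "https://example.net/book/1"}]
# caller-supplied filters arrive positionally and are ANDed in
extra_filters = (~Q(parent_work__isnull=True),)

identifier_q = reduce(operator.or_, (Q(**f) for f in or_filters))
# equivalent to: Edition.objects.filter(*extra_filters, identifier_q),
# i.e. (isbn_13 matches OR remote_id matches) AND the extra filter.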

View file

@@ -116,6 +116,10 @@ def handle_imported_book(source, user, item, include_reviews, privacy):
read.save()
if include_reviews and (item.rating or item.review):
# we don't know the publication date of the review,
# but "now" is a bad guess
published_date_guess = item.date_read or item.date_added
if item.review:
review_title = (
"Review of {!r} on {!r}".format(
item.book.title,
@@ -124,10 +128,6 @@ def handle_imported_book(source, user, item, include_reviews, privacy):
if item.review
else ""
)
# we don't know the publication date of the review,
# but "now" is a bad guess
published_date_guess = item.date_read or item.date_added
models.Review.objects.create(
user=user,
book=item.book,
@@ -137,3 +137,12 @@ def handle_imported_book(source, user, item, include_reviews, privacy):
published_date=published_date_guess,
privacy=privacy,
)
else:
# just a rating
models.ReviewRating.objects.create(
user=user,
book=item.book,
rating=item.rating,
published_date=published_date_guess,
privacy=privacy,
)
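
Because the published-date line moves and a new else branch appears, the resulting control flow is easier to read condensed into one place (argument lists are elided where the hunks cut them off):

# Condensed sketch of the three hunks above, not the verbatim source.
if include_reviews and (item.rating or item.review):
    # we don't know the publication date of the review,
    # but "now" is a bad guess
    published_date_guess = item.date_read or item.date_added
    if item.review:
        models.Review.objects.create(
            user=user,
            book=item.book,
            # ... review title/content/rating fields elided by the hunk boundary ...
            published_date=published_date_guess,
            privacy=privacy,
        )
    else:
        # just a rating
        models.ReviewRating.objects.create(
            user=user,
            book=item.book,
            rating=item.rating,
            published_date=published_date_guess,
            privacy=privacy,
        )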

View file

@@ -167,7 +167,7 @@ class Work(OrderedCollectionPageMixin, Book):
return self.to_ordered_collection(
self.editions.order_by("-edition_rank").all(),
remote_id="%s/editions" % self.remote_id,
**kwargs
**kwargs,
)
activity_serializer = activitypub.Work

View file

@@ -13,6 +13,16 @@
<div class="columns mt-3">
<section class="column is-three-quarters">
{% if request.GET.updated %}
<div class="notification is-primary">
{% if list.curation != "open" and request.user != list.user %}
{% trans "You successfully suggested a book for this list!" %}
{% else %}
{% trans "You successfully added a book to this list!" %}
{% endif %}
</div>
{% endif %}
{% if not items.object_list.exists %}
<p>{% trans "This list is currently empty" %}</p>
{% else %}
@@ -116,7 +126,7 @@
</div>
<div class="column">
<p>{% include 'snippets/book_titleby.html' with book=book %}</p>
<form name="add-book" method="post" action="{% url 'list-add-book' %}">
<form name="add-book" method="post" action="{% url 'list-add-book' %}{% if query %}?q={{ query }}{% endif %}">
{% csrf_token %}
<input type="hidden" name="book" value="{{ book.id }}">
<input type="hidden" name="list" value="{{ list.id }}">

View file

@@ -47,7 +47,7 @@
{% if status.book %}
{% if status.status_type == 'GeneratedNote' or status.status_type == 'Rating' %}
<a href="/book/{{ status.book.id }}">{{ status.book|title }}</a>{% if status.status_type == 'Rating' %}:
<a href="/book/{{ status.book.id }}">{{ status.book|title }}</a>{% if status.status_type == 'Rating' %}: {% include 'snippets/stars.html' with rating=status.rating %}
<span
itemprop="reviewRating"
itemscope
@@ -71,7 +71,6 @@
<meta itemprop="bestRating" content="5">
</span>
{% include 'snippets/stars.html' with rating=status.rating %}
{% endif %}
{% else %}
{% include 'snippets/book_titleby.html' with book=status.book %}

View file

@@ -29,4 +29,6 @@
<div>{% blocktrans with username=user.display_name %}{{ username }} has no followers{% endblocktrans %}</div>
{% endif %}
</div>
{% include 'snippets/pagination.html' with page=followers path=request.path %}
{% endblock %}

View file

@@ -29,4 +29,6 @@
<div>{% blocktrans with username=user|username %}{{ username }} isn't following any users{% endblocktrans %}</div>
{% endif %}
</div>
{% include 'snippets/pagination.html' with page=following path=request.path %}
{% endblock %}

View file

@@ -0,0 +1,5 @@
Book Id,Title,Author,Author l-f,Additional Authors,ISBN,ISBN13,My Rating,Average Rating,Publisher,Binding,Number of Pages,Year Published,Original Publication Year,Date Read,Date Added,Bookshelves,Bookshelves with positions,Exclusive Shelf,My Review,Spoiler,Private Notes,Read Count,Recommended For,Recommended By,Owned Copies,Original Purchase Date,Original Purchase Location,Condition,Condition Description,BCID
42036538,Gideon the Ninth (The Locked Tomb #1),Tamsyn Muir,"Muir, Tamsyn",,"=""1250313198""","=""9781250313195""",0,4.20,Tor,Hardcover,448,2019,2019,2020/10/25,2020/10/21,,,read,,,,1,,,0,,,,,
52691223,Subcutanean,Aaron A. Reed,"Reed, Aaron A.",,"=""""","=""""",0,4.45,,Paperback,232,2020,,2020/03/06,2020/03/05,,,read,,,,1,,,0,,,,,
28694510,Patisserie at Home,Mélanie Dupuis,"Dupuis, Mélanie",Anne Cazor,"=""0062445316""","=""9780062445315""",2,4.60,Harper Design,Hardcover,288,2016,,,2019/07/08,,,read,,,,2,,,0,,,,,

View file

@@ -228,6 +228,32 @@ class GoodreadsImport(TestCase):
self.assertEqual(review.published_date.day, 8)
self.assertEqual(review.privacy, "unlisted")
@patch("bookwyrm.activitystreams.ActivityStream.add_status")
def test_handle_imported_book_rating(self, _):
"""goodreads rating import"""
import_job = models.ImportJob.objects.create(user=self.user)
datafile = pathlib.Path(__file__).parent.joinpath(
"../data/goodreads-rating.csv"
)
csv_file = open(datafile, "r")
entry = list(csv.DictReader(csv_file))[2]
entry = self.importer.parse_fields(entry)
import_item = models.ImportItem.objects.create(
job_id=import_job.id, index=0, data=entry, book=self.book
)
with patch("bookwyrm.models.activitypub_mixin.broadcast_task.delay"):
handle_imported_book(
self.importer.service, self.user, import_item, True, "unlisted"
)
review = models.ReviewRating.objects.get(book=self.book, user=self.user)
self.assertIsInstance(review, models.ReviewRating)
self.assertEqual(review.rating, 2)
self.assertEqual(review.published_date.year, 2019)
self.assertEqual(review.published_date.month, 7)
self.assertEqual(review.published_date.day, 8)
self.assertEqual(review.privacy, "unlisted")
def test_handle_imported_book_reviews_disabled(self):
"""goodreads review import"""
import_job = models.ImportJob.objects.create(user=self.user)

View file

@@ -1,5 +1,6 @@
""" book list views"""
from typing import Optional
from urllib.parse import urlencode
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator
@@ -9,6 +10,7 @@ from django.db.models.functions import Coalesce
from django.http import HttpResponseNotFound, HttpResponseBadRequest, HttpResponse
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.views import View
from django.views.decorators.http import require_POST
@@ -135,7 +137,11 @@ class List(View):
if query and request.user.is_authenticated:
# search for books
suggestions = connector_manager.local_search(query, raw=True)
suggestions = connector_manager.local_search(
query,
raw=True,
filters=[~Q(parent_work__editions__in=book_list.books.all())],
)
elif request.user.is_authenticated:
# just suggest whatever books are nearby
suggestions = request.user.shelfbook_set.filter(
@@ -263,7 +269,10 @@ def add_book(request):
# if the book is already on the list, don't flip out
pass
return redirect("list", book_list.id)
path = reverse("list", args=[book_list.id])
params = request.GET.copy()
params["updated"] = True
return redirect("{:s}?{:s}".format(path, urlencode(params)))
@require_POST
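
Instead of a bare redirect back to the list, add_book now rebuilds the list URL and carries the incoming GET parameters forward, tagging on updated=True for the template's request.GET.updated notification and preserving the ?q= search that the form action now submits. A minimal sketch of the pattern as a standalone helper (the function name is illustrative):

# Hedged sketch of the redirect-with-query-string pattern used above.
from urllib.parse import urlencode

from django.shortcuts import redirect
from django.urls import reverse

def redirect_back_to_list(request, book_list_id):
    """send the user back to the list view, keeping their search context"""
    path = reverse("list", args=[book_list_id])
    params = request.GET.copy()   # QueryDict copies are mutable
    params["updated"] = True      # read by the template's request.GET.updated check
    return redirect("{:s}?{:s}".format(path, urlencode(params)))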

View file

@@ -16,7 +16,7 @@ class Notifications(View):
notifications = request.user.notification_set.all().order_by("-created_date")
unread = [n.id for n in notifications.filter(read=False)]
data = {
"notifications": notifications,
"notifications": notifications[:50],
"unread": unread,
}
notifications.update(read=True)
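
The [:50] slice caps how many notifications reach the template. Because the queryset has not been evaluated yet, the slice becomes a SQL LIMIT rather than loading every row, while the unsliced queryset remains usable for the bulk read-flag update. A small sketch, assuming a standard Django setup (the helper name is illustrative):

# Hedged sketch: queryset slicing vs. the bulk update in the view above.
from django.http import HttpRequest

def unread_notifications_preview(request: HttpRequest):
    """cap what gets rendered without fetching every notification"""
    notifications = request.user.notification_set.all().order_by("-created_date")
    recent = notifications[:50]        # adds LIMIT 50; query still not executed
    notifications.update(read=True)    # bulk UPDATE on the full, unsliced queryset
    return recent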

View file

@@ -106,10 +106,11 @@ class Followers(View):
if is_api_request(request):
return ActivitypubResponse(user.to_followers_activity(**request.GET))
paginated = Paginator(user.followers.all(), PAGE_LENGTH)
data = {
"user": user,
"is_self": request.user.id == user.id,
"followers": user.followers.all(),
"followers": paginated.page(request.GET.get("page", 1)),
}
return TemplateResponse(request, "user/followers.html", data)
@@ -131,10 +132,11 @@ class Following(View):
if is_api_request(request):
return ActivitypubResponse(user.to_following_activity(**request.GET))
paginated = Paginator(user.followers.all(), PAGE_LENGTH)
data = {
"user": user,
"is_self": request.user.id == user.id,
"following": user.following.all(),
"following": paginated.page(request.GET.get("page", 1)),
}
return TemplateResponse(request, "user/following.html", data)

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large