Mirror of https://github.com/bookwyrm-social/bookwyrm.git, synced 2024-12-24 00:50:35 +00:00
Commit 891b72c79c (parent 0a5e1048ce)

update user export file to use ActivityPub objects where possible. (#3109)

* add more context to user export page
* fix BookData fields wrong for files
* use to_activity and to_model where possible
* fixes for import and export
  - use AP JSON where possible
  - minor template wording updates
* import fixes and updates tests
* minor cleanup
* remove todo for mastodon

10 changed files with 800 additions and 534 deletions
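The core of the change is a round trip through BookWyrm's ActivityPub serializers: the export job now calls to_activity() on models to produce AP JSON, and the import job rebuilds models from that JSON with activitypub.parse(...).to_model(...). Below is a minimal sketch of that pattern, based only on the calls visible in the diff that follows; it is an illustration, not part of the commit.

    # Sketch of the to_activity()/to_model() round trip this commit standardises on.
    # The calls mirror those in the diff; error handling and job plumbing are omitted.
    from bookwyrm import activitypub, models

    def export_edition(edition):
        """Serialize an Edition into an ActivityPub-style dict for the export tarball."""
        return edition.to_activity()

    def import_edition(edition_json):
        """Recreate an Edition on the importing instance from that dict."""
        parsed = activitypub.parse(edition_json)
        return parsed.to_model(model=models.Edition, save=True, overwrite=True)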
@@ -22,8 +22,6 @@ class BookData(ActivityObject):
     aasin: Optional[str] = None
     isfdb: Optional[str] = None
     lastEditedBy: Optional[str] = None
-    links: list[str] = field(default_factory=list)
-    fileLinks: list[str] = field(default_factory=list)
 
 
     # pylint: disable=invalid-name

@@ -45,6 +43,8 @@ class Book(BookData):
     firstPublishedDate: str = ""
     publishedDate: str = ""
 
+    fileLinks: list[str] = field(default_factory=list)
+
     cover: Optional[Document] = None
     type: str = "Book"
 
@@ -1,5 +1,6 @@
 """Export user account to tar.gz file for import into another Bookwyrm instance"""
 
+import dataclasses
 import logging
 from uuid import uuid4
 
@@ -8,12 +9,11 @@ from django.db.models import Q
 from django.core.serializers.json import DjangoJSONEncoder
 from django.core.files.base import ContentFile
 
-from bookwyrm.models import AnnualGoal, ReadThrough, ShelfBook, Shelf, List, ListItem
+from bookwyrm.models import AnnualGoal, ReadThrough, ShelfBook, List, ListItem
 from bookwyrm.models import Review, Comment, Quotation
-from bookwyrm.models import Edition, Book
+from bookwyrm.models import Edition
 from bookwyrm.models import UserFollows, User, UserBlocks
 from bookwyrm.models.job import ParentJob, ParentTask
-from bookwyrm.settings import DOMAIN
 from bookwyrm.tasks import app, IMPORTS
 from bookwyrm.utils.tar import BookwyrmTarFile
 
@@ -63,7 +63,7 @@ def tar_export(json_data: str, user, file):
     if getattr(user, "avatar", False):
         tar.add_image(user.avatar, filename="avatar")
 
-    editions, books = get_books_for_user(user)  # pylint: disable=unused-variable
+    editions = get_books_for_user(user)
     for book in editions:
         if getattr(book, "cover", False):
             tar.add_image(book.cover)
@@ -71,138 +71,162 @@ def tar_export(json_data: str, user, file):
     file.close()
 
 
-def json_export(user):  # pylint: disable=too-many-locals, too-many-statements
+def json_export(
+    user,
+):  # pylint: disable=too-many-locals, too-many-statements, too-many-branches
     """Generate an export for a user"""
-    # user
-    exported_user = {}
+    # User as AP object
+    exported_user = user.to_activity()
+    # I don't love this but it prevents a JSON encoding error
+    # when there is no user image
+    if isinstance(
+        exported_user["icon"],
+        dataclasses._MISSING_TYPE,  # pylint: disable=protected-access
+    ):
+        exported_user["icon"] = {}
+    else:
+        # change the URL to be relative to the JSON file
+        file_type = exported_user["icon"]["url"].rsplit(".", maxsplit=1)[-1]
+        filename = f"avatar.{file_type}"
+        exported_user["icon"]["url"] = filename
+
+    # Additional settings - can't be serialized as AP
     vals = [
-        "username",
-        "name",
-        "summary",
-        "manually_approves_followers",
-        "hide_follows",
         "show_goal",
-        "show_suggested_users",
-        "discoverable",
         "preferred_timezone",
         "default_post_privacy",
+        "show_suggested_users",
     ]
+    exported_user["settings"] = {}
     for k in vals:
-        exported_user[k] = getattr(user, k)
+        exported_user["settings"][k] = getattr(user, k)
 
-    if getattr(user, "avatar", False):
-        exported_user["avatar"] = f'https://{DOMAIN}{getattr(user, "avatar").url}'
-
-    # reading goals
+    # Reading goals - can't be serialized as AP
     reading_goals = AnnualGoal.objects.filter(user=user).distinct()
-    goals_list = []
-    # TODO: either error checking should be more sophisticated
-    # or maybe we don't need this try/except
-    try:
-        for goal in reading_goals:
-            goals_list.append(
-                {"goal": goal.goal, "year": goal.year, "privacy": goal.privacy}
-            )
-    except Exception:  # pylint: disable=broad-except
-        pass
+    exported_user["goals"] = []
+    for goal in reading_goals:
+        exported_user["goals"].append(
+            {"goal": goal.goal, "year": goal.year, "privacy": goal.privacy}
+        )
 
-    try:
-        readthroughs = ReadThrough.objects.filter(user=user).distinct().values()
-        readthroughs = list(readthroughs)
-    except Exception:  # pylint: disable=broad-except
-        readthroughs = []
+    # Reading history - can't be serialized as AP
+    readthroughs = ReadThrough.objects.filter(user=user).distinct().values()
+    readthroughs = list(readthroughs)
 
-    # books
-    editions, books = get_books_for_user(user)
-    final_books = []
+    # Books
+    editions = get_books_for_user(user)
+    exported_user["books"] = []
 
-    for book in books.values():
-        edition = editions.filter(id=book["id"])
-        book["edition"] = edition.values()[0]
+    for edition in editions:
+        book = {}
+        book["work"] = edition.parent_work.to_activity()
+        book["edition"] = edition.to_activity()
+
+        if book["edition"].get("cover"):
+            # change the URL to be relative to the JSON file
+            filename = book["edition"]["cover"]["url"].rsplit("/", maxsplit=1)[-1]
+            book["edition"]["cover"]["url"] = f"covers/{filename}"
+
         # authors
-        book["authors"] = list(edition.first().authors.all().values())
-        # readthroughs
+        book["authors"] = []
+        for author in edition.authors.all():
+            book["authors"].append(author.to_activity())
+
+        # Shelves this book is on
+        # Every ShelfItem is this book so we don't other serializing
+        book["shelves"] = []
+        shelf_books = (
+            ShelfBook.objects.select_related("shelf")
+            .filter(user=user, book=edition)
+            .distinct()
+        )
+
+        for shelfbook in shelf_books:
+            book["shelves"].append(shelfbook.shelf.to_activity())
+
+        # Lists and ListItems
+        # ListItems include "notes" and "approved" so we need them
+        # even though we know it's this book
+        book["lists"] = []
+        list_items = ListItem.objects.filter(book=edition, user=user).distinct()
+
+        for item in list_items:
+            list_info = item.book_list.to_activity()
+            list_info[
+                "privacy"
+            ] = item.book_list.privacy  # this isn't serialized so we add it
+            list_info["list_item"] = item.to_activity()
+            book["lists"].append(list_info)
+
+        # Statuses
+        # Can't use select_subclasses here because
+        # we need to filter on the "book" value,
+        # which is not available on an ordinary Status
+        for status in ["comments", "quotations", "reviews"]:
+            book[status] = []
+
+        comments = Comment.objects.filter(user=user, book=edition).all()
+        for status in comments:
+            obj = status.to_activity()
+            obj["progress"] = status.progress
+            obj["progress_mode"] = status.progress_mode
+            book["comments"].append(obj)
+
+        quotes = Quotation.objects.filter(user=user, book=edition).all()
+        for status in quotes:
+            obj = status.to_activity()
+            obj["position"] = status.position
+            obj["endposition"] = status.endposition
+            obj["position_mode"] = status.position_mode
+            book["quotations"].append(obj)
+
+        reviews = Review.objects.filter(user=user, book=edition).all()
+        for status in reviews:
+            obj = status.to_activity()
+            book["reviews"].append(obj)
+
+        # readthroughs can't be serialized to activity
         book_readthroughs = (
-            ReadThrough.objects.filter(user=user, book=book["id"]).distinct().values()
+            ReadThrough.objects.filter(user=user, book=edition).distinct().values()
         )
         book["readthroughs"] = list(book_readthroughs)
-        # shelves
-        shelf_books = ShelfBook.objects.filter(user=user, book=book["id"]).distinct()
-        shelves_from_books = Shelf.objects.filter(shelfbook__in=shelf_books, user=user)
-
-        book["shelves"] = list(shelves_from_books.values())
-        book["shelf_books"] = {}
-
-        for shelf in shelves_from_books:
-            shelf_contents = ShelfBook.objects.filter(user=user, shelf=shelf).distinct()
-
-            book["shelf_books"][shelf.identifier] = list(shelf_contents.values())
-
-        # book lists
-        book_lists = List.objects.filter(books__in=[book["id"]], user=user).distinct()
-        book["lists"] = list(book_lists.values())
-        book["list_items"] = {}
-        for blist in book_lists:
-            list_items = ListItem.objects.filter(book_list=blist).distinct()
-            book["list_items"][blist.name] = list(list_items.values())
-
-        # reviews
-        reviews = Review.objects.filter(user=user, book=book["id"]).distinct()
-
-        book["reviews"] = list(reviews.values())
-
-        # comments
-        comments = Comment.objects.filter(user=user, book=book["id"]).distinct()
-
-        book["comments"] = list(comments.values())
-
-        # quotes
-        quotes = Quotation.objects.filter(user=user, book=book["id"]).distinct()
-
-        book["quotes"] = list(quotes.values())
 
         # append everything
-        final_books.append(book)
+        exported_user["books"].append(book)
 
-    # saved book lists
+    # saved book lists - just the remote id
     saved_lists = List.objects.filter(id__in=user.saved_lists.all()).distinct()
-    saved_lists = [l.remote_id for l in saved_lists]
+    exported_user["saved_lists"] = [l.remote_id for l in saved_lists]
 
-    # follows
+    # follows - just the remote id
    follows = UserFollows.objects.filter(user_subject=user).distinct()
    following = User.objects.filter(userfollows_user_object__in=follows).distinct()
-    follows = [f.remote_id for f in following]
+    exported_user["follows"] = [f.remote_id for f in following]
 
-    # blocks
+    # blocks - just the remote id
    blocks = UserBlocks.objects.filter(user_subject=user).distinct()
    blocking = User.objects.filter(userblocks_user_object__in=blocks).distinct()
 
-    blocks = [b.remote_id for b in blocking]
+    exported_user["blocks"] = [b.remote_id for b in blocking]
 
-    data = {
-        "user": exported_user,
-        "goals": goals_list,
-        "books": final_books,
-        "saved_lists": saved_lists,
-        "follows": follows,
-        "blocked_users": blocks,
-    }
-
-    return DjangoJSONEncoder().encode(data)
+    return DjangoJSONEncoder().encode(exported_user)
 
 
 def get_books_for_user(user):
-    """Get all the books and editions related to a user
-    :returns: tuple of editions, books
-    """
-
-    editions = Edition.objects.filter(
-        Q(shelves__user=user)
-        | Q(readthrough__user=user)
-        | Q(review__user=user)
-        | Q(list__user=user)
-        | Q(comment__user=user)
-        | Q(quotation__user=user)
-    ).distinct()
-    books = Book.objects.filter(id__in=editions).distinct()
-    return editions, books
+    """Get all the books and editions related to a user"""
+
+    editions = (
+        Edition.objects.select_related("parent_work")
+        .filter(
+            Q(shelves__user=user)
+            | Q(readthrough__user=user)
+            | Q(review__user=user)
+            | Q(list__user=user)
+            | Q(comment__user=user)
+            | Q(quotation__user=user)
+        )
+        .distinct()
+    )
+
+    return editions
@@ -1,13 +1,11 @@
 """Import a user from another Bookwyrm instance"""
 
-from functools import reduce
 import json
 import logging
-import operator
 
 from django.db.models import FileField, JSONField, CharField
-from django.db.models import Q
-from django.utils.dateparse import parse_datetime
+from django.utils import timezone
+from django.utils.html import strip_tags
 from django.contrib.postgres.fields import ArrayField as DjangoArrayField
 
 from bookwyrm import activitypub
@@ -47,9 +45,9 @@ def start_import_task(**kwargs):
         job.import_data = json.loads(tar.read("archive.json").decode("utf-8"))
 
         if "include_user_profile" in job.required:
-            update_user_profile(job.user, tar, job.import_data.get("user"))
+            update_user_profile(job.user, tar, job.import_data)
         if "include_user_settings" in job.required:
-            update_user_settings(job.user, job.import_data.get("user"))
+            update_user_settings(job.user, job.import_data)
         if "include_goals" in job.required:
             update_goals(job.user, job.import_data.get("goals"))
         if "include_saved_lists" in job.required:
@@ -57,7 +55,7 @@ def start_import_task(**kwargs):
         if "include_follows" in job.required:
             upsert_follows(job.user, job.import_data.get("follows"))
         if "include_blocks" in job.required:
-            upsert_user_blocks(job.user, job.import_data.get("blocked_users"))
+            upsert_user_blocks(job.user, job.import_data.get("blocks"))
 
         process_books(job, tar)
 
@@ -70,10 +68,12 @@ def start_import_task(**kwargs):
 
 
 def process_books(job, tar):
-    """process user import data related to books"""
+    """
+    Process user import data related to books
+    We always import the books even if not assigning
+    them to shelves, lists etc
+    """
 
-    # create the books. We need to merge Book and Edition instances
-    # and also check whether these books already exist in the DB
     books = job.import_data.get("books")
 
     for data in books:
@@ -85,308 +85,193 @@ def process_books(job, tar):
         if "include_readthroughs" in job.required:
             upsert_readthroughs(data.get("readthroughs"), job.user, book.id)
 
-        if "include_reviews" in job.required:
-            get_or_create_statuses(
-                job.user, models.Review, data.get("reviews"), book.id
-            )
-
         if "include_comments" in job.required:
-            get_or_create_statuses(
-                job.user, models.Comment, data.get("comments"), book.id
+            upsert_statuses(
+                job.user, models.Comment, data.get("comments"), book.remote_id
+            )
+        if "include_quotations" in job.required:
+            upsert_statuses(
+                job.user, models.Quotation, data.get("quotations"), book.remote_id
             )
 
-        if "include_quotes" in job.required:
-            get_or_create_statuses(
-                job.user, models.Quotation, data.get("quotes"), book.id
+        if "include_reviews" in job.required:
+            upsert_statuses(
+                job.user, models.Review, data.get("reviews"), book.remote_id
             )
 
         if "include_lists" in job.required:
-            upsert_lists(job.user, data.get("lists"), data.get("list_items"), book.id)
+            upsert_lists(job.user, data.get("lists"), book.id)
 
 
 def get_or_create_edition(book_data, tar):
-    """Take a JSON string of book and edition data,
-    find or create the edition in the database and
+    """Take a JSON string of work and edition data,
+    find or create the edition and work in the database and
     return an edition instance"""
 
-    cover_path = book_data.get(
-        "cover", None
-    )  # we use this further down but need to assign a var before cleaning
-
-    clean_book = clean_values(book_data)
-    book = clean_book.copy()  # don't mutate the original book data
-
-    # prefer edition values only if they are not null
-    edition = clean_values(book["edition"])
-    for key in edition.keys():
-        if key not in book.keys() or (
-            key in book.keys() and (edition[key] not in [None, ""])
-        ):
-            book[key] = edition[key]
-
-    existing = find_existing(models.Edition, book)
+    edition = book_data.get("edition")
+    existing = models.Edition.find_existing(edition)
     if existing:
         return existing
 
-    # the book is not in the local database, so we have to do this the hard way
-    local_authors = get_or_create_authors(book["authors"])
-
-    # get rid of everything that's not strictly in a Book
-    # or is many-to-many so can't be set directly
-    associated_values = [
-        "edition",
-        "authors",
-        "readthroughs",
-        "shelves",
-        "shelf_books",
-        "lists",
-        "list_items",
-        "reviews",
-        "comments",
-        "quotes",
-    ]
-
-    for val in associated_values:
-        del book[val]
-
-    # now we can save the book as an Edition
-    new_book = models.Edition.objects.create(**book)
-    new_book.authors.set(local_authors)  # now we can add authors with set()
-
-    # get cover from original book_data because we lost it in clean_values
+    # make sure we have the authors in the local DB
+    # replace the old author ids in the edition JSON
+    edition["authors"] = []
+    for author in book_data.get("authors"):
+        parsed_author = activitypub.parse(author)
+        instance = parsed_author.to_model(
+            model=models.Author, save=True, overwrite=True
+        )
+
+        edition["authors"].append(instance.remote_id)
+
+    # we will add the cover later from the tar
+    # don't try to load it from the old server
+    cover = edition.get("cover", {})
+    cover_path = cover.get("url", None)
+    edition["cover"] = {}
+
+    # first we need the parent work to exist
+    work = book_data.get("work")
+    work["editions"] = []
+    parsed_work = activitypub.parse(work)
+    work_instance = parsed_work.to_model(model=models.Work, save=True, overwrite=True)
+
+    # now we have a work we can add it to the edition
+    # and create the edition model instance
+    edition["work"] = work_instance.remote_id
+    parsed_edition = activitypub.parse(edition)
+    book = parsed_edition.to_model(model=models.Edition, save=True, overwrite=True)
+
+    # set the cover image from the tar
     if cover_path:
-        tar.write_image_to_file(cover_path, new_book.cover)
-
-    # NOTE: clean_values removes "last_edited_by"
-    # because it's a user ID from the old database
-    # if this is required, bookwyrm_export_job will
-    # need to bring in the user who edited it.
-
-    # create parent
-    work = models.Work.objects.create(title=book["title"])
-    work.authors.set(local_authors)
-    new_book.parent_work = work
-
-    new_book.save(broadcast=False)
-    return new_book
-
-
-def clean_values(data):
-    """clean values we don't want when creating new instances"""
-
-    values = [
-        "id",
-        "pk",
-        "remote_id",
-        "cover",
-        "preview_image",
-        "last_edited_by",
-        "last_edited_by_id",
-        "user",
-        "book_list",
-        "shelf_book",
-        "parent_work_id",
-    ]
-
-    common = data.keys() & values
-    new_data = data
-    for val in common:
-        del new_data[val]
-    return new_data
-
-
-def find_existing(cls, data):
-    """Given a book or author, find any existing model instances"""
-
-    identifiers = [
-        "openlibrary_key",
-        "inventaire_id",
-        "librarything_key",
-        "goodreads_key",
-        "asin",
-        "isfdb",
-        "isbn_10",
-        "isbn_13",
-        "oclc_number",
-        "origin_id",
-        "viaf",
-        "wikipedia_link",
-        "isni",
-        "gutenberg_id",
-    ]
-
-    match_fields = []
-    for i in identifiers:
-        if data.get(i) not in [None, ""]:
-            match_fields.append({i: data.get(i)})
-
-    if len(match_fields) > 0:
-        match = cls.objects.filter(reduce(operator.or_, (Q(**f) for f in match_fields)))
-        return match.first()
-    return None
-
-
-def get_or_create_authors(data):
-    """Take a JSON string of authors find or create the authors
-    in the database and return a list of author instances"""
-
-    authors = []
-    for author in data:
-        clean = clean_values(author)
-        existing = find_existing(models.Author, clean)
-        if existing:
-            authors.append(existing)
-        else:
-            new = models.Author.objects.create(**clean)
-            authors.append(new)
-    return authors
+        tar.write_image_to_file(cover_path, book.cover)
+
+    return book
 
 
 def upsert_readthroughs(data, user, book_id):
-    """Take a JSON string of readthroughs, find or create the
-    instances in the database and return a list of saved instances"""
-
-    for read_thru in data:
-        start_date = (
-            parse_datetime(read_thru["start_date"])
-            if read_thru["start_date"] is not None
-            else None
-        )
-        finish_date = (
-            parse_datetime(read_thru["finish_date"])
-            if read_thru["finish_date"] is not None
-            else None
-        )
-        stopped_date = (
-            parse_datetime(read_thru["stopped_date"])
-            if read_thru["stopped_date"] is not None
-            else None
-        )
-        readthrough = {
-            "user": user,
-            "book": models.Edition.objects.get(id=book_id),
-            "progress": read_thru["progress"],
-            "progress_mode": read_thru["progress_mode"],
-            "start_date": start_date,
-            "finish_date": finish_date,
-            "stopped_date": stopped_date,
-            "is_active": read_thru["is_active"],
-        }
-
-        existing = models.ReadThrough.objects.filter(**readthrough).exists()
+    """Take a JSON string of readthroughs and
+    find or create the instances in the database"""
+
+    for read_through in data:
+        obj = {}
+        keys = [
+            "progress_mode",
+            "start_date",
+            "finish_date",
+            "stopped_date",
+            "is_active",
+        ]
+        for key in keys:
+            obj[key] = read_through[key]
+        obj["user_id"] = user.id
+        obj["book_id"] = book_id
+
+        existing = models.ReadThrough.objects.filter(**obj).first()
         if not existing:
-            models.ReadThrough.objects.create(**readthrough)
+            models.ReadThrough.objects.create(**obj)
 
 
-def get_or_create_statuses(user, cls, data, book_id):
+def upsert_statuses(user, cls, data, book_remote_id):
     """Take a JSON string of a status and
     find or create the instances in the database"""
 
-    for book_status in data:
-        keys = [
-            "content",
-            "raw_content",
-            "content_warning",
-            "privacy",
-            "sensitive",
-            "published_date",
-            "reading_status",
-            "name",
-            "rating",
-            "quote",
-            "raw_quote",
+    for status in data:
+
+        # update ids and remove replies
+        status["attributedTo"] = user.remote_id
+        status["to"] = update_followers_address(user, status["to"])
+        status["cc"] = update_followers_address(user, status["cc"])
+        status[
+            "replies"
+        ] = {}  # this parses incorrectly but we can't set it without knowing the new id
+        status["inReplyToBook"] = book_remote_id
+
+        # save new status or do update it if it already exists
+        parsed = activitypub.parse(status)
+        instance = parsed.to_model(model=cls, save=True, overwrite=True)
+
+        print(instance.id, instance.privacy)
+
+        for val in [
             "progress",
             "progress_mode",
             "position",
+            "endposition",
             "position_mode",
-        ]
-        common = book_status.keys() & keys
-        status = {k: book_status[k] for k in common}
-        status["published_date"] = parse_datetime(book_status["published_date"])
-        if "rating" in common:
-            status["rating"] = float(book_status["rating"])
-        book = models.Edition.objects.get(id=book_id)
-        exists = cls.objects.filter(**status, book=book, user=user).exists()
-        if not exists:
-            cls.objects.create(**status, book=book, user=user)
-
-
-def upsert_lists(user, lists, items, book_id):
-    """Take a list and ListItems as JSON and
-    create DB entries if they don't already exist"""
+        ]:
+            if status.get(val):
+                print(val, status[val])
+                instance.val = status[val]
+                instance.save()
+
+
+def upsert_lists(user, lists, book_id):
+    """Take a list of objects each containing
+    a list and list item as AP objects
+
+    Because we are creating new IDs we can't assume the id
+    will exist or be accurate, so we only use to_model for
+    adding new items after checking whether they exist .
+
+    """
 
     book = models.Edition.objects.get(id=book_id)
 
-    for lst in lists:
-        book_list = models.List.objects.filter(name=lst["name"], user=user).first()
-        if not book_list:
-            book_list = models.List.objects.create(
-                user=user,
-                name=lst["name"],
-                description=lst["description"],
-                curation=lst["curation"],
-                privacy=lst["privacy"],
-            )
-
-        # If the list exists but the ListItem doesn't don't try to add it
-        # with the same order as an existing item
-        count = models.ListItem.objects.filter(book_list=book_list).count()
-
-        for i in items[lst["name"]]:
-            if not models.ListItem.objects.filter(
-                book=book, book_list=book_list, user=user
-            ).exists():
-                models.ListItem.objects.create(
-                    book=book,
-                    book_list=book_list,
-                    user=user,
-                    notes=i["notes"],
-                    order=i["order"] + count,
-                )
+    for blist in lists:
+        booklist = models.List.objects.filter(name=blist["name"], user=user).first()
+        if not booklist:
+            blist["owner"] = user.remote_id
+            parsed = activitypub.parse(blist)
+            booklist = parsed.to_model(model=models.List, save=True, overwrite=True)
+
+            booklist.privacy = blist["privacy"]
+            booklist.save()
+
+        item = models.ListItem.objects.filter(book=book, book_list=booklist).exists()
+        if not item:
+            count = booklist.books.count()
+            models.ListItem.objects.create(
+                book=book,
+                book_list=booklist,
+                user=user,
+                notes=blist["list_item"]["notes"],
+                approved=blist["list_item"]["approved"],
+                order=count + 1,
+            )
 
 
 def upsert_shelves(book, user, book_data):
-    """Take shelf and ShelfBooks JSON objects and create
+    """Take shelf JSON objects and create
     DB entries if they don't already exist"""
 
     shelves = book_data["shelves"]
 
     for shelf in shelves:
 
         book_shelf = models.Shelf.objects.filter(name=shelf["name"], user=user).first()
 
         if not book_shelf:
-            book_shelf = models.Shelf.objects.create(
-                name=shelf["name"],
-                user=user,
-                identifier=shelf["identifier"],
-                description=shelf["description"],
-                editable=shelf["editable"],
-                privacy=shelf["privacy"],
-            )
-
-        for shelfbook in book_data["shelf_books"][book_shelf.identifier]:
-
-            shelved_date = parse_datetime(shelfbook["shelved_date"])
-
-            if not models.ShelfBook.objects.filter(
-                book=book, shelf=book_shelf, user=user
-            ).exists():
-                models.ShelfBook.objects.create(
-                    book=book,
-                    shelf=book_shelf,
-                    user=user,
-                    shelved_date=shelved_date,
-                )
+            book_shelf = models.Shelf.objects.create(name=shelf["name"], user=user)
+
+        # add the book as a ShelfBook if needed
+        if not models.ShelfBook.objects.filter(
+            book=book, shelf=book_shelf, user=user
+        ).exists():
+            models.ShelfBook.objects.create(
+                book=book, shelf=book_shelf, user=user, shelved_date=timezone.now()
+            )
 
 
 def update_user_profile(user, tar, data):
     """update the user's profile from import data"""
-    name = data.get("name")
-    username = data.get("username").split("@")[0]
+    name = data.get("name", None)
+    username = data.get("preferredUsername")
     user.name = name if name else username
-    user.summary = data.get("summary")
+    user.summary = strip_tags(data.get("summary", None))
     user.save(update_fields=["name", "summary"])
-    if data.get("avatar") is not None:
+    if data["icon"].get("url"):
         avatar_filename = next(filter(lambda n: n.startswith("avatar"), tar.getnames()))
         tar.write_image_to_file(avatar_filename, user.avatar)
 
@@ -394,18 +279,28 @@ def update_user_profile(user, tar, data):
 def update_user_settings(user, data):
     """update the user's settings from import data"""
 
-    update_fields = [
-        "manually_approves_followers",
-        "hide_follows",
-        "show_goal",
-        "show_suggested_users",
-        "discoverable",
-        "preferred_timezone",
-        "default_post_privacy",
+    update_fields = ["manually_approves_followers", "hide_follows", "discoverable"]
+
+    ap_fields = [
+        ("manuallyApprovesFollowers", "manually_approves_followers"),
+        ("hideFollows", "hide_follows"),
+        ("discoverable", "discoverable"),
     ]
 
-    for field in update_fields:
-        setattr(user, field, data[field])
+    for (ap_field, bw_field) in ap_fields:
+        setattr(user, bw_field, data[ap_field])
+
+    bw_fields = [
+        "show_goal",
+        "show_suggested_users",
+        "default_post_privacy",
+        "preferred_timezone",
+    ]
+
+    for field in bw_fields:
+        update_fields.append(field)
+        setattr(user, field, data["settings"][field])
 
     user.save(update_fields=update_fields)
 
 
@@ -421,7 +316,7 @@ def update_goals(user, data):
     """update the user's goals from import data"""
 
     for goal in data:
-        # edit the existing goal if there is one instead of making a new one
+        # edit the existing goal if there is one
         existing = models.AnnualGoal.objects.filter(
             year=goal["year"], user=user
         ).first()
@@ -513,3 +408,14 @@ def upsert_user_blocks_task(job_id):
     return upsert_user_blocks(
         parent_job.user, parent_job.import_data.get("blocked_users")
     )
+
+
+def update_followers_address(user, field):
+    """statuses to or cc followers need to have the followers
+    address updated to the new local user"""
+
+    for i, audience in enumerate(field):
+        if audience.rsplit("/")[-1] == "followers":
+            field[i] = user.followers_url
+
+    return field
@@ -261,9 +261,7 @@ def notify_user_on_user_export_complete(
    """we exported your user details! aren't you proud of us"""
    update_fields = update_fields or []
    if not instance.complete or "complete" not in update_fields:
-        print("RETURNING", instance.status)
        return
-    print("NOTIFYING")
    Notification.objects.create(
        user=instance.user,
        notification_type=Notification.USER_EXPORT,
@@ -132,7 +132,7 @@
     <input type="checkbox" name="include_reviews" checked> {% trans "Book reviews" %}
   </label>
   <label class="label">
-    <input type="checkbox" name="include_quotes" checked> {% trans "Quotations" %}
+    <input type="checkbox" name="include_quotations" checked> {% trans "Quotations" %}
   </label>
   <label class="label">
     <input type="checkbox" name="include_comments" checked> {% trans "Comments about books" %}
@@ -35,6 +35,7 @@
       <li>Direct messages</li>
       <li>Replies to your statuses</li>
       <li>Groups</li>
+      <li>Favorites</li>
     </ul>
   </div>
 {% endblocktrans %}
Binary file not shown.

bookwyrm/tests/data/user_import.json (new file, 399 lines)

@@ -0,0 +1,399 @@
||||||
|
{
|
||||||
|
"id": "https://www.example.com/user/rat",
|
||||||
|
"type": "Person",
|
||||||
|
"preferredUsername": "rat",
|
||||||
|
"inbox": "https://www.example.com/user/rat/inbox",
|
||||||
|
"publicKey": {
|
||||||
|
"id": "https://www.example.com/user/rat/#main-key",
|
||||||
|
"owner": "https://www.example.com/user/rat",
|
||||||
|
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nzzzz\n-----END PUBLIC KEY-----"
|
||||||
|
},
|
||||||
|
"followers": "https://www.example.com/user/rat/followers",
|
||||||
|
"following": "https://www.example.com/user/rat/following",
|
||||||
|
"outbox": "https://www.example.com/user/rat/outbox",
|
||||||
|
"endpoints": {
|
||||||
|
"sharedInbox": "https://www.example.com/inbox"
|
||||||
|
},
|
||||||
|
"name": "Rat",
|
||||||
|
"summary": "<p>I love to make soup in Paris and eat pizza in New York</p>",
|
||||||
|
"icon": {
|
||||||
|
"type": "Document",
|
||||||
|
"url": "avatar.png",
|
||||||
|
"name": "avatar for rat",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
},
|
||||||
|
"bookwyrmUser": true,
|
||||||
|
"manuallyApprovesFollowers": true,
|
||||||
|
"discoverable": false,
|
||||||
|
"hideFollows": true,
|
||||||
|
"alsoKnownAs": [],
|
||||||
|
"@context": [
|
||||||
|
"https://www.w3.org/ns/activitystreams",
|
||||||
|
"https://w3id.org/security/v1",
|
||||||
|
{
|
||||||
|
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
|
||||||
|
"schema": "http://schema.org#",
|
||||||
|
"PropertyValue": "schema:PropertyValue",
|
||||||
|
"value": "schema:value",
|
||||||
|
"alsoKnownAs": {
|
||||||
|
"@id": "as:alsoKnownAs",
|
||||||
|
"@type": "@id"
|
||||||
|
},
|
||||||
|
"movedTo": {
|
||||||
|
"@id": "as:movedTo",
|
||||||
|
"@type": "@id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"settings": {
|
||||||
|
"show_goal": false,
|
||||||
|
"preferred_timezone": "Australia/Adelaide",
|
||||||
|
"default_post_privacy": "followers",
|
||||||
|
"show_suggested_users": false
|
||||||
|
},
|
||||||
|
"goals": [
|
||||||
|
{
|
||||||
|
"goal": 12,
|
||||||
|
"year": 2023,
|
||||||
|
"privacy": "followers"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"books": [
|
||||||
|
{
|
||||||
|
"work": {
|
||||||
|
"id": "https://www.example.com/book/1",
|
||||||
|
"type": "Work",
|
||||||
|
"title": "Seeing Like a State",
|
||||||
|
"description": "<p>Examines how (sometimes quasi-) authoritarian high-modernist planning fails to deliver the goods, be they increased resources for the state or a better life for the people.</p>",
|
||||||
|
"languages": [ "English" ],
|
||||||
|
"series": "",
|
||||||
|
"seriesNumber": "",
|
||||||
|
"subjects": [],
|
||||||
|
"subjectPlaces": [],
|
||||||
|
"authors": [
|
||||||
|
"https://www.example.com/author/1"
|
||||||
|
],
|
||||||
|
"firstPublishedDate": "",
|
||||||
|
"publishedDate": "1998-03-30T00:00:00Z",
|
||||||
|
"fileLinks": [],
|
||||||
|
"lccn": "",
|
||||||
|
"editions": [
|
||||||
|
"https://www.example.com/book/2"
|
||||||
|
],
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
},
|
||||||
|
"edition": {
|
||||||
|
"id": "https://www.example.com/book/2",
|
||||||
|
"type": "Edition",
|
||||||
|
"openlibraryKey": "OL680025M",
|
||||||
|
"title": "Seeking Like A State",
|
||||||
|
"sortTitle": "seeing like a state",
|
||||||
|
"subtitle": "",
|
||||||
|
"description": "<p>Examines how (sometimes quasi-) authoritarian high-modernist planning fails to deliver the goods, be they increased resources for the state or a better life for the people.</p>",
|
||||||
|
"languages": ["English"],
|
||||||
|
"series": "",
|
||||||
|
"seriesNumber": "",
|
||||||
|
"subjects": [],
|
||||||
|
"subjectPlaces": [],
|
||||||
|
"authors": [
|
||||||
|
"https://www.example.com/author/1"
|
||||||
|
],
|
||||||
|
"firstPublishedDate": "",
|
||||||
|
"publishedDate": "",
|
||||||
|
"fileLinks": [],
|
||||||
|
"cover": {
|
||||||
|
"type": "Document",
|
||||||
|
"url": "covers/d273d638-191d-4ebf-b213-3c60dbf010fe.jpeg",
|
||||||
|
"name": "James C. Scott: Seeing like a state",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
},
|
||||||
|
"work": "https://www.example.com/book/1",
|
||||||
|
"isbn10": "",
|
||||||
|
"isbn13": "9780300070163",
|
||||||
|
"oclcNumber": "",
|
||||||
|
"physicalFormat": "",
|
||||||
|
"physicalFormatDetail": "",
|
||||||
|
"publishers": [],
|
||||||
|
"editionRank": 4,
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
},
|
||||||
|
"authors": [
|
||||||
|
{
|
||||||
|
"id": "https://www.example.com/author/1",
|
||||||
|
"type": "Author",
|
||||||
|
"name": "James C. Scott",
|
||||||
|
"aliases": [
|
||||||
|
"James Campbell Scott",
|
||||||
|
"\u30b8\u30a7\u30fc\u30e0\u30ba\u30fbC. \u30b9\u30b3\u30c3\u30c8",
|
||||||
|
"\u30b8\u30a7\u30fc\u30e0\u30ba\u30fbC\u30fb\u30b9\u30b3\u30c3\u30c8",
|
||||||
|
"\u062c\u06cc\u0645\u0632 \u0633\u06cc. \u0627\u0633\u06a9\u0627\u062a",
|
||||||
|
"Jim Scott",
|
||||||
|
"\u062c\u064a\u0645\u0633 \u0633\u0643\u0648\u062a",
|
||||||
|
"James C. Scott",
|
||||||
|
"\u0414\u0436\u0435\u0439\u043c\u0441 \u0421\u043a\u043e\u0442\u0442",
|
||||||
|
"\u30b8\u30a7\u30fc\u30e0\u30b9\u30fbC \u30b9\u30b3\u30c3\u30c8",
|
||||||
|
"James Cameron Scott"
|
||||||
|
],
|
||||||
|
"bio": "<p>American political scientist and anthropologist</p>",
|
||||||
|
"wikipediaLink": "https://en.wikipedia.org/wiki/James_C._Scott",
|
||||||
|
"website": "",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"shelves": [
|
||||||
|
{
|
||||||
|
"id": "https://www.example.com/user/rat/books/read",
|
||||||
|
"type": "Shelf",
|
||||||
|
"totalItems": 1,
|
||||||
|
"first": "https://www.example.com/user/rat/books/read?page=1",
|
||||||
|
"last": "https://www.example.com/user/rat/books/read?page=1",
|
||||||
|
"name": "Read",
|
||||||
|
"owner": "https://www.example.com/user/rat",
|
||||||
|
"to": [
|
||||||
|
"https://www.w3.org/ns/activitystreams#Public"
|
||||||
|
],
|
||||||
|
"cc": [
|
||||||
|
"https://www.example.com/user/rat/followers"
|
||||||
|
],
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "https://www.example.com/user/rat/books/to-read",
|
||||||
|
"type": "Shelf",
|
||||||
|
"totalItems": 1,
|
||||||
|
"first": "https://www.example.com/user/rat/books/to-read?page=1",
|
||||||
|
"last": "https://www.example.com/user/rat/books/to-read?page=1",
|
||||||
|
"name": "To Read",
|
||||||
|
"owner": "https://www.example.com/user/rat",
|
||||||
|
"to": [
|
||||||
|
"https://www.w3.org/ns/activitystreams#Public"
|
||||||
|
],
|
||||||
|
"cc": [
|
||||||
|
"https://www.example.com/user/rat/followers"
|
||||||
|
],
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"lists": [
|
||||||
|
{
|
||||||
|
"id": "https://www.example.com/list/2",
|
||||||
|
"type": "BookList",
|
||||||
|
"totalItems": 1,
|
||||||
|
"first": "https://www.example.com/list/2?page=1",
|
||||||
|
"last": "https://www.example.com/list/2?page=1",
|
||||||
|
"name": "my list of books",
|
||||||
|
"owner": "https://www.example.com/user/rat",
|
||||||
|
"to": [
|
||||||
|
"https://www.example.com/user/rat/followers"
|
||||||
|
],
|
||||||
|
"cc": [],
|
||||||
|
"summary": "Here is a description of my list",
|
||||||
|
"curation": "closed",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams",
|
||||||
|
"privacy": "followers",
|
||||||
|
"list_item": {
|
||||||
|
"id": "https://www.example.com/user/rat/listitem/3",
|
||||||
|
"type": "ListItem",
|
||||||
|
"actor": "https://www.example.com/user/rat",
|
||||||
|
"book": "https://www.example.com/book/2",
|
||||||
|
"notes": "<p>It's fun.</p>",
|
||||||
|
"approved": true,
|
||||||
|
"order": 1,
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"comments": [],
|
||||||
|
"quotations": [],
|
||||||
|
"reviews": [
|
||||||
|
{
|
||||||
|
"id": "https://www.example.com/user/rat/review/7",
|
||||||
|
"type": "Review",
|
||||||
|
"published": "2023-08-14T04:09:18.343+00:00",
|
||||||
|
"attributedTo": "https://www.example.com//user/rat",
|
||||||
|
"content": "<p>I like it</p>",
|
||||||
|
"to": [
|
||||||
|
"https://your.domain.here/user/rat/followers"
|
||||||
|
],
|
||||||
|
"cc": [],
|
||||||
|
"replies": {
|
||||||
|
"id": "https://www.example.com/user/rat/review/7/replies",
|
||||||
|
"type": "OrderedCollection",
|
||||||
|
"totalItems": 0,
|
||||||
|
"first": "https://www.example.com/user/rat/review/7/replies?page=1",
|
||||||
|
"last": "https://www.example.com/user/rat/review/7/replies?page=1",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
},
|
||||||
|
"summary": "Here's a spoiler alert",
|
||||||
|
"tag": [],
|
||||||
|
"attachment": [],
|
||||||
|
"sensitive": true,
|
||||||
|
"inReplyToBook": "https://www.example.com/book/6",
|
||||||
|
"name": "great book",
|
||||||
|
"rating": 5.0,
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams",
|
||||||
|
"progress": 23,
|
||||||
|
"progress_mode": "PG"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"readthroughs": [
|
||||||
|
{
|
||||||
|
"id": 1,
|
||||||
|
"created_date": "2023-08-14T04:00:27.544Z",
|
||||||
|
"updated_date": "2023-08-14T04:00:27.546Z",
|
||||||
|
"remote_id": "https://www.example.com/user/rat/readthrough/1",
|
||||||
|
"user_id": 1,
|
||||||
|
"book_id": 4880,
|
||||||
|
"progress": null,
|
||||||
|
"progress_mode": "PG",
|
||||||
|
"start_date": "2018-01-01T00:00:00Z",
|
||||||
|
"finish_date": "2023-08-13T00:00:00Z",
|
||||||
|
"stopped_date": null,
|
||||||
|
"is_active": false
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"work": {
|
||||||
|
"id": "https://www.example.com/book/3",
|
||||||
|
"type": "Work",
|
||||||
|
"title": "Sand Talk: How Indigenous Thinking Can Save the World",
|
||||||
|
"description": "",
|
||||||
|
"languages": [],
|
||||||
|
"series": "",
|
||||||
|
"seriesNumber": "",
|
||||||
|
"subjects": [],
|
||||||
|
"subjectPlaces": [],
|
||||||
|
"authors": [
|
||||||
|
"https://www.example.com/author/2"
|
||||||
|
],
|
||||||
|
"firstPublishedDate": "",
|
||||||
|
"publishedDate": "",
|
||||||
|
"fileLinks": [],
|
||||||
|
"lccn": "",
|
||||||
|
"openlibraryKey": "OL28216445M",
|
||||||
|
"editions": [
|
||||||
|
"https://www.example.com/book/4"
|
||||||
|
],
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
},
|
||||||
|
"edition": {
|
||||||
|
"id": "https://www.example.com/book/4",
|
||||||
|
"type": "Edition",
|
||||||
|
"title": "Sand Talk",
|
||||||
|
"sortTitle": "sand talk",
|
||||||
|
"subtitle": "How Indigenous Thinking Can Save the World",
|
||||||
|
"description": "",
|
||||||
|
"languages": [],
|
||||||
|
"series": "",
|
||||||
|
"seriesNumber": "",
|
||||||
|
"subjects": [],
|
||||||
|
"subjectPlaces": [],
|
||||||
|
"authors": [
|
||||||
|
"https://www.example.com/author/2"
|
||||||
|
],
|
||||||
|
"firstPublishedDate": "",
|
||||||
|
"publishedDate": "",
|
||||||
|
"fileLinks": [],
|
||||||
|
"cover": {
|
||||||
|
"type": "Document",
|
||||||
|
"url": "covers/6a553a08-2641-42a1-baa4-960df9edbbfc.jpeg",
|
||||||
|
"name": "Tyson Yunkaporta - Sand Talk",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
},
|
||||||
|
"work": "https://www.example.com/book/3",
|
||||||
|
"isbn10": "",
|
||||||
|
"isbn13": "9780062975645",
|
||||||
|
"oclcNumber": "",
|
||||||
|
"inventaireId": "isbn:9780062975645",
|
||||||
|
"physicalFormat": "paperback",
|
||||||
|
"physicalFormatDetail": "",
|
||||||
|
"publishers": [],
|
||||||
|
"editionRank": 5,
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
},
|
||||||
|
"authors": [
|
||||||
|
{
|
||||||
|
"id": "https://www.example.com/author/2",
|
||||||
|
"type": "Author",
|
||||||
|
"name": "Tyson Yunkaporta",
|
||||||
|
"aliases": [],
|
||||||
|
"bio": "",
|
||||||
|
"wikipediaLink": "",
|
||||||
|
"website": "",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"shelves": [],
|
||||||
|
"lists": [],
|
||||||
|
"comments": [
|
||||||
|
{
|
||||||
|
"id": "https://www.example.com/user/rat/comment/4",
|
||||||
|
"type": "Comment",
|
||||||
|
"published": "2023-08-14T04:48:18.746+00:00",
|
||||||
|
"attributedTo": "https://www.example.com/user/rat",
|
||||||
|
"content": "<p>this is a comment about an amazing book</p>",
|
||||||
|
"to": [
|
||||||
|
"https://www.w3.org/ns/activitystreams#Public"
|
||||||
|
],
|
||||||
|
"cc": [
|
||||||
|
"https://www.example.com/user/rat/followers"
|
||||||
|
],
|
||||||
|
"replies": {
|
||||||
|
"id": "https://www.example.com/user/rat/comment/4/replies",
|
||||||
|
"type": "OrderedCollection",
|
||||||
|
"totalItems": 0,
|
||||||
|
"first": "https://www.example.com/user/rat/comment/4/replies?page=1",
|
||||||
|
"last": "https://www.example.com/user/rat/comment/4/replies?page=1",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
},
|
||||||
|
"tag": [],
|
||||||
|
"attachment": [],
|
||||||
|
"sensitive": false,
|
||||||
|
"inReplyToBook": "https://www.example.com/book/4",
|
||||||
|
"readingStatus": null,
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"quotations": [
|
||||||
|
{
|
||||||
|
"id": "https://www.example.com/user/rat/quotation/2",
|
||||||
|
"type": "Quotation",
|
||||||
|
"published": "2023-11-12T04:29:38.370305+00:00",
|
||||||
|
"attributedTo": "https://www.example.com/user/rat",
|
||||||
|
"content": "<p>not actually from this book lol</p>",
|
||||||
|
"to": [
|
||||||
|
"https://www.w3.org/ns/activitystreams#Public"
|
||||||
|
],
|
||||||
|
"cc": [
|
||||||
|
"https://www.example.com/user/rat/followers"
|
||||||
|
],
|
||||||
|
"replies": {
|
||||||
|
"id": "https://www.example.com/user/rat/quotation/2/replies",
|
||||||
|
"type": "OrderedCollection",
|
||||||
|
"totalItems": 0,
|
||||||
|
"first": "https://www.example.com/user/rat/quotation/2/replies?page=1",
|
||||||
|
"last": "https://www.example.com/user/rat/quotation/2/replies?page=1",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
},
|
||||||
|
"tag": [],
|
||||||
|
"attachment": [],
|
||||||
|
"sensitive": false,
|
||||||
|
"summary": "spoiler ahead!",
|
||||||
|
"inReplyToBook": "https://www.example.com/book/2",
|
||||||
|
"quote": "<p>To be or not to be</p>",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"reviews": [],
|
||||||
|
"readthroughs": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"saved_lists": [
|
||||||
|
"https://local.lists/9999"
|
||||||
|
],
|
||||||
|
"follows": [
|
||||||
|
"https://your.domain.here/user/rat"
|
||||||
|
],
|
||||||
|
"blocks": ["https://your.domain.here/user/badger"]
|
||||||
|
}
|
|
@@ -144,17 +144,19 @@ class BookwyrmExport(TestCase):
     def test_json_export_user_settings(self):
         """Test the json export function for basic user info"""
         data = export_job.json_export(self.local_user)
-        user_data = json.loads(data)["user"]
-        self.assertEqual(user_data["username"], "mouse")
+        user_data = json.loads(data)
+        self.assertEqual(user_data["preferredUsername"], "mouse")
         self.assertEqual(user_data["name"], "Mouse")
-        self.assertEqual(user_data["summary"], "I'm a real bookmouse")
-        self.assertEqual(user_data["manually_approves_followers"], False)
-        self.assertEqual(user_data["hide_follows"], False)
-        self.assertEqual(user_data["show_goal"], False)
-        self.assertEqual(user_data["show_suggested_users"], False)
+        self.assertEqual(user_data["summary"], "<p>I'm a real bookmouse</p>")
+        self.assertEqual(user_data["manuallyApprovesFollowers"], False)
+        self.assertEqual(user_data["hideFollows"], False)
         self.assertEqual(user_data["discoverable"], True)
-        self.assertEqual(user_data["preferred_timezone"], "America/Los Angeles")
-        self.assertEqual(user_data["default_post_privacy"], "followers")
+        self.assertEqual(user_data["settings"]["show_goal"], False)
+        self.assertEqual(user_data["settings"]["show_suggested_users"], False)
+        self.assertEqual(
+            user_data["settings"]["preferred_timezone"], "America/Los Angeles"
+        )
+        self.assertEqual(user_data["settings"]["default_post_privacy"], "followers")
 
     def test_json_export_extended_user_data(self):
         """Test the json export function for other non-book user info"""
@@ -175,10 +177,8 @@ class BookwyrmExport(TestCase):
         self.assertEqual(len(json_data["follows"]), 1)
         self.assertEqual(json_data["follows"][0], "https://your.domain.here/user/rat")
         # blocked users
-        self.assertEqual(len(json_data["blocked_users"]), 1)
-        self.assertEqual(
-            json_data["blocked_users"][0], "https://your.domain.here/user/badger"
-        )
+        self.assertEqual(len(json_data["blocks"]), 1)
+        self.assertEqual(json_data["blocks"][0], "https://your.domain.here/user/badger")
 
     def test_json_export_books(self):
         """Test the json export function for extended user info"""
@@ -188,46 +188,44 @@ class BookwyrmExport(TestCase):
         start_date = json_data["books"][0]["readthroughs"][0]["start_date"]

         self.assertEqual(len(json_data["books"]), 1)
-        self.assertEqual(json_data["books"][0]["title"], "Example Edition")
+        self.assertEqual(json_data["books"][0]["edition"]["title"], "Example Edition")
         self.assertEqual(len(json_data["books"][0]["authors"]), 1)
         self.assertEqual(json_data["books"][0]["authors"][0]["name"], "Sam Zhu")

         self.assertEqual(
             f'"{start_date}"', DjangoJSONEncoder().encode(self.readthrough_start)
         )
-        self.assertEqual(json_data["books"][0]["shelves"][0]["identifier"], "read")
-        self.assertEqual(
-            json_data["books"][0]["shelf_books"]["read"][0]["book_id"], self.edition.id
-        )
+        self.assertEqual(json_data["books"][0]["shelves"][0]["name"], "Read")

         self.assertEqual(len(json_data["books"][0]["lists"]), 1)
         self.assertEqual(json_data["books"][0]["lists"][0]["name"], "My excellent list")
-        self.assertEqual(len(json_data["books"][0]["list_items"]), 1)
         self.assertEqual(
-            json_data["books"][0]["list_items"]["My excellent list"][0]["book_id"],
+            json_data["books"][0]["lists"][0]["list_item"]["book"],
+            self.edition.remote_id,
             self.edition.id,
         )

         self.assertEqual(len(json_data["books"][0]["reviews"]), 1)
         self.assertEqual(len(json_data["books"][0]["comments"]), 1)
-        self.assertEqual(len(json_data["books"][0]["quotes"]), 1)
+        self.assertEqual(len(json_data["books"][0]["quotations"]), 1)

         self.assertEqual(json_data["books"][0]["reviews"][0]["name"], "my review")
-        self.assertEqual(json_data["books"][0]["reviews"][0]["content"], "awesome")
-        self.assertEqual(json_data["books"][0]["reviews"][0]["rating"], "5.00")
+        self.assertEqual(
+            json_data["books"][0]["reviews"][0]["content"], "<p>awesome</p>"
+        )
+        self.assertEqual(json_data["books"][0]["reviews"][0]["rating"], 5.0)

-        self.assertEqual(json_data["books"][0]["comments"][0]["content"], "ok so far")
+        self.assertEqual(
+            json_data["books"][0]["comments"][0]["content"], "<p>ok so far</p>"
+        )
         self.assertEqual(json_data["books"][0]["comments"][0]["progress"], 15)
         self.assertEqual(json_data["books"][0]["comments"][0]["progress_mode"], "PG")

         self.assertEqual(
-            json_data["books"][0]["quotes"][0]["content"], "check this out"
+            json_data["books"][0]["quotations"][0]["content"], "<p>check this out</p>"
         )
         self.assertEqual(
-            json_data["books"][0]["quotes"][0]["quote"], "A rose by any other name"
+            json_data["books"][0]["quotations"][0]["quote"],
+            "<p>A rose by any other name</p>",
         )

-    def test_tar_export(self):
-        """test the tar export function"""
-
-        # TODO
-        pass  # pylint: disable=unnecessary-pass

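Taken together, the updated export assertions imply roughly this document shape: ActivityPub-style user fields at the top level, instance-only preferences under "settings", and each "books" entry nesting the edition together with its shelves, lists, reviews, comments, and quotations as rendered HTML. A minimal sketch with made-up values (the real document comes from export_job.json_export()):

    # Illustrative only: field names follow the assertions above, values are examples.
    export = {
        "preferredUsername": "mouse",
        "summary": "<p>I'm a real bookmouse</p>",
        "settings": {
            "show_goal": False,
            "preferred_timezone": "America/Los Angeles",
            "default_post_privacy": "followers",
        },
        "books": [
            {
                "edition": {"title": "Example Edition"},
                "shelves": [{"name": "Read"}],
                "lists": [
                    {
                        "name": "My excellent list",
                        "list_item": {"book": "https://example.com/book/1"},
                    }
                ],
                "reviews": [{"name": "my review", "content": "<p>awesome</p>", "rating": 5.0}],
                "comments": [{"content": "<p>ok so far</p>", "progress": 15, "progress_mode": "PG"}],
                "quotations": [{"content": "<p>check this out</p>", "quote": "<p>A rose by any other name</p>"}],
            }
        ],
    }

    print(export["books"][0]["edition"]["title"])  # Example Edition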
@@ -53,57 +53,62 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
             localname="badger",
         )

-        self.work = models.Work.objects.create(title="Test Book")
+        self.work = models.Work.objects.create(title="Sand Talk")

         self.book = models.Edition.objects.create(
-            title="Test Book",
+            title="Sand Talk",
             remote_id="https://example.com/book/1234",
             openlibrary_key="OL28216445M",
+            inventaire_id="isbn:9780062975645",
+            isbn_13="9780062975645",
             parent_work=self.work,
         )

+        self.json_file = pathlib.Path(__file__).parent.joinpath(
+            "../data/user_import.json"
+        )
+
+        with open(self.json_file, "r", encoding="utf-8") as jsonfile:
+            self.json_data = json.loads(jsonfile.read())
+
         self.archive_file = pathlib.Path(__file__).parent.joinpath(
             "../data/bookwyrm_account_export.tar.gz"
         )
-        with open(self.archive_file, "rb") as fileobj:
-            with BookwyrmTarFile.open(mode="r:gz", fileobj=fileobj) as tarfile:
-                self.import_data = json.loads(
-                    tarfile.read("archive.json").decode("utf-8")
-                )

     def test_update_user_profile(self):
         """Test update the user's profile from import data"""

         with patch("bookwyrm.suggested_users.remove_user_task.delay"), patch(
             "bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"
-        ):
+        ), patch("bookwyrm.suggested_users.rerank_user_task.delay"):

             with open(self.archive_file, "rb") as fileobj:
                 with BookwyrmTarFile.open(mode="r:gz", fileobj=fileobj) as tarfile:

                     models.bookwyrm_import_job.update_user_profile(
-                        self.local_user, tarfile, self.import_data.get("user")
+                        self.local_user, tarfile, self.json_data
                     )
-        self.local_user.refresh_from_db()

+        self.local_user.refresh_from_db()
+
         self.assertEqual(
             self.local_user.username, "mouse"
         )  # username should not change
         self.assertEqual(self.local_user.name, "Rat")
         self.assertEqual(
             self.local_user.summary,
             "I love to make soup in Paris and eat pizza in New York",
         )

     def test_update_user_settings(self):
         """Test updating the user's settings from import data"""

         with patch("bookwyrm.suggested_users.remove_user_task.delay"), patch(
             "bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"
-        ):
+        ), patch("bookwyrm.suggested_users.rerank_user_task.delay"):

             models.bookwyrm_import_job.update_user_settings(
-                self.local_user, self.import_data.get("user")
+                self.local_user, self.json_data
             )
         self.local_user.refresh_from_db()

@@ -125,11 +130,11 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
             privacy="public",
         )

+        goals = [{"goal": 12, "year": 2023, "privacy": "followers"}]
+
         with patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"):

-            models.bookwyrm_import_job.update_goals(
-                self.local_user, self.import_data.get("goals")
-            )
+            models.bookwyrm_import_job.update_goals(self.local_user, goals)

         self.local_user.refresh_from_db()
         goal = models.AnnualGoal.objects.get()
@@ -198,7 +203,7 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
             "bookwyrm.lists_stream.add_user_lists_task.delay"
         ), patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"):
             models.bookwyrm_import_job.upsert_follows(
-                self.local_user, self.import_data.get("follows")
+                self.local_user, self.json_data.get("follows")
             )

         after_follow = models.UserFollows.objects.filter(
@@ -223,7 +228,7 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
             "bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"
         ):
             models.bookwyrm_import_job.upsert_user_blocks(
-                self.local_user, self.import_data.get("blocked_users")
+                self.local_user, self.json_data.get("blocks")
             )

         blocked_after = models.UserBlocks.objects.filter(
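The hunks above all make the same move: the import helpers now take the parsed export JSON (or a plain list built from it) rather than a nested "user" blob read out of the archive. A condensed sketch of the calls the tests exercise; the parameters stand in for the fixtures built in the test class, so this is illustrative rather than runnable against a live database:

    def run_user_import_steps(models, local_user, json_data, goals):
        # Mirrors the calls asserted in the tests above.
        models.bookwyrm_import_job.update_user_settings(local_user, json_data)
        models.bookwyrm_import_job.update_goals(local_user, goals)
        models.bookwyrm_import_job.upsert_follows(local_user, json_data.get("follows"))
        models.bookwyrm_import_job.upsert_user_blocks(local_user, json_data.get("blocks"))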
@@ -234,19 +239,6 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
         ).exists()
         self.assertTrue(blocked_after)

-    def test_get_or_create_authors(self):
-        """Test taking a JSON string of authors find or create the authors
-        in the database and returning a list of author instances"""
-
-        author_exists = models.Author.objects.filter(isni="0000000108973024").exists()
-        self.assertFalse(author_exists)
-
-        authors = self.import_data.get("books")[0]["authors"]
-        bookwyrm_import_job.get_or_create_authors(authors)
-
-        author = models.Author.objects.get(isni="0000000108973024")
-        self.assertEqual(author.name, "James C. Scott")
-
     def test_get_or_create_edition_existing(self):
         """Test take a JSON string of books and editions,
         find or create the editions in the database and
@@ -258,7 +250,7 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
             with BookwyrmTarFile.open(mode="r:gz", fileobj=fileobj) as tarfile:

                 bookwyrm_import_job.get_or_create_edition(
-                    self.import_data["books"][1], tarfile
+                    self.json_data["books"][1], tarfile
                 )  # Sand Talk

         self.assertEqual(models.Edition.objects.count(), 1)
@@ -272,53 +264,13 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods

         with open(self.archive_file, "rb") as fileobj:
             with BookwyrmTarFile.open(mode="r:gz", fileobj=fileobj) as tarfile:

                 bookwyrm_import_job.get_or_create_edition(
-                    self.import_data["books"][0], tarfile
+                    self.json_data["books"][0], tarfile
                 )  # Seeing like a state

-        self.assertTrue(
-            models.Edition.objects.filter(isbn_13="9780300070163").exists()
-        )
-        self.assertEqual(models.Edition.objects.count(), 2)
-
-    def test_clean_values(self):
-        """test clean values we don't want when creating new instances"""
-
-        author = self.import_data.get("books")[0]["authors"][0]
-        edition = self.import_data.get("books")[0]["edition"]
-
-        cleaned_author = bookwyrm_import_job.clean_values(author)
-        cleaned_edition = bookwyrm_import_job.clean_values(edition)
-
-        self.assertEqual(cleaned_author["name"], "James C. Scott")
-        self.assertEqual(cleaned_author.get("id"), None)
-        self.assertEqual(cleaned_author.get("remote_id"), None)
-        self.assertEqual(cleaned_author.get("last_edited_by"), None)
-        self.assertEqual(cleaned_author.get("last_edited_by_id"), None)
-
-        self.assertEqual(cleaned_edition.get("title"), "Seeing Like a State")
-        self.assertEqual(cleaned_edition.get("id"), None)
-        self.assertEqual(cleaned_edition.get("remote_id"), None)
-        self.assertEqual(cleaned_edition.get("last_edited_by"), None)
-        self.assertEqual(cleaned_edition.get("last_edited_by_id"), None)
-        self.assertEqual(cleaned_edition.get("cover"), None)
-        self.assertEqual(cleaned_edition.get("preview_image "), None)
-        self.assertEqual(cleaned_edition.get("user"), None)
-        self.assertEqual(cleaned_edition.get("book_list"), None)
-        self.assertEqual(cleaned_edition.get("shelf_book"), None)
-
-    def test_find_existing(self):
-        """Given a book or author, find any existing model instances"""
-
-        self.assertEqual(models.Book.objects.count(), 2)  # includes Work
-        self.assertEqual(models.Edition.objects.count(), 1)
-        self.assertEqual(models.Edition.objects.first().title, "Test Book")
-        self.assertEqual(models.Edition.objects.first().openlibrary_key, "OL28216445M")
-
-        existing = bookwyrm_import_job.find_existing(
-            models.Edition, {"openlibrary_key": "OL28216445M", "isbn_10": None}
-        )
-        self.assertEqual(existing.title, "Test Book")
+        self.assertTrue(models.Edition.objects.filter(isbn_13="9780300070163").exists())
+        self.assertEqual(models.Edition.objects.count(), 2)

     def test_upsert_readthroughs(self):
         """Test take a JSON string of readthroughs, find or create the
@@ -332,7 +284,7 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
                 "remote_id": "https://example.com/mouse/readthrough/1",
                 "user_id": 1,
                 "book_id": 1234,
-                "progress": None,
+                "progress": 23,
                 "progress_mode": "PG",
                 "start_date": "2022-12-31T13:30:00Z",
                 "finish_date": "2023-08-23T14:30:00Z",
@@ -355,19 +307,20 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
         self.assertEqual(models.ReadThrough.objects.first().book_id, self.book.id)
         self.assertEqual(models.ReadThrough.objects.first().user, self.local_user)

-    def test_get_or_create_review_status(self):
+    def test_get_or_create_review(self):
         """Test get_or_create_review_status with a review"""

         self.assertEqual(models.Review.objects.filter(user=self.local_user).count(), 0)
-        reviews = self.import_data["books"][0]["reviews"]
+        reviews = self.json_data["books"][0]["reviews"]
         with patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"):
-            bookwyrm_import_job.get_or_create_statuses(
-                self.local_user, models.Review, reviews, self.book.id
+            bookwyrm_import_job.upsert_statuses(
+                self.local_user, models.Review, reviews, self.book.remote_id
             )

         self.assertEqual(models.Review.objects.filter(user=self.local_user).count(), 1)
         self.assertEqual(
-            models.Review.objects.filter(book=self.book).first().raw_content,
-            "I like it",
+            models.Review.objects.filter(book=self.book).first().content,
+            "<p>I like it</p>",
         )
         self.assertEqual(
             models.Review.objects.filter(book=self.book).first().content_warning,
@@ -376,10 +329,6 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
         self.assertEqual(
             models.Review.objects.filter(book=self.book).first().sensitive, True
         )
-        self.assertEqual(
-            models.Review.objects.filter(book=self.book).first().published_date,
-            parse_datetime("2023-08-14T04:09:18.343Z"),
-        )
         self.assertEqual(
             models.Review.objects.filter(book=self.book).first().name, "great book"
         )
@@ -387,19 +336,23 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
             models.Review.objects.filter(book=self.book).first().rating, 5.00
         )

-    def test_get_or_create_comment_status(self):
+        self.assertEqual(
+            models.Review.objects.filter(book=self.book).first().privacy, "followers"
+        )
+
+    def test_get_or_create_comment(self):
         """Test get_or_create_review_status with a comment"""

         self.assertEqual(models.Comment.objects.filter(user=self.local_user).count(), 0)
-        comments = self.import_data["books"][1]["comments"]
+        comments = self.json_data["books"][1]["comments"]
         with patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"):
-            bookwyrm_import_job.get_or_create_statuses(
-                self.local_user, models.Comment, comments, self.book.id
+            bookwyrm_import_job.upsert_statuses(
+                self.local_user, models.Comment, comments, self.book.remote_id
             )
         self.assertEqual(models.Comment.objects.filter(user=self.local_user).count(), 1)
         self.assertEqual(
-            models.Comment.objects.filter(book=self.book).first().raw_content,
-            "this is a comment about an amazing book",
+            models.Comment.objects.filter(book=self.book).first().content,
+            "<p>this is a comment about an amazing book</p>",
         )
         self.assertEqual(
             models.Comment.objects.filter(book=self.book).first().content_warning, None
@@ -407,56 +360,45 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
         self.assertEqual(
             models.Comment.objects.filter(book=self.book).first().sensitive, False
         )
-        self.assertEqual(
-            models.Comment.objects.filter(book=self.book).first().published_date,
-            parse_datetime("2023-08-14T04:48:18.746Z"),
-        )
         self.assertEqual(
             models.Comment.objects.filter(book=self.book).first().progress_mode, "PG"
         )

-    def test_get_or_create_comment_quote(self):
+    def test_get_or_create_quote(self):
         """Test get_or_create_review_status with a quote"""

         self.assertEqual(
             models.Quotation.objects.filter(user=self.local_user).count(), 0
         )
-        quotes = self.import_data["books"][1]["quotes"]
+        quotes = self.json_data["books"][1]["quotations"]
         with patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"):
-            bookwyrm_import_job.get_or_create_statuses(
-                self.local_user, models.Quotation, quotes, self.book.id
+            bookwyrm_import_job.upsert_statuses(
+                self.local_user, models.Quotation, quotes, self.book.remote_id
             )
         self.assertEqual(
             models.Quotation.objects.filter(user=self.local_user).count(), 1
         )
         self.assertEqual(
-            models.Quotation.objects.filter(book=self.book).first().raw_content,
-            "not actually from this book lol",
+            models.Quotation.objects.filter(book=self.book).first().content,
+            "<p>not actually from this book lol</p>",
         )
         self.assertEqual(
             models.Quotation.objects.filter(book=self.book).first().content_warning,
             "spoiler ahead!",
         )
         self.assertEqual(
-            models.Quotation.objects.filter(book=self.book).first().raw_quote,
-            "To be or not to be",
-        )
-        self.assertEqual(
-            models.Quotation.objects.filter(book=self.book).first().published_date,
-            parse_datetime("2023-08-14T04:48:50.207Z"),
+            models.Quotation.objects.filter(book=self.book).first().quote,
+            "<p>To be or not to be</p>",
         )
         self.assertEqual(
             models.Quotation.objects.filter(book=self.book).first().position_mode, "PG"
         )
-        self.assertEqual(
-            models.Quotation.objects.filter(book=self.book).first().position, 1
-        )

     def test_upsert_list_existing(self):
         """Take a list and ListItems as JSON and create DB entries
         if they don't already exist"""

-        book_data = self.import_data["books"][0]
+        book_data = self.json_data["books"][0]

         other_book = models.Edition.objects.create(
             title="Another Book", remote_id="https://example.com/book/9876"
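For orientation, the renamed status helper is driven the same way for reviews, comments, and quotations: a list of ActivityPub status objects plus the book's remote_id. A sketch shaped after the three test calls above; the parameters stand in for the test fixtures, and nothing here goes beyond what the tests show:

    def import_book_statuses(bookwyrm_import_job, models, local_user, book_json, book):
        # One upsert_statuses call per status type, keyed by the book's remote_id.
        for model, key in (
            (models.Review, "reviews"),
            (models.Comment, "comments"),
            (models.Quotation, "quotations"),
        ):
            bookwyrm_import_job.upsert_statuses(local_user, model, book_json[key], book.remote_id)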
@@ -488,7 +430,6 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
             bookwyrm_import_job.upsert_lists(
                 self.local_user,
                 book_data["lists"],
-                book_data["list_items"],
                 other_book.id,
             )

@@ -505,7 +446,7 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
         """Take a list and ListItems as JSON and create DB entries
         if they don't already exist"""

-        book_data = self.import_data["books"][0]
+        book_data = self.json_data["books"][0]

         self.assertEqual(models.List.objects.filter(user=self.local_user).count(), 0)
         self.assertFalse(models.ListItem.objects.filter(book=self.book.id).exists())
@@ -516,7 +457,6 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
             bookwyrm_import_job.upsert_lists(
                 self.local_user,
                 book_data["lists"],
-                book_data["list_items"],
                 self.book.id,
             )

@@ -542,7 +482,7 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
             book=self.book, shelf=shelf, user=self.local_user
         )

-        book_data = self.import_data["books"][0]
+        book_data = self.json_data["books"][0]
         with patch("bookwyrm.activitystreams.add_book_statuses_task.delay"), patch(
             "bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"
         ):
@@ -560,7 +500,7 @@ class BookwyrmImport(TestCase):  # pylint: disable=too-many-public-methods
             models.ShelfBook.objects.filter(user=self.local_user.id).count(), 0
         )

-        book_data = self.import_data["books"][0]
+        book_data = self.json_data["books"][0]

         with patch("bookwyrm.activitystreams.add_book_statuses_task.delay"), patch(
             "bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"