Merge pull request #1 from hughrun/export-as-ap

Initial work to use ActivityPub (AP) JSON for user export/import
Hugh Rundle, 2023-11-13 20:57:22 +11:00, committed by GitHub
commit acb385507a
10 changed files with 800 additions and 534 deletions
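The gist of the change: instead of exporting a hand-built dict of model fields, json_export now starts from user.to_activity() — the ActivityPub Person document — and nests books, authors, shelves, lists and statuses as AP objects, with anything that has no AP representation (settings, goals, readthroughs, saved lists, follows, blocks) stored under plain keys. A minimal sketch of reading the new format, assuming the export_job module alias used by the tests further down and an existing User instance named user:

import json

# assumed module path/alias, matching the export tests below
from bookwyrm.models import bookwyrm_export_job as export_job

# json_export returns a JSON string: the AP Person document plus extra
# "settings", "goals", "books", "saved_lists", "follows" and "blocks" keys
data = json.loads(export_job.json_export(user))

data["preferredUsername"]                  # AP field
data["settings"]["default_post_privacy"]   # Bookwyrm-only setting
data["books"][0]["edition"]["title"]       # nested AP Edition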

View file

@@ -22,8 +22,6 @@ class BookData(ActivityObject):
aasin: Optional[str] = None
isfdb: Optional[str] = None
lastEditedBy: Optional[str] = None
links: list[str] = field(default_factory=list)
fileLinks: list[str] = field(default_factory=list)
# pylint: disable=invalid-name
@@ -45,6 +43,8 @@ class Book(BookData):
firstPublishedDate: str = ""
publishedDate: str = ""
fileLinks: list[str] = field(default_factory=list)
cover: Optional[Document] = None
type: str = "Book"

View file

@@ -1,5 +1,6 @@
"""Export user account to tar.gz file for import into another Bookwyrm instance"""
import dataclasses
import logging
from uuid import uuid4
@@ -8,12 +9,11 @@ from django.db.models import Q
from django.core.serializers.json import DjangoJSONEncoder
from django.core.files.base import ContentFile
from bookwyrm.models import AnnualGoal, ReadThrough, ShelfBook, Shelf, List, ListItem
from bookwyrm.models import AnnualGoal, ReadThrough, ShelfBook, List, ListItem
from bookwyrm.models import Review, Comment, Quotation
from bookwyrm.models import Edition, Book
from bookwyrm.models import Edition
from bookwyrm.models import UserFollows, User, UserBlocks
from bookwyrm.models.job import ParentJob, ParentTask
from bookwyrm.settings import DOMAIN
from bookwyrm.tasks import app, IMPORTS
from bookwyrm.utils.tar import BookwyrmTarFile
@@ -63,7 +63,7 @@ def tar_export(json_data: str, user, file):
if getattr(user, "avatar", False):
tar.add_image(user.avatar, filename="avatar")
editions, books = get_books_for_user(user) # pylint: disable=unused-variable
editions = get_books_for_user(user)
for book in editions:
if getattr(book, "cover", False):
tar.add_image(book.cover)
@@ -71,138 +71,162 @@ def tar_export(json_data: str, user, file):
file.close()
def json_export(user): # pylint: disable=too-many-locals, too-many-statements
def json_export(
user,
): # pylint: disable=too-many-locals, too-many-statements, too-many-branches
"""Generate an export for a user"""
# user
exported_user = {}
# User as AP object
exported_user = user.to_activity()
# I don't love this but it prevents a JSON encoding error
# when there is no user image
if isinstance(
exported_user["icon"],
dataclasses._MISSING_TYPE, # pylint: disable=protected-access
):
exported_user["icon"] = {}
else:
# change the URL to be relative to the JSON file
file_type = exported_user["icon"]["url"].rsplit(".", maxsplit=1)[-1]
filename = f"avatar.{file_type}"
exported_user["icon"]["url"] = filename
# Additional settings - can't be serialized as AP
vals = [
"username",
"name",
"summary",
"manually_approves_followers",
"hide_follows",
"show_goal",
"show_suggested_users",
"discoverable",
"preferred_timezone",
"default_post_privacy",
"show_suggested_users",
]
exported_user["settings"] = {}
for k in vals:
exported_user[k] = getattr(user, k)
exported_user["settings"][k] = getattr(user, k)
if getattr(user, "avatar", False):
exported_user["avatar"] = f'https://{DOMAIN}{getattr(user, "avatar").url}'
# reading goals
# Reading goals - can't be serialized as AP
reading_goals = AnnualGoal.objects.filter(user=user).distinct()
goals_list = []
# TODO: either the error checking should be more sophisticated,
# or maybe we don't need this try/except at all
try:
exported_user["goals"] = []
for goal in reading_goals:
goals_list.append(
exported_user["goals"].append(
{"goal": goal.goal, "year": goal.year, "privacy": goal.privacy}
)
except Exception: # pylint: disable=broad-except
pass
try:
# Reading history - can't be serialized as AP
readthroughs = ReadThrough.objects.filter(user=user).distinct().values()
readthroughs = list(readthroughs)
except Exception: # pylint: disable=broad-except
readthroughs = []
# books
editions, books = get_books_for_user(user)
final_books = []
# Books
editions = get_books_for_user(user)
exported_user["books"] = []
for edition in editions:
book = {}
book["work"] = edition.parent_work.to_activity()
book["edition"] = edition.to_activity()
if book["edition"].get("cover"):
# change the URL to be relative to the JSON file
filename = book["edition"]["cover"]["url"].rsplit("/", maxsplit=1)[-1]
book["edition"]["cover"]["url"] = f"covers/{filename}"
for book in books.values():
edition = editions.filter(id=book["id"])
book["edition"] = edition.values()[0]
# authors
book["authors"] = list(edition.first().authors.all().values())
# readthroughs
book["authors"] = []
for author in edition.authors.all():
book["authors"].append(author.to_activity())
# Shelves this book is on
# Every ShelfBook here is for this book, so we don't bother serializing them
book["shelves"] = []
shelf_books = (
ShelfBook.objects.select_related("shelf")
.filter(user=user, book=edition)
.distinct()
)
for shelfbook in shelf_books:
book["shelves"].append(shelfbook.shelf.to_activity())
# Lists and ListItems
# ListItems include "notes" and "approved" so we need them
# even though we know it's this book
book["lists"] = []
list_items = ListItem.objects.filter(book=edition, user=user).distinct()
for item in list_items:
list_info = item.book_list.to_activity()
list_info[
"privacy"
] = item.book_list.privacy # this isn't serialized so we add it
list_info["list_item"] = item.to_activity()
book["lists"].append(list_info)
# Statuses
# Can't use select_subclasses here because
# we need to filter on the "book" value,
# which is not available on an ordinary Status
for status in ["comments", "quotations", "reviews"]:
book[status] = []
comments = Comment.objects.filter(user=user, book=edition).all()
for status in comments:
obj = status.to_activity()
obj["progress"] = status.progress
obj["progress_mode"] = status.progress_mode
book["comments"].append(obj)
quotes = Quotation.objects.filter(user=user, book=edition).all()
for status in quotes:
obj = status.to_activity()
obj["position"] = status.position
obj["endposition"] = status.endposition
obj["position_mode"] = status.position_mode
book["quotations"].append(obj)
reviews = Review.objects.filter(user=user, book=edition).all()
for status in reviews:
obj = status.to_activity()
book["reviews"].append(obj)
# readthroughs can't be serialized to activity
book_readthroughs = (
ReadThrough.objects.filter(user=user, book=book["id"]).distinct().values()
ReadThrough.objects.filter(user=user, book=edition).distinct().values()
)
book["readthroughs"] = list(book_readthroughs)
# shelves
shelf_books = ShelfBook.objects.filter(user=user, book=book["id"]).distinct()
shelves_from_books = Shelf.objects.filter(shelfbook__in=shelf_books, user=user)
book["shelves"] = list(shelves_from_books.values())
book["shelf_books"] = {}
for shelf in shelves_from_books:
shelf_contents = ShelfBook.objects.filter(user=user, shelf=shelf).distinct()
book["shelf_books"][shelf.identifier] = list(shelf_contents.values())
# book lists
book_lists = List.objects.filter(books__in=[book["id"]], user=user).distinct()
book["lists"] = list(book_lists.values())
book["list_items"] = {}
for blist in book_lists:
list_items = ListItem.objects.filter(book_list=blist).distinct()
book["list_items"][blist.name] = list(list_items.values())
# reviews
reviews = Review.objects.filter(user=user, book=book["id"]).distinct()
book["reviews"] = list(reviews.values())
# comments
comments = Comment.objects.filter(user=user, book=book["id"]).distinct()
book["comments"] = list(comments.values())
# quotes
quotes = Quotation.objects.filter(user=user, book=book["id"]).distinct()
book["quotes"] = list(quotes.values())
# append everything
final_books.append(book)
exported_user["books"].append(book)
# saved book lists
# saved book lists - just the remote id
saved_lists = List.objects.filter(id__in=user.saved_lists.all()).distinct()
saved_lists = [l.remote_id for l in saved_lists]
exported_user["saved_lists"] = [l.remote_id for l in saved_lists]
# follows
# follows - just the remote id
follows = UserFollows.objects.filter(user_subject=user).distinct()
following = User.objects.filter(userfollows_user_object__in=follows).distinct()
follows = [f.remote_id for f in following]
exported_user["follows"] = [f.remote_id for f in following]
# blocks
# blocks - just the remote id
blocks = UserBlocks.objects.filter(user_subject=user).distinct()
blocking = User.objects.filter(userblocks_user_object__in=blocks).distinct()
blocks = [b.remote_id for b in blocking]
exported_user["blocks"] = [b.remote_id for b in blocking]
data = {
"user": exported_user,
"goals": goals_list,
"books": final_books,
"saved_lists": saved_lists,
"follows": follows,
"blocked_users": blocks,
}
return DjangoJSONEncoder().encode(data)
return DjangoJSONEncoder().encode(exported_user)
def get_books_for_user(user):
"""Get all the books and editions related to a user
:returns: tuple of editions, books
"""
"""Get all the books and editions related to a user"""
editions = Edition.objects.filter(
editions = (
Edition.objects.select_related("parent_work")
.filter(
Q(shelves__user=user)
| Q(readthrough__user=user)
| Q(review__user=user)
| Q(list__user=user)
| Q(comment__user=user)
| Q(quotation__user=user)
).distinct()
books = Book.objects.filter(id__in=editions).distinct()
return editions, books
)
.distinct()
)
return editions
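For context, tar_export (above) packages the JSON produced by json_export together with the avatar and cover images. A hedged end-to-end sketch, assuming the same export_job alias and user instance as in the earlier example, plus a hypothetical output path; note that tar_export closes the file object itself:

json_data = export_job.json_export(user)  # AP JSON string

# writes archive.json, the avatar image (if any) and each exported
# edition's cover into the tarball; "/tmp/..." is a made-up path
with open("/tmp/bookwyrm_export.tar.gz", "wb") as outfile:
    export_job.tar_export(json_data, user, outfile)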

View file

@@ -1,13 +1,11 @@
"""Import a user from another Bookwyrm instance"""
from functools import reduce
import json
import logging
import operator
from django.db.models import FileField, JSONField, CharField
from django.db.models import Q
from django.utils.dateparse import parse_datetime
from django.utils import timezone
from django.utils.html import strip_tags
from django.contrib.postgres.fields import ArrayField as DjangoArrayField
from bookwyrm import activitypub
@@ -47,9 +45,9 @@ def start_import_task(**kwargs):
job.import_data = json.loads(tar.read("archive.json").decode("utf-8"))
if "include_user_profile" in job.required:
update_user_profile(job.user, tar, job.import_data.get("user"))
update_user_profile(job.user, tar, job.import_data)
if "include_user_settings" in job.required:
update_user_settings(job.user, job.import_data.get("user"))
update_user_settings(job.user, job.import_data)
if "include_goals" in job.required:
update_goals(job.user, job.import_data.get("goals"))
if "include_saved_lists" in job.required:
@@ -57,7 +55,7 @@ def start_import_task(**kwargs):
if "include_follows" in job.required:
upsert_follows(job.user, job.import_data.get("follows"))
if "include_blocks" in job.required:
upsert_user_blocks(job.user, job.import_data.get("blocked_users"))
upsert_user_blocks(job.user, job.import_data.get("blocks"))
process_books(job, tar)
@@ -70,10 +68,12 @@ def start_import_task(**kwargs):
def process_books(job, tar):
"""process user import data related to books"""
"""
Process user import data related to books.
We always import the books even if we are not assigning
them to shelves, lists, etc.
"""
# create the books. We need to merge Book and Edition instances
# and also check whether these books already exist in the DB
books = job.import_data.get("books")
for data in books:
@@ -85,308 +85,193 @@ def process_books(job, tar):
if "include_readthroughs" in job.required:
upsert_readthroughs(data.get("readthroughs"), job.user, book.id)
if "include_reviews" in job.required:
get_or_create_statuses(
job.user, models.Review, data.get("reviews"), book.id
)
if "include_comments" in job.required:
get_or_create_statuses(
job.user, models.Comment, data.get("comments"), book.id
upsert_statuses(
job.user, models.Comment, data.get("comments"), book.remote_id
)
if "include_quotations" in job.required:
upsert_statuses(
job.user, models.Quotation, data.get("quotations"), book.remote_id
)
if "include_quotes" in job.required:
get_or_create_statuses(
job.user, models.Quotation, data.get("quotes"), book.id
if "include_reviews" in job.required:
upsert_statuses(
job.user, models.Review, data.get("reviews"), book.remote_id
)
if "include_lists" in job.required:
upsert_lists(job.user, data.get("lists"), data.get("list_items"), book.id)
upsert_lists(job.user, data.get("lists"), book.id)
def get_or_create_edition(book_data, tar):
"""Take a JSON string of book and edition data,
find or create the edition in the database and
"""Take a JSON string of work and edition data,
find or create the edition and work in the database and
return an edition instance"""
cover_path = book_data.get(
"cover", None
) # we use this further down but need to assign a var before cleaning
clean_book = clean_values(book_data)
book = clean_book.copy() # don't mutate the original book data
# prefer edition values only if they are not null
edition = clean_values(book["edition"])
for key in edition.keys():
if key not in book.keys() or (
key in book.keys() and (edition[key] not in [None, ""])
):
book[key] = edition[key]
existing = find_existing(models.Edition, book)
edition = book_data.get("edition")
existing = models.Edition.find_existing(edition)
if existing:
return existing
# the book is not in the local database, so we have to do this the hard way
local_authors = get_or_create_authors(book["authors"])
# make sure we have the authors in the local DB
# replace the old author ids in the edition JSON
edition["authors"] = []
for author in book_data.get("authors"):
parsed_author = activitypub.parse(author)
instance = parsed_author.to_model(
model=models.Author, save=True, overwrite=True
)
# get rid of everything that's not strictly in a Book
# or is many-to-many so can't be set directly
associated_values = [
"edition",
"authors",
"readthroughs",
"shelves",
"shelf_books",
"lists",
"list_items",
"reviews",
"comments",
"quotes",
]
edition["authors"].append(instance.remote_id)
for val in associated_values:
del book[val]
# we will add the cover later from the tar
# don't try to load it from the old server
cover = edition.get("cover", {})
cover_path = cover.get("url", None)
edition["cover"] = {}
# now we can save the book as an Edition
new_book = models.Edition.objects.create(**book)
new_book.authors.set(local_authors) # now we can add authors with set()
# first we need the parent work to exist
work = book_data.get("work")
work["editions"] = []
parsed_work = activitypub.parse(work)
work_instance = parsed_work.to_model(model=models.Work, save=True, overwrite=True)
# get cover from original book_data because we lost it in clean_values
# now we have a work we can add it to the edition
# and create the edition model instance
edition["work"] = work_instance.remote_id
parsed_edition = activitypub.parse(edition)
book = parsed_edition.to_model(model=models.Edition, save=True, overwrite=True)
# set the cover image from the tar
if cover_path:
tar.write_image_to_file(cover_path, new_book.cover)
tar.write_image_to_file(cover_path, book.cover)
# NOTE: clean_values removes "last_edited_by"
# because it's a user ID from the old database
# if this is required, bookwyrm_export_job will
# need to bring in the user who edited it.
# create parent
work = models.Work.objects.create(title=book["title"])
work.authors.set(local_authors)
new_book.parent_work = work
new_book.save(broadcast=False)
return new_book
def clean_values(data):
"""clean values we don't want when creating new instances"""
values = [
"id",
"pk",
"remote_id",
"cover",
"preview_image",
"last_edited_by",
"last_edited_by_id",
"user",
"book_list",
"shelf_book",
"parent_work_id",
]
common = data.keys() & values
new_data = data
for val in common:
del new_data[val]
return new_data
def find_existing(cls, data):
"""Given a book or author, find any existing model instances"""
identifiers = [
"openlibrary_key",
"inventaire_id",
"librarything_key",
"goodreads_key",
"asin",
"isfdb",
"isbn_10",
"isbn_13",
"oclc_number",
"origin_id",
"viaf",
"wikipedia_link",
"isni",
"gutenberg_id",
]
match_fields = []
for i in identifiers:
if data.get(i) not in [None, ""]:
match_fields.append({i: data.get(i)})
if len(match_fields) > 0:
match = cls.objects.filter(reduce(operator.or_, (Q(**f) for f in match_fields)))
return match.first()
return None
def get_or_create_authors(data):
"""Take a JSON string of authors find or create the authors
in the database and return a list of author instances"""
authors = []
for author in data:
clean = clean_values(author)
existing = find_existing(models.Author, clean)
if existing:
authors.append(existing)
else:
new = models.Author.objects.create(**clean)
authors.append(new)
return authors
return book
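The importer now leans on a single pattern for authors, works, editions, lists and statuses: parse the AP dict with activitypub.parse() and materialize it locally with to_model(..., save=True, overwrite=True). A minimal sketch of that pattern for an author record, trimmed from the example fixture further down (the trimming is an assumption; a real record carries the full field set shown there):

from bookwyrm import activitypub, models

author_json = {
    "id": "https://www.example.com/author/1",
    "type": "Author",
    "name": "James C. Scott",
    "aliases": [],
    "bio": "",
    "wikipediaLink": "",
    "website": "",
    "@context": "https://www.w3.org/ns/activitystreams",
}

parsed = activitypub.parse(author_json)
# creates the author locally, or overwrites the matching existing record
author = parsed.to_model(model=models.Author, save=True, overwrite=True)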
def upsert_readthroughs(data, user, book_id):
"""Take a JSON string of readthroughs, find or create the
instances in the database and return a list of saved instances"""
"""Take a JSON string of readthroughs and
find or create the instances in the database"""
for read_thru in data:
start_date = (
parse_datetime(read_thru["start_date"])
if read_thru["start_date"] is not None
else None
)
finish_date = (
parse_datetime(read_thru["finish_date"])
if read_thru["finish_date"] is not None
else None
)
stopped_date = (
parse_datetime(read_thru["stopped_date"])
if read_thru["stopped_date"] is not None
else None
)
readthrough = {
"user": user,
"book": models.Edition.objects.get(id=book_id),
"progress": read_thru["progress"],
"progress_mode": read_thru["progress_mode"],
"start_date": start_date,
"finish_date": finish_date,
"stopped_date": stopped_date,
"is_active": read_thru["is_active"],
}
for read_through in data:
existing = models.ReadThrough.objects.filter(**readthrough).exists()
obj = {}
keys = [
"progress_mode",
"start_date",
"finish_date",
"stopped_date",
"is_active",
]
for key in keys:
obj[key] = read_through[key]
obj["user_id"] = user.id
obj["book_id"] = book_id
existing = models.ReadThrough.objects.filter(**obj).first()
if not existing:
models.ReadThrough.objects.create(**readthrough)
models.ReadThrough.objects.create(**obj)
def get_or_create_statuses(user, cls, data, book_id):
def upsert_statuses(user, cls, data, book_remote_id):
"""Take a JSON string of a status and
find or create the instances in the database"""
for book_status in data:
for status in data:
keys = [
"content",
"raw_content",
"content_warning",
"privacy",
"sensitive",
"published_date",
"reading_status",
"name",
"rating",
"quote",
"raw_quote",
# update ids and remove replies
status["attributedTo"] = user.remote_id
status["to"] = update_followers_address(user, status["to"])
status["cc"] = update_followers_address(user, status["cc"])
status[
"replies"
] = {} # this parses incorrectly but we can't set it without knowing the new id
status["inReplyToBook"] = book_remote_id
# save the new status, or update it if it already exists
parsed = activitypub.parse(status)
instance = parsed.to_model(model=cls, save=True, overwrite=True)
for val in [
"progress",
"progress_mode",
"position",
"endposition",
"position_mode",
]
common = book_status.keys() & keys
status = {k: book_status[k] for k in common}
status["published_date"] = parse_datetime(book_status["published_date"])
if "rating" in common:
status["rating"] = float(book_status["rating"])
book = models.Edition.objects.get(id=book_id)
exists = cls.objects.filter(**status, book=book, user=user).exists()
if not exists:
cls.objects.create(**status, book=book, user=user)
]:
if status.get(val):
setattr(instance, val, status[val])
instance.save()
def upsert_lists(user, lists, items, book_id):
"""Take a list and ListItems as JSON and
create DB entries if they don't already exist"""
def upsert_lists(user, lists, book_id):
"""Take a list of objects each containing
a list and list item as AP objects
Because we are creating new IDs we can't assume the id
will exist or be accurate, so we only use to_model for
adding new items after checking whether they exist .
"""
book = models.Edition.objects.get(id=book_id)
for lst in lists:
book_list = models.List.objects.filter(name=lst["name"], user=user).first()
if not book_list:
book_list = models.List.objects.create(
user=user,
name=lst["name"],
description=lst["description"],
curation=lst["curation"],
privacy=lst["privacy"],
)
for blist in lists:
booklist = models.List.objects.filter(name=blist["name"], user=user).first()
if not booklist:
# If the list exists but the ListItem doesn't, don't try to add it
# with the same order as an existing item
count = models.ListItem.objects.filter(book_list=book_list).count()
blist["owner"] = user.remote_id
parsed = activitypub.parse(blist)
booklist = parsed.to_model(model=models.List, save=True, overwrite=True)
for i in items[lst["name"]]:
if not models.ListItem.objects.filter(
book=book, book_list=book_list, user=user
).exists():
booklist.privacy = blist["privacy"]
booklist.save()
item = models.ListItem.objects.filter(book=book, book_list=booklist).exists()
if not item:
count = booklist.books.count()
models.ListItem.objects.create(
book=book,
book_list=book_list,
book_list=booklist,
user=user,
notes=i["notes"],
order=i["order"] + count,
notes=blist["list_item"]["notes"],
approved=blist["list_item"]["approved"],
order=count + 1,
)
def upsert_shelves(book, user, book_data):
"""Take shelf and ShelfBooks JSON objects and create
"""Take shelf JSON objects and create
DB entries if they don't already exist"""
shelves = book_data["shelves"]
for shelf in shelves:
book_shelf = models.Shelf.objects.filter(name=shelf["name"], user=user).first()
if not book_shelf:
book_shelf = models.Shelf.objects.create(
name=shelf["name"],
user=user,
identifier=shelf["identifier"],
description=shelf["description"],
editable=shelf["editable"],
privacy=shelf["privacy"],
)
for shelfbook in book_data["shelf_books"][book_shelf.identifier]:
shelved_date = parse_datetime(shelfbook["shelved_date"])
book_shelf = models.Shelf.objects.create(name=shelf["name"], user=user)
# add the book as a ShelfBook if needed
if not models.ShelfBook.objects.filter(
book=book, shelf=book_shelf, user=user
).exists():
models.ShelfBook.objects.create(
book=book,
shelf=book_shelf,
user=user,
shelved_date=shelved_date,
book=book, shelf=book_shelf, user=user, shelved_date=timezone.now()
)
def update_user_profile(user, tar, data):
"""update the user's profile from import data"""
name = data.get("name")
username = data.get("username").split("@")[0]
name = data.get("name", None)
username = data.get("preferredUsername")
user.name = name if name else username
user.summary = data.get("summary")
user.summary = strip_tags(data.get("summary", None))
user.save(update_fields=["name", "summary"])
if data.get("avatar") is not None:
if data["icon"].get("url"):
avatar_filename = next(filter(lambda n: n.startswith("avatar"), tar.getnames()))
tar.write_image_to_file(avatar_filename, user.avatar)
@@ -394,18 +279,28 @@ def update_user_profile(user, tar, data):
def update_user_settings(user, data):
"""update the user's settings from import data"""
update_fields = [
"manually_approves_followers",
"hide_follows",
"show_goal",
"show_suggested_users",
"discoverable",
"preferred_timezone",
"default_post_privacy",
update_fields = ["manually_approves_followers", "hide_follows", "discoverable"]
ap_fields = [
("manuallyApprovesFollowers", "manually_approves_followers"),
("hideFollows", "hide_follows"),
("discoverable", "discoverable"),
]
for field in update_fields:
setattr(user, field, data[field])
for (ap_field, bw_field) in ap_fields:
setattr(user, bw_field, data[ap_field])
bw_fields = [
"show_goal",
"show_suggested_users",
"default_post_privacy",
"preferred_timezone",
]
for field in bw_fields:
update_fields.append(field)
setattr(user, field, data["settings"][field])
user.save(update_fields=update_fields)
@@ -421,7 +316,7 @@ def update_goals(user, data):
"""update the user's goals from import data"""
for goal in data:
# edit the existing goal if there is one instead of making a new one
# edit the existing goal if there is one
existing = models.AnnualGoal.objects.filter(
year=goal["year"], user=user
).first()
@@ -513,3 +408,14 @@ def upsert_user_blocks_task(job_id):
return upsert_user_blocks(
parent_job.user, parent_job.import_data.get("blocked_users")
)
def update_followers_address(user, field):
"""statuses to or cc followers need to have the followers
address updated to the new local user"""
for i, audience in enumerate(field):
if audience.rsplit("/")[-1] == "followers":
field[i] = user.followers_url
return field
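For clarity, a small worked example of update_followers_address, assuming the importing user's followers collection lives at https://local.example/user/mouse/followers (a made-up address) and user is that local User:

to = ["https://www.example.com/user/rat/followers"]
cc = ["https://www.w3.org/ns/activitystreams#Public"]

update_followers_address(user, to)
# -> ["https://local.example/user/mouse/followers"], i.e. user.followers_url

update_followers_address(user, cc)
# -> unchanged: the public address doesn't end in "followers"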

View file

@@ -261,9 +261,7 @@ def notify_user_on_user_export_complete(
"""we exported your user details! aren't you proud of us"""
update_fields = update_fields or []
if not instance.complete or "complete" not in update_fields:
print("RETURNING", instance.status)
return
print("NOTIFYING")
Notification.objects.create(
user=instance.user,
notification_type=Notification.USER_EXPORT,

View file

@@ -132,7 +132,7 @@
<input type="checkbox" name="include_reviews" checked> {% trans "Book reviews" %}
</label>
<label class="label">
<input type="checkbox" name="include_quotes" checked> {% trans "Quotations" %}
<input type="checkbox" name="include_quotations" checked> {% trans "Quotations" %}
</label>
<label class="label">
<input type="checkbox" name="include_comments" checked> {% trans "Comments about books" %}

View file

@@ -35,6 +35,7 @@
<li>Direct messages</li>
<li>Replies to your statuses</li>
<li>Groups</li>
<li>Favorites</li>
</ul>
</div>
{% endblocktrans %}

View file

@@ -0,0 +1,399 @@
{
"id": "https://www.example.com/user/rat",
"type": "Person",
"preferredUsername": "rat",
"inbox": "https://www.example.com/user/rat/inbox",
"publicKey": {
"id": "https://www.example.com/user/rat/#main-key",
"owner": "https://www.example.com/user/rat",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nzzzz\n-----END PUBLIC KEY-----"
},
"followers": "https://www.example.com/user/rat/followers",
"following": "https://www.example.com/user/rat/following",
"outbox": "https://www.example.com/user/rat/outbox",
"endpoints": {
"sharedInbox": "https://www.example.com/inbox"
},
"name": "Rat",
"summary": "<p>I love to make soup in Paris and eat pizza in New York</p>",
"icon": {
"type": "Document",
"url": "avatar.png",
"name": "avatar for rat",
"@context": "https://www.w3.org/ns/activitystreams"
},
"bookwyrmUser": true,
"manuallyApprovesFollowers": true,
"discoverable": false,
"hideFollows": true,
"alsoKnownAs": [],
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
{
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"schema": "http://schema.org#",
"PropertyValue": "schema:PropertyValue",
"value": "schema:value",
"alsoKnownAs": {
"@id": "as:alsoKnownAs",
"@type": "@id"
},
"movedTo": {
"@id": "as:movedTo",
"@type": "@id"
}
}
],
"settings": {
"show_goal": false,
"preferred_timezone": "Australia/Adelaide",
"default_post_privacy": "followers",
"show_suggested_users": false
},
"goals": [
{
"goal": 12,
"year": 2023,
"privacy": "followers"
}
],
"books": [
{
"work": {
"id": "https://www.example.com/book/1",
"type": "Work",
"title": "Seeing Like a State",
"description": "<p>Examines how (sometimes quasi-) authoritarian high-modernist planning fails to deliver the goods, be they increased resources for the state or a better life for the people.</p>",
"languages": [ "English" ],
"series": "",
"seriesNumber": "",
"subjects": [],
"subjectPlaces": [],
"authors": [
"https://www.example.com/author/1"
],
"firstPublishedDate": "",
"publishedDate": "1998-03-30T00:00:00Z",
"fileLinks": [],
"lccn": "",
"editions": [
"https://www.example.com/book/2"
],
"@context": "https://www.w3.org/ns/activitystreams"
},
"edition": {
"id": "https://www.example.com/book/2",
"type": "Edition",
"openlibraryKey": "OL680025M",
"title": "Seeking Like A State",
"sortTitle": "seeing like a state",
"subtitle": "",
"description": "<p>Examines how (sometimes quasi-) authoritarian high-modernist planning fails to deliver the goods, be they increased resources for the state or a better life for the people.</p>",
"languages": ["English"],
"series": "",
"seriesNumber": "",
"subjects": [],
"subjectPlaces": [],
"authors": [
"https://www.example.com/author/1"
],
"firstPublishedDate": "",
"publishedDate": "",
"fileLinks": [],
"cover": {
"type": "Document",
"url": "covers/d273d638-191d-4ebf-b213-3c60dbf010fe.jpeg",
"name": "James C. Scott: Seeing like a state",
"@context": "https://www.w3.org/ns/activitystreams"
},
"work": "https://www.example.com/book/1",
"isbn10": "",
"isbn13": "9780300070163",
"oclcNumber": "",
"physicalFormat": "",
"physicalFormatDetail": "",
"publishers": [],
"editionRank": 4,
"@context": "https://www.w3.org/ns/activitystreams"
},
"authors": [
{
"id": "https://www.example.com/author/1",
"type": "Author",
"name": "James C. Scott",
"aliases": [
"James Campbell Scott",
"\u30b8\u30a7\u30fc\u30e0\u30ba\u30fbC. \u30b9\u30b3\u30c3\u30c8",
"\u30b8\u30a7\u30fc\u30e0\u30ba\u30fbC\u30fb\u30b9\u30b3\u30c3\u30c8",
"\u062c\u06cc\u0645\u0632 \u0633\u06cc. \u0627\u0633\u06a9\u0627\u062a",
"Jim Scott",
"\u062c\u064a\u0645\u0633 \u0633\u0643\u0648\u062a",
"James C. Scott",
"\u0414\u0436\u0435\u0439\u043c\u0441 \u0421\u043a\u043e\u0442\u0442",
"\u30b8\u30a7\u30fc\u30e0\u30b9\u30fbC \u30b9\u30b3\u30c3\u30c8",
"James Cameron Scott"
],
"bio": "<p>American political scientist and anthropologist</p>",
"wikipediaLink": "https://en.wikipedia.org/wiki/James_C._Scott",
"website": "",
"@context": "https://www.w3.org/ns/activitystreams"
}
],
"shelves": [
{
"id": "https://www.example.com/user/rat/books/read",
"type": "Shelf",
"totalItems": 1,
"first": "https://www.example.com/user/rat/books/read?page=1",
"last": "https://www.example.com/user/rat/books/read?page=1",
"name": "Read",
"owner": "https://www.example.com/user/rat",
"to": [
"https://www.w3.org/ns/activitystreams#Public"
],
"cc": [
"https://www.example.com/user/rat/followers"
],
"@context": "https://www.w3.org/ns/activitystreams"
},
{
"id": "https://www.example.com/user/rat/books/to-read",
"type": "Shelf",
"totalItems": 1,
"first": "https://www.example.com/user/rat/books/to-read?page=1",
"last": "https://www.example.com/user/rat/books/to-read?page=1",
"name": "To Read",
"owner": "https://www.example.com/user/rat",
"to": [
"https://www.w3.org/ns/activitystreams#Public"
],
"cc": [
"https://www.example.com/user/rat/followers"
],
"@context": "https://www.w3.org/ns/activitystreams"
}
],
"lists": [
{
"id": "https://www.example.com/list/2",
"type": "BookList",
"totalItems": 1,
"first": "https://www.example.com/list/2?page=1",
"last": "https://www.example.com/list/2?page=1",
"name": "my list of books",
"owner": "https://www.example.com/user/rat",
"to": [
"https://www.example.com/user/rat/followers"
],
"cc": [],
"summary": "Here is a description of my list",
"curation": "closed",
"@context": "https://www.w3.org/ns/activitystreams",
"privacy": "followers",
"list_item": {
"id": "https://www.example.com/user/rat/listitem/3",
"type": "ListItem",
"actor": "https://www.example.com/user/rat",
"book": "https://www.example.com/book/2",
"notes": "<p>It's fun.</p>",
"approved": true,
"order": 1,
"@context": "https://www.w3.org/ns/activitystreams"
}
}
],
"comments": [],
"quotations": [],
"reviews": [
{
"id": "https://www.example.com/user/rat/review/7",
"type": "Review",
"published": "2023-08-14T04:09:18.343+00:00",
"attributedTo": "https://www.example.com//user/rat",
"content": "<p>I like it</p>",
"to": [
"https://your.domain.here/user/rat/followers"
],
"cc": [],
"replies": {
"id": "https://www.example.com/user/rat/review/7/replies",
"type": "OrderedCollection",
"totalItems": 0,
"first": "https://www.example.com/user/rat/review/7/replies?page=1",
"last": "https://www.example.com/user/rat/review/7/replies?page=1",
"@context": "https://www.w3.org/ns/activitystreams"
},
"summary": "Here's a spoiler alert",
"tag": [],
"attachment": [],
"sensitive": true,
"inReplyToBook": "https://www.example.com/book/6",
"name": "great book",
"rating": 5.0,
"@context": "https://www.w3.org/ns/activitystreams",
"progress": 23,
"progress_mode": "PG"
}
],
"readthroughs": [
{
"id": 1,
"created_date": "2023-08-14T04:00:27.544Z",
"updated_date": "2023-08-14T04:00:27.546Z",
"remote_id": "https://www.example.com/user/rat/readthrough/1",
"user_id": 1,
"book_id": 4880,
"progress": null,
"progress_mode": "PG",
"start_date": "2018-01-01T00:00:00Z",
"finish_date": "2023-08-13T00:00:00Z",
"stopped_date": null,
"is_active": false
}
]
},
{
"work": {
"id": "https://www.example.com/book/3",
"type": "Work",
"title": "Sand Talk: How Indigenous Thinking Can Save the World",
"description": "",
"languages": [],
"series": "",
"seriesNumber": "",
"subjects": [],
"subjectPlaces": [],
"authors": [
"https://www.example.com/author/2"
],
"firstPublishedDate": "",
"publishedDate": "",
"fileLinks": [],
"lccn": "",
"openlibraryKey": "OL28216445M",
"editions": [
"https://www.example.com/book/4"
],
"@context": "https://www.w3.org/ns/activitystreams"
},
"edition": {
"id": "https://www.example.com/book/4",
"type": "Edition",
"title": "Sand Talk",
"sortTitle": "sand talk",
"subtitle": "How Indigenous Thinking Can Save the World",
"description": "",
"languages": [],
"series": "",
"seriesNumber": "",
"subjects": [],
"subjectPlaces": [],
"authors": [
"https://www.example.com/author/2"
],
"firstPublishedDate": "",
"publishedDate": "",
"fileLinks": [],
"cover": {
"type": "Document",
"url": "covers/6a553a08-2641-42a1-baa4-960df9edbbfc.jpeg",
"name": "Tyson Yunkaporta - Sand Talk",
"@context": "https://www.w3.org/ns/activitystreams"
},
"work": "https://www.example.com/book/3",
"isbn10": "",
"isbn13": "9780062975645",
"oclcNumber": "",
"inventaireId": "isbn:9780062975645",
"physicalFormat": "paperback",
"physicalFormatDetail": "",
"publishers": [],
"editionRank": 5,
"@context": "https://www.w3.org/ns/activitystreams"
},
"authors": [
{
"id": "https://www.example.com/author/2",
"type": "Author",
"name": "Tyson Yunkaporta",
"aliases": [],
"bio": "",
"wikipediaLink": "",
"website": "",
"@context": "https://www.w3.org/ns/activitystreams"
}
],
"shelves": [],
"lists": [],
"comments": [
{
"id": "https://www.example.com/user/rat/comment/4",
"type": "Comment",
"published": "2023-08-14T04:48:18.746+00:00",
"attributedTo": "https://www.example.com/user/rat",
"content": "<p>this is a comment about an amazing book</p>",
"to": [
"https://www.w3.org/ns/activitystreams#Public"
],
"cc": [
"https://www.example.com/user/rat/followers"
],
"replies": {
"id": "https://www.example.com/user/rat/comment/4/replies",
"type": "OrderedCollection",
"totalItems": 0,
"first": "https://www.example.com/user/rat/comment/4/replies?page=1",
"last": "https://www.example.com/user/rat/comment/4/replies?page=1",
"@context": "https://www.w3.org/ns/activitystreams"
},
"tag": [],
"attachment": [],
"sensitive": false,
"inReplyToBook": "https://www.example.com/book/4",
"readingStatus": null,
"@context": "https://www.w3.org/ns/activitystreams"
}
],
"quotations": [
{
"id": "https://www.example.com/user/rat/quotation/2",
"type": "Quotation",
"published": "2023-11-12T04:29:38.370305+00:00",
"attributedTo": "https://www.example.com/user/rat",
"content": "<p>not actually from this book lol</p>",
"to": [
"https://www.w3.org/ns/activitystreams#Public"
],
"cc": [
"https://www.example.com/user/rat/followers"
],
"replies": {
"id": "https://www.example.com/user/rat/quotation/2/replies",
"type": "OrderedCollection",
"totalItems": 0,
"first": "https://www.example.com/user/rat/quotation/2/replies?page=1",
"last": "https://www.example.com/user/rat/quotation/2/replies?page=1",
"@context": "https://www.w3.org/ns/activitystreams"
},
"tag": [],
"attachment": [],
"sensitive": false,
"summary": "spoiler ahead!",
"inReplyToBook": "https://www.example.com/book/2",
"quote": "<p>To be or not to be</p>",
"@context": "https://www.w3.org/ns/activitystreams"
}
],
"reviews": [],
"readthroughs": []
}
],
"saved_lists": [
"https://local.lists/9999"
],
"follows": [
"https://your.domain.here/user/rat"
],
"blocks": ["https://your.domain.here/user/badger"]
}
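The import tests below load this fixture straight from disk rather than unpacking it from the tar archive, along the lines of:

import json
import pathlib

json_file = pathlib.Path(__file__).parent.joinpath("../data/user_import.json")
with open(json_file, "r", encoding="utf-8") as jsonfile:
    json_data = json.loads(jsonfile.read())

json_data["books"][1]["edition"]["title"]  # "Sand Talk"
json_data["blocks"][0]                     # "https://your.domain.here/user/badger"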

View file

@@ -144,17 +144,19 @@ class BookwyrmExport(TestCase):
def test_json_export_user_settings(self):
"""Test the json export function for basic user info"""
data = export_job.json_export(self.local_user)
user_data = json.loads(data)["user"]
self.assertEqual(user_data["username"], "mouse")
user_data = json.loads(data)
self.assertEqual(user_data["preferredUsername"], "mouse")
self.assertEqual(user_data["name"], "Mouse")
self.assertEqual(user_data["summary"], "I'm a real bookmouse")
self.assertEqual(user_data["manually_approves_followers"], False)
self.assertEqual(user_data["hide_follows"], False)
self.assertEqual(user_data["show_goal"], False)
self.assertEqual(user_data["show_suggested_users"], False)
self.assertEqual(user_data["summary"], "<p>I'm a real bookmouse</p>")
self.assertEqual(user_data["manuallyApprovesFollowers"], False)
self.assertEqual(user_data["hideFollows"], False)
self.assertEqual(user_data["discoverable"], True)
self.assertEqual(user_data["preferred_timezone"], "America/Los Angeles")
self.assertEqual(user_data["default_post_privacy"], "followers")
self.assertEqual(user_data["settings"]["show_goal"], False)
self.assertEqual(user_data["settings"]["show_suggested_users"], False)
self.assertEqual(
user_data["settings"]["preferred_timezone"], "America/Los Angeles"
)
self.assertEqual(user_data["settings"]["default_post_privacy"], "followers")
def test_json_export_extended_user_data(self):
"""Test the json export function for other non-book user info"""
@@ -175,10 +177,8 @@ class BookwyrmExport(TestCase):
self.assertEqual(len(json_data["follows"]), 1)
self.assertEqual(json_data["follows"][0], "https://your.domain.here/user/rat")
# blocked users
self.assertEqual(len(json_data["blocked_users"]), 1)
self.assertEqual(
json_data["blocked_users"][0], "https://your.domain.here/user/badger"
)
self.assertEqual(len(json_data["blocks"]), 1)
self.assertEqual(json_data["blocks"][0], "https://your.domain.here/user/badger")
def test_json_export_books(self):
"""Test the json export function for extended user info"""
@@ -188,46 +188,44 @@ class BookwyrmExport(TestCase):
start_date = json_data["books"][0]["readthroughs"][0]["start_date"]
self.assertEqual(len(json_data["books"]), 1)
self.assertEqual(json_data["books"][0]["title"], "Example Edition")
self.assertEqual(json_data["books"][0]["edition"]["title"], "Example Edition")
self.assertEqual(len(json_data["books"][0]["authors"]), 1)
self.assertEqual(json_data["books"][0]["authors"][0]["name"], "Sam Zhu")
self.assertEqual(
f'"{start_date}"', DjangoJSONEncoder().encode(self.readthrough_start)
)
self.assertEqual(json_data["books"][0]["shelves"][0]["identifier"], "read")
self.assertEqual(
json_data["books"][0]["shelf_books"]["read"][0]["book_id"], self.edition.id
)
self.assertEqual(json_data["books"][0]["shelves"][0]["name"], "Read")
self.assertEqual(len(json_data["books"][0]["lists"]), 1)
self.assertEqual(json_data["books"][0]["lists"][0]["name"], "My excellent list")
self.assertEqual(len(json_data["books"][0]["list_items"]), 1)
self.assertEqual(
json_data["books"][0]["list_items"]["My excellent list"][0]["book_id"],
json_data["books"][0]["lists"][0]["list_item"]["book"],
self.edition.remote_id,
self.edition.id,
)
self.assertEqual(len(json_data["books"][0]["reviews"]), 1)
self.assertEqual(len(json_data["books"][0]["comments"]), 1)
self.assertEqual(len(json_data["books"][0]["quotes"]), 1)
self.assertEqual(len(json_data["books"][0]["quotations"]), 1)
self.assertEqual(json_data["books"][0]["reviews"][0]["name"], "my review")
self.assertEqual(json_data["books"][0]["reviews"][0]["content"], "awesome")
self.assertEqual(json_data["books"][0]["reviews"][0]["rating"], "5.00")
self.assertEqual(
json_data["books"][0]["reviews"][0]["content"], "<p>awesome</p>"
)
self.assertEqual(json_data["books"][0]["reviews"][0]["rating"], 5.0)
self.assertEqual(json_data["books"][0]["comments"][0]["content"], "ok so far")
self.assertEqual(
json_data["books"][0]["comments"][0]["content"], "<p>ok so far</p>"
)
self.assertEqual(json_data["books"][0]["comments"][0]["progress"], 15)
self.assertEqual(json_data["books"][0]["comments"][0]["progress_mode"], "PG")
self.assertEqual(
json_data["books"][0]["quotes"][0]["content"], "check this out"
json_data["books"][0]["quotations"][0]["content"], "<p>check this out</p>"
)
self.assertEqual(
json_data["books"][0]["quotes"][0]["quote"], "A rose by any other name"
json_data["books"][0]["quotations"][0]["quote"],
"<p>A rose by any other name</p>",
)
def test_tar_export(self):
"""test the tar export function"""
# TODO
pass # pylint: disable=unnecessary-pass

View file

@@ -53,37 +53,42 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
localname="badger",
)
self.work = models.Work.objects.create(title="Test Book")
self.work = models.Work.objects.create(title="Sand Talk")
self.book = models.Edition.objects.create(
title="Test Book",
title="Sand Talk",
remote_id="https://example.com/book/1234",
openlibrary_key="OL28216445M",
inventaire_id="isbn:9780062975645",
isbn_13="9780062975645",
parent_work=self.work,
)
self.json_file = pathlib.Path(__file__).parent.joinpath(
"../data/user_import.json"
)
with open(self.json_file, "r", encoding="utf-8") as jsonfile:
self.json_data = json.loads(jsonfile.read())
self.archive_file = pathlib.Path(__file__).parent.joinpath(
"../data/bookwyrm_account_export.tar.gz"
)
with open(self.archive_file, "rb") as fileobj:
with BookwyrmTarFile.open(mode="r:gz", fileobj=fileobj) as tarfile:
self.import_data = json.loads(
tarfile.read("archive.json").decode("utf-8")
)
def test_update_user_profile(self):
"""Test update the user's profile from import data"""
with patch("bookwyrm.suggested_users.remove_user_task.delay"), patch(
"bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"
):
), patch("bookwyrm.suggested_users.rerank_user_task.delay"):
with open(self.archive_file, "rb") as fileobj:
with BookwyrmTarFile.open(mode="r:gz", fileobj=fileobj) as tarfile:
models.bookwyrm_import_job.update_user_profile(
self.local_user, tarfile, self.import_data.get("user")
self.local_user, tarfile, self.json_data
)
self.local_user.refresh_from_db()
self.assertEqual(
@@ -100,10 +105,10 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
with patch("bookwyrm.suggested_users.remove_user_task.delay"), patch(
"bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"
):
), patch("bookwyrm.suggested_users.rerank_user_task.delay"):
models.bookwyrm_import_job.update_user_settings(
self.local_user, self.import_data.get("user")
self.local_user, self.json_data
)
self.local_user.refresh_from_db()
@@ -125,11 +130,11 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
privacy="public",
)
goals = [{"goal": 12, "year": 2023, "privacy": "followers"}]
with patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"):
models.bookwyrm_import_job.update_goals(
self.local_user, self.import_data.get("goals")
)
models.bookwyrm_import_job.update_goals(self.local_user, goals)
self.local_user.refresh_from_db()
goal = models.AnnualGoal.objects.get()
@@ -198,7 +203,7 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
"bookwyrm.lists_stream.add_user_lists_task.delay"
), patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"):
models.bookwyrm_import_job.upsert_follows(
self.local_user, self.import_data.get("follows")
self.local_user, self.json_data.get("follows")
)
after_follow = models.UserFollows.objects.filter(
@@ -223,7 +228,7 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
"bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"
):
models.bookwyrm_import_job.upsert_user_blocks(
self.local_user, self.import_data.get("blocked_users")
self.local_user, self.json_data.get("blocks")
)
blocked_after = models.UserBlocks.objects.filter(
@@ -234,19 +239,6 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
).exists()
self.assertTrue(blocked_after)
def test_get_or_create_authors(self):
"""Test taking a JSON string of authors find or create the authors
in the database and returning a list of author instances"""
author_exists = models.Author.objects.filter(isni="0000000108973024").exists()
self.assertFalse(author_exists)
authors = self.import_data.get("books")[0]["authors"]
bookwyrm_import_job.get_or_create_authors(authors)
author = models.Author.objects.get(isni="0000000108973024")
self.assertEqual(author.name, "James C. Scott")
def test_get_or_create_edition_existing(self):
"""Test take a JSON string of books and editions,
find or create the editions in the database and
@@ -258,7 +250,7 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
with BookwyrmTarFile.open(mode="r:gz", fileobj=fileobj) as tarfile:
bookwyrm_import_job.get_or_create_edition(
self.import_data["books"][1], tarfile
self.json_data["books"][1], tarfile
) # Sand Talk
self.assertEqual(models.Edition.objects.count(), 1)
@@ -272,54 +264,14 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
with open(self.archive_file, "rb") as fileobj:
with BookwyrmTarFile.open(mode="r:gz", fileobj=fileobj) as tarfile:
bookwyrm_import_job.get_or_create_edition(
self.import_data["books"][0], tarfile
self.json_data["books"][0], tarfile
) # Seeing like a state
self.assertTrue(
models.Edition.objects.filter(isbn_13="9780300070163").exists()
)
self.assertTrue(models.Edition.objects.filter(isbn_13="9780300070163").exists())
self.assertEqual(models.Edition.objects.count(), 2)
def test_clean_values(self):
"""test clean values we don't want when creating new instances"""
author = self.import_data.get("books")[0]["authors"][0]
edition = self.import_data.get("books")[0]["edition"]
cleaned_author = bookwyrm_import_job.clean_values(author)
cleaned_edition = bookwyrm_import_job.clean_values(edition)
self.assertEqual(cleaned_author["name"], "James C. Scott")
self.assertEqual(cleaned_author.get("id"), None)
self.assertEqual(cleaned_author.get("remote_id"), None)
self.assertEqual(cleaned_author.get("last_edited_by"), None)
self.assertEqual(cleaned_author.get("last_edited_by_id"), None)
self.assertEqual(cleaned_edition.get("title"), "Seeing Like a State")
self.assertEqual(cleaned_edition.get("id"), None)
self.assertEqual(cleaned_edition.get("remote_id"), None)
self.assertEqual(cleaned_edition.get("last_edited_by"), None)
self.assertEqual(cleaned_edition.get("last_edited_by_id"), None)
self.assertEqual(cleaned_edition.get("cover"), None)
self.assertEqual(cleaned_edition.get("preview_image "), None)
self.assertEqual(cleaned_edition.get("user"), None)
self.assertEqual(cleaned_edition.get("book_list"), None)
self.assertEqual(cleaned_edition.get("shelf_book"), None)
def test_find_existing(self):
"""Given a book or author, find any existing model instances"""
self.assertEqual(models.Book.objects.count(), 2) # includes Work
self.assertEqual(models.Edition.objects.count(), 1)
self.assertEqual(models.Edition.objects.first().title, "Test Book")
self.assertEqual(models.Edition.objects.first().openlibrary_key, "OL28216445M")
existing = bookwyrm_import_job.find_existing(
models.Edition, {"openlibrary_key": "OL28216445M", "isbn_10": None}
)
self.assertEqual(existing.title, "Test Book")
def test_upsert_readthroughs(self):
"""Test take a JSON string of readthroughs, find or create the
instances in the database and return a list of saved instances"""
@@ -332,7 +284,7 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
"remote_id": "https://example.com/mouse/readthrough/1",
"user_id": 1,
"book_id": 1234,
"progress": None,
"progress": 23,
"progress_mode": "PG",
"start_date": "2022-12-31T13:30:00Z",
"finish_date": "2023-08-23T14:30:00Z",
@@ -355,19 +307,20 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
self.assertEqual(models.ReadThrough.objects.first().book_id, self.book.id)
self.assertEqual(models.ReadThrough.objects.first().user, self.local_user)
def test_get_or_create_review_status(self):
def test_get_or_create_review(self):
"""Test get_or_create_review_status with a review"""
self.assertEqual(models.Review.objects.filter(user=self.local_user).count(), 0)
reviews = self.import_data["books"][0]["reviews"]
reviews = self.json_data["books"][0]["reviews"]
with patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"):
bookwyrm_import_job.get_or_create_statuses(
self.local_user, models.Review, reviews, self.book.id
bookwyrm_import_job.upsert_statuses(
self.local_user, models.Review, reviews, self.book.remote_id
)
self.assertEqual(models.Review.objects.filter(user=self.local_user).count(), 1)
self.assertEqual(
models.Review.objects.filter(book=self.book).first().raw_content,
"I like it",
models.Review.objects.filter(book=self.book).first().content,
"<p>I like it</p>",
)
self.assertEqual(
models.Review.objects.filter(book=self.book).first().content_warning,
@@ -376,10 +329,6 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
self.assertEqual(
models.Review.objects.filter(book=self.book).first().sensitive, True
)
self.assertEqual(
models.Review.objects.filter(book=self.book).first().published_date,
parse_datetime("2023-08-14T04:09:18.343Z"),
)
self.assertEqual(
models.Review.objects.filter(book=self.book).first().name, "great book"
)
@@ -387,19 +336,23 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
models.Review.objects.filter(book=self.book).first().rating, 5.00
)
def test_get_or_create_comment_status(self):
self.assertEqual(
models.Review.objects.filter(book=self.book).first().privacy, "followers"
)
def test_get_or_create_comment(self):
"""Test get_or_create_review_status with a comment"""
self.assertEqual(models.Comment.objects.filter(user=self.local_user).count(), 0)
comments = self.import_data["books"][1]["comments"]
comments = self.json_data["books"][1]["comments"]
with patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"):
bookwyrm_import_job.get_or_create_statuses(
self.local_user, models.Comment, comments, self.book.id
bookwyrm_import_job.upsert_statuses(
self.local_user, models.Comment, comments, self.book.remote_id
)
self.assertEqual(models.Comment.objects.filter(user=self.local_user).count(), 1)
self.assertEqual(
models.Comment.objects.filter(book=self.book).first().raw_content,
"this is a comment about an amazing book",
models.Comment.objects.filter(book=self.book).first().content,
"<p>this is a comment about an amazing book</p>",
)
self.assertEqual(
models.Comment.objects.filter(book=self.book).first().content_warning, None
@@ -407,56 +360,45 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
self.assertEqual(
models.Comment.objects.filter(book=self.book).first().sensitive, False
)
self.assertEqual(
models.Comment.objects.filter(book=self.book).first().published_date,
parse_datetime("2023-08-14T04:48:18.746Z"),
)
self.assertEqual(
models.Comment.objects.filter(book=self.book).first().progress_mode, "PG"
)
def test_get_or_create_comment_quote(self):
def test_get_or_create_quote(self):
"""Test get_or_create_review_status with a quote"""
self.assertEqual(
models.Quotation.objects.filter(user=self.local_user).count(), 0
)
quotes = self.import_data["books"][1]["quotes"]
quotes = self.json_data["books"][1]["quotations"]
with patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"):
bookwyrm_import_job.get_or_create_statuses(
self.local_user, models.Quotation, quotes, self.book.id
bookwyrm_import_job.upsert_statuses(
self.local_user, models.Quotation, quotes, self.book.remote_id
)
self.assertEqual(
models.Quotation.objects.filter(user=self.local_user).count(), 1
)
self.assertEqual(
models.Quotation.objects.filter(book=self.book).first().raw_content,
"not actually from this book lol",
models.Quotation.objects.filter(book=self.book).first().content,
"<p>not actually from this book lol</p>",
)
self.assertEqual(
models.Quotation.objects.filter(book=self.book).first().content_warning,
"spoiler ahead!",
)
self.assertEqual(
models.Quotation.objects.filter(book=self.book).first().raw_quote,
"To be or not to be",
)
self.assertEqual(
models.Quotation.objects.filter(book=self.book).first().published_date,
parse_datetime("2023-08-14T04:48:50.207Z"),
models.Quotation.objects.filter(book=self.book).first().quote,
"<p>To be or not to be</p>",
)
self.assertEqual(
models.Quotation.objects.filter(book=self.book).first().position_mode, "PG"
)
self.assertEqual(
models.Quotation.objects.filter(book=self.book).first().position, 1
)
def test_upsert_list_existing(self):
"""Take a list and ListItems as JSON and create DB entries
if they don't already exist"""
book_data = self.import_data["books"][0]
book_data = self.json_data["books"][0]
other_book = models.Edition.objects.create(
title="Another Book", remote_id="https://example.com/book/9876"
@@ -488,7 +430,6 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
bookwyrm_import_job.upsert_lists(
self.local_user,
book_data["lists"],
book_data["list_items"],
other_book.id,
)
@@ -505,7 +446,7 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
"""Take a list and ListItems as JSON and create DB entries
if they don't already exist"""
book_data = self.import_data["books"][0]
book_data = self.json_data["books"][0]
self.assertEqual(models.List.objects.filter(user=self.local_user).count(), 0)
self.assertFalse(models.ListItem.objects.filter(book=self.book.id).exists())
@@ -516,7 +457,6 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
bookwyrm_import_job.upsert_lists(
self.local_user,
book_data["lists"],
book_data["list_items"],
self.book.id,
)
@@ -542,7 +482,7 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
book=self.book, shelf=shelf, user=self.local_user
)
book_data = self.import_data["books"][0]
book_data = self.json_data["books"][0]
with patch("bookwyrm.activitystreams.add_book_statuses_task.delay"), patch(
"bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"
):
@@ -560,7 +500,7 @@ class BookwyrmImport(TestCase): # pylint: disable=too-many-public-methods
models.ShelfBook.objects.filter(user=self.local_user.id).count(), 0
)
book_data = self.import_data["books"][0]
book_data = self.json_data["books"][0]
with patch("bookwyrm.activitystreams.add_book_statuses_task.delay"), patch(
"bookwyrm.models.activitypub_mixin.broadcast_task.apply_async"