Tests for goodreads import lookup
commit 3344eed3b9
parent 97b56e9bc2
3 changed files with 70 additions and 6 deletions
@@ -9,8 +9,7 @@ from .abstract_connector import AbstractConnector, SearchResult
 class Connector(AbstractConnector):
     ''' instantiate a connector '''
     def search(self, query, min_confidence=0.1):
-        ''' right now you can't search bookwyrm sorry, but when
-        that gets implemented it will totally rule '''
+        ''' search your local database '''
         vector = SearchVector('title', weight='A') +\
             SearchVector('subtitle', weight='B') +\
             SearchVector('authors__name', weight='C') +\
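For context on the new docstring: Connector.search() builds a weighted postgres SearchVector over title, subtitle, and author names. Below is a minimal sketch of how such a vector is typically turned into ranked results with Django's SearchRank; the rank cutoff and return shape are assumptions, since the rest of the method sits outside this hunk.

    # Sketch only: assumes Django's postgres full-text search; the actual body
    # of Connector.search() beyond the vector is not shown in the hunk above.
    from django.contrib.postgres.search import SearchRank, SearchVector

    def rank_editions(queryset, query, min_confidence=0.1):
        ''' hypothetical helper: order editions by weighted full-text rank '''
        vector = SearchVector('title', weight='A') + \
            SearchVector('subtitle', weight='B') + \
            SearchVector('authors__name', weight='C')
        return queryset.annotate(
            rank=SearchRank(vector, query)
        ).filter(rank__gt=min_confidence).order_by('-rank')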
@@ -23,6 +23,7 @@ def create_job(user, csv_file, include_reviews, privacy):
         ImportItem(job=job, index=index, data=entry).save()
     return job
 
+
 def create_retry_job(user, original_job, items):
     ''' retry items that didn't import '''
     job = ImportJob.objects.create(
@@ -35,6 +36,7 @@ def create_retry_job(user, original_job, items):
         ImportItem(job=job, index=item.index, data=item.data).save()
     return job
 
+
 def start_import(job):
     ''' initalizes a csv import job '''
     result = import_data.delay(job.id)
@@ -47,7 +49,6 @@ def import_data(job_id):
     ''' does the actual lookup work in a celery task '''
     job = ImportJob.objects.get(id=job_id)
     try:
-        results = []
         for item in job.items.all():
             try:
                 item.resolve()
@@ -59,7 +60,6 @@
 
             if item.book:
                 item.save()
-                results.append(item)
 
                 # shelves book and handles reviews
                 outgoing.handle_imported_book(
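One thing these hunks leave implicit: the new test_start_import further down asserts that the job ends up with task_id == '7' after mocking import_data.delay. That only holds if start_import persists the Celery result id, presumably on the lines just after the ones shown; a sketch of that assumed continuation (import_data is the task defined later in this same module):

    # Assumed continuation of start_import(); the task_id assignment is
    # outside the lines shown in this diff, but test_start_import depends on it.
    def start_import(job):
        ''' initalizes a csv import job '''
        result = import_data.delay(job.id)  # AsyncResult-like object from celery
        job.task_id = result.id             # MockTask(7) in the test makes this '7'
        job.save()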
@@ -1,10 +1,13 @@
 ''' testing import '''
+from collections import namedtuple
 import pathlib
+from unittest.mock import patch
 
 from django.test import TestCase
 import responses
 
 from bookwyrm import goodreads_import, models
+from bookwyrm.settings import DOMAIN
 
 class GoodreadsImport(TestCase):
     ''' importing from goodreads csv '''
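A note on the responses import kept in context above: @responses.activate (used on test_import_data below) swaps out the requests transport, so an unregistered HTTP call from the import path raises immediately instead of reaching a live connector. Generic usage of the library for reference; the URL and payload here are invented, not taken from these tests.

    # Generic example of the responses library, not from this test file.
    import responses

    @responses.activate
    def test_fetches_remote_book():
        responses.add(
            responses.GET, 'https://example.com/book/1',
            json={'title': 'Test Book'}, status=200)
        # ...exercise code that calls requests.get('https://example.com/book/1')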
@@ -16,16 +19,29 @@ class GoodreadsImport(TestCase):
         self.user = models.User.objects.create_user(
             'mouse', 'mouse@mouse.mouse', 'password', local=True)
 
+        models.Connector.objects.create(
+            identifier=DOMAIN,
+            name='Local',
+            local=True,
+            connector_file='self_connector',
+            base_url='https://%s' % DOMAIN,
+            books_url='https://%s/book' % DOMAIN,
+            covers_url='https://%s/images/covers' % DOMAIN,
+            search_url='https://%s/search?q=' % DOMAIN,
+            priority=1,
+        )
+
 
     def test_create_job(self):
         ''' creates the import job entry and checks csv '''
-        goodreads_import.create_job(self.user, self.csv, False, 'public')
-        import_job = models.ImportJob.objects.get()
+        import_job = goodreads_import.create_job(
+            self.user, self.csv, False, 'public')
         self.assertEqual(import_job.user, self.user)
         self.assertEqual(import_job.include_reviews, False)
         self.assertEqual(import_job.privacy, 'public')
 
         import_items = models.ImportItem.objects.filter(job=import_job).all()
+        self.assertEqual(len(import_items), 3)
         self.assertEqual(import_items[0].index, 0)
         self.assertEqual(import_items[0].data['Book Id'], '42036538')
         self.assertEqual(import_items[1].index, 1)
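The assertions above read rows out of self.csv, but the line that opens that fixture sits outside this hunk. A hypothetical version of the missing setUp lines, using the pathlib import added earlier; the data/goodreads.csv path is a guess, not shown in this view.

    # Hypothetical remainder of setUp(); only the fixture path is invented here.
    import pathlib

    class GoodreadsImportSetupSketch:
        def setUp(self):
            datafile = pathlib.Path(__file__).parent.joinpath(
                'data/goodreads.csv')
            self.csv = open(datafile, 'r')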
@@ -34,6 +50,55 @@ class GoodreadsImport(TestCase):
         self.assertEqual(import_items[2].data['Book Id'], '28694510')
 
 
+    def test_create_retry_job(self):
+        ''' trying again with items that didn't import '''
+        import_job = goodreads_import.create_job(
+            self.user, self.csv, False, 'unlisted')
+        import_items = models.ImportItem.objects.filter(
+            job=import_job
+        ).all()[:2]
+
+        retry = goodreads_import.create_retry_job(
+            self.user, import_job, import_items)
+        self.assertNotEqual(import_job, retry)
+        self.assertEqual(retry.user, self.user)
+        self.assertEqual(retry.include_reviews, False)
+        self.assertEqual(retry.privacy, 'unlisted')
+
+        retry_items = models.ImportItem.objects.filter(job=retry).all()
+        self.assertEqual(len(retry_items), 2)
+        self.assertEqual(retry_items[0].index, 0)
+        self.assertEqual(retry_items[0].data['Book Id'], '42036538')
+        self.assertEqual(retry_items[1].index, 1)
+        self.assertEqual(retry_items[1].data['Book Id'], '52691223')
+
+
+    def test_start_import(self):
+        ''' begin loading books '''
+        import_job = goodreads_import.create_job(
+            self.user, self.csv, False, 'unlisted')
+        MockTask = namedtuple('Task', ('id'))
+        mock_task = MockTask(7)
+        with patch('bookwyrm.goodreads_import.import_data.delay') as start:
+            start.return_value = mock_task
+            goodreads_import.start_import(import_job)
+        import_job.refresh_from_db()
+        self.assertEqual(import_job.task_id, '7')
+
+
     @responses.activate
     def test_import_data(self):
         ''' resolve entry '''
+        import_job = goodreads_import.create_job(
+            self.user, self.csv, False, 'unlisted')
+        book = models.Edition.objects.create(title='Test Book')
+
+        with patch(
+                'bookwyrm.models.import_job.ImportItem.get_book_from_isbn'
+                ) as resolve:
+            resolve.return_value = book
+            with patch('bookwyrm.outgoing.handle_imported_book'):
+                goodreads_import.import_data(import_job.id)
+
+        import_item = models.ImportItem.objects.get(job=import_job, index=0)
+        self.assertEqual(import_item.book.id, book.id)
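test_import_data patches ImportItem.get_book_from_isbn but drives the flow through goodreads_import.import_data, which calls item.resolve() (see the earlier hunk). The patch only has an effect if resolve() delegates to get_book_from_isbn; that relationship is assumed, not shown in this diff, and sketched below.

    # Assumed shape of the model method the test patches; the real ImportItem
    # lives in bookwyrm.models.import_job and is not part of this diff.
    class ImportItemSketch:
        ''' minimal stand-in showing why patching get_book_from_isbn works '''
        book = None

        def resolve(self):
            ''' called by import_data() for every csv row '''
            self.book = self.get_book_from_isbn()

        def get_book_from_isbn(self):
            ''' the real version looks the ISBN up via the configured connectors '''
            raise NotImplementedError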