Use many small tasks instead of one big task

Mouse Reeve 2021-11-11 14:39:53 -08:00
parent f0ce236ffc
commit 908c9dc689

@@ -102,39 +102,40 @@ class Importer:
     def start_import(self, job):
         """initalizes a csv import job"""
-        result = import_data.delay(self.service, job.id)
+        result = start_import_task.delay(self.service, job.id)
         job.task_id = result.id
         job.save()
 
 
 @app.task(queue="low_priority")
-def import_data(source, job_id):
-    """does the actual lookup work in a celery task"""
+def start_import_task(source, job_id):
+    """trigger the child tasks for each row"""
     job = ImportJob.objects.get(id=job_id)
+    # these are sub-tasks so that one big task doesn't use up all the memory in celery
+    for item in job.items.values("id").all():
+        import_item_task.delay(source, item["id"])
+
+
+@app.task(queue="low_priority")
+def import_item_task(source, item_id):
+    """resolve a row into a book"""
+    item = models.ImportItem.objects.get(id=item_id)
     try:
-        for item in job.items.all():
-            try:
-                item.resolve()
-            except Exception as err:  # pylint: disable=broad-except
-                logger.exception(err)
-                item.fail_reason = _("Error loading book")
-                item.save()
-                continue
+        item.resolve()
+    except Exception as err:  # pylint: disable=broad-except
+        logger.exception(err)
+        item.fail_reason = _("Error loading book")
+        item.save()
+        return
 
-            if item.book:
-                # shelves book and handles reviews
-                handle_imported_book(
-                    source, job.user, item, job.include_reviews, job.privacy
-                )
-            else:
-                item.fail_reason = _("Could not find a match for book")
-                item.save()
-    finally:
-        job.complete = True
-        job.save()
+    if item.book or item.book_guess:
+        item.save()
+
+    if item.book:
+        job = item.job
+        # shelves book and handles reviews
+        handle_imported_book(source, job.user, item, job.include_reviews, job.privacy)
+    else:
+        item.fail_reason = _("Could not find a match for book")
+        item.save()
 
 
 def handle_imported_book(source, user, item, include_reviews, privacy):
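
The commit message and the new in-code comment describe the same idea: rather than one long-running Celery task that loops over every row of an import, a lightweight parent task enqueues one small child task per row, so no single worker process has to hold the whole job at once and a crash or restart costs at most the row in flight. The sketch below is a minimal, self-contained illustration of that fan-out pattern, not BookWyrm code; the task names, the ROWS dict standing in for a database table, and the in-memory broker are assumptions made for the example.

    # fan_out_example.py: illustrative sketch of parent/child Celery tasks
    from celery import Celery

    # in-memory broker and backend so the sketch has no external dependencies
    app = Celery("fan_out_example", broker="memory://", backend="cache+memory://")

    # stand-in for the job's rows in a database: {row_id: raw csv line}
    ROWS = {
        1: "Parable of the Sower,Octavia E. Butler",
        2: "this row cannot be parsed",
    }


    @app.task(queue="low_priority")
    def start_job(job_id):
        """parent task: iterates only row ids and fans out one child task per row"""
        for row_id in ROWS:
            import_row.delay(job_id, row_id)


    @app.task(queue="low_priority")
    def import_row(job_id, row_id):
        """child task: does the expensive per-row work; a bad row fails on its own"""
        try:
            title, author = ROWS[row_id].split(",", maxsplit=1)
        except ValueError:
            # record the failure and stop, mirroring the early return in import_item_task
            print(f"job {job_id}, row {row_id}: could not parse")
            return
        print(f"job {job_id}, row {row_id}: matched '{title}' by {author}")

Two details of the diff follow the same reasoning. The parent task queries job.items.values("id"), which yields dictionaries containing only the id, so nothing heavier than a primary key passes through its loop (values_list("id", flat=True) is the other common Django spelling for this). And because each row now runs in its own task, the per-row continue becomes a return, and the old finally block that marked the job complete disappears from this hunk, since no single task watches the whole job finish any more.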