Use different export job fields for the different storage backends

This way, the database definition is not dependent on the runtime configuration.
This commit is contained in:
Bart Schuurmans 2024-03-24 12:22:17 +01:00
parent 073f62d5bb
commit 471233c1dc
2 changed files with 49 additions and 6 deletions

View file

@@ -0,0 +1,28 @@
# Generated by Django 3.2.25 on 2024-03-24 11:20
import bookwyrm.storage_backends
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0197_merge_20240324_0235"),
]
operations = [
migrations.RenameField(
model_name="bookwyrmexportjob",
old_name="export_data",
new_name="export_data_file",
),
migrations.AddField(
model_name="bookwyrmexportjob",
name="export_data_s3",
field=models.FileField(
null=True,
storage=bookwyrm.storage_backends.ExportsS3Storage,
upload_to="",
),
),
]

View file

@@ -37,15 +37,30 @@ class BookwyrmAwsSession(BotoSession):
class BookwyrmExportJob(ParentJob):
"""entry for a specific request to export a bookwyrm user"""
if settings.USE_S3:
storage = storage_backends.ExportsS3Storage
else:
storage = storage_backends.ExportsFileStorage
# Only one of these fields is used, dependent on the configuration.
export_data_file = FileField(null=True, storage=storage_backends.ExportsFileStorage)
export_data_s3 = FileField(null=True, storage=storage_backends.ExportsS3Storage)
export_data = FileField(null=True, storage=storage)
export_json = JSONField(null=True, encoder=DjangoJSONEncoder)
json_completed = BooleanField(default=False)
@property
def export_data(self):
"""returns the file field of the configured storage backend"""
# TODO: We could check whether a field for a different backend is
# filled, to support migrating to a different backend.
if settings.USE_S3:
return self.export_data_s3
return self.export_data_file
@export_data.setter
def export_data(self, value):
"""sets the file field of the configured storage backend"""
if settings.USE_S3:
self.export_data_s3 = value
else:
self.export_data_file = value
def start_job(self):
"""Start the job"""
@@ -284,7 +299,7 @@ def start_export_task(**kwargs):
# prepare the initial file and base json
job.export_data = ContentFile(b"", str(uuid4()))
job.export_json = job.user.to_activity()
job.save(update_fields=["export_data", "export_json"])
job.save(update_fields=["export_data_file", "export_data_s3", "export_json"])
# let's go
json_export.delay(job_id=job.id, job_user=job.user.id, no_children=False)