mirror of
https://github.com/bookwyrm-social/bookwyrm.git
synced 2025-01-26 17:08:09 +00:00
Use different export job fields for the different storage backends
This way, the database definition is not dependent on the runtime configuration.
This commit is contained in:
parent
073f62d5bb
commit
471233c1dc
2 changed files with 49 additions and 6 deletions
28
bookwyrm/migrations/0198_export_job_separate_file_fields.py
Normal file
28
bookwyrm/migrations/0198_export_job_separate_file_fields.py
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
# Generated by Django 3.2.25 on 2024-03-24 11:20
|
||||||
|
|
||||||
|
import bookwyrm.storage_backends
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    """Split the single export-data field into one field per storage backend.

    Renames the existing ``export_data`` column to ``export_data_file``
    (local file storage) and adds a new ``export_data_s3`` column bound to
    the S3 storage backend, so the schema no longer depends on the runtime
    ``USE_S3`` setting.
    """

    dependencies = [
        ("bookwyrm", "0197_merge_20240324_0235"),
    ]

    operations = [
        # Keep the existing column's data: the old catch-all field becomes
        # the local-file-storage field.
        migrations.RenameField(
            model_name="bookwyrmexportjob",
            old_name="export_data",
            new_name="export_data_file",
        ),
        # New, initially-empty column dedicated to the S3 backend.
        migrations.AddField(
            model_name="bookwyrmexportjob",
            name="export_data_s3",
            field=models.FileField(
                null=True,
                storage=bookwyrm.storage_backends.ExportsS3Storage,
                upload_to="",
            ),
        ),
    ]
|
|
@ -37,15 +37,30 @@ class BookwyrmAwsSession(BotoSession):
|
||||||
class BookwyrmExportJob(ParentJob):
|
class BookwyrmExportJob(ParentJob):
|
||||||
"""entry for a specific request to export a bookwyrm user"""
|
"""entry for a specific request to export a bookwyrm user"""
|
||||||
|
|
||||||
if settings.USE_S3:
|
# Only one of these fields is used, dependent on the configuration.
|
||||||
storage = storage_backends.ExportsS3Storage
|
export_data_file = FileField(null=True, storage=storage_backends.ExportsFileStorage)
|
||||||
else:
|
export_data_s3 = FileField(null=True, storage=storage_backends.ExportsS3Storage)
|
||||||
storage = storage_backends.ExportsFileStorage
|
|
||||||
|
|
||||||
export_data = FileField(null=True, storage=storage)
|
|
||||||
export_json = JSONField(null=True, encoder=DjangoJSONEncoder)
|
export_json = JSONField(null=True, encoder=DjangoJSONEncoder)
|
||||||
json_completed = BooleanField(default=False)
|
json_completed = BooleanField(default=False)
|
||||||
|
|
||||||
|
@property
def export_data(self):
    """Return the file field belonging to the configured storage backend."""
    # TODO: We could check whether a field for a different backend is
    # filled, to support migrating to a different backend.
    if not settings.USE_S3:
        return self.export_data_file
    return self.export_data_s3
|
||||||
|
|
||||||
|
@export_data.setter
def export_data(self, value):
    """Store ``value`` on whichever backend field the configuration selects."""
    target = "export_data_s3" if settings.USE_S3 else "export_data_file"
    setattr(self, target, value)
|
||||||
|
|
||||||
def start_job(self):
|
def start_job(self):
|
||||||
"""Start the job"""
|
"""Start the job"""
|
||||||
|
|
||||||
|
@ -284,7 +299,7 @@ def start_export_task(**kwargs):
|
||||||
# prepare the initial file and base json
|
# prepare the initial file and base json
|
||||||
job.export_data = ContentFile(b"", str(uuid4()))
|
job.export_data = ContentFile(b"", str(uuid4()))
|
||||||
job.export_json = job.user.to_activity()
|
job.export_json = job.user.to_activity()
|
||||||
job.save(update_fields=["export_data", "export_json"])
|
job.save(update_fields=["export_data_file", "export_data_s3", "export_json"])
|
||||||
|
|
||||||
# let's go
|
# let's go
|
||||||
json_export.delay(job_id=job.id, job_user=job.user.id, no_children=False)
|
json_export.delay(job_id=job.id, job_user=job.user.id, no_children=False)
|
||||||
|
|
Loading…
Reference in a new issue