Update django-storages to 1.14.2

The problem that boto3 closes files has been worked around in django-storages.
Bart Schuurmans 2024-03-29 22:13:09 +01:00
parent cfcb873235
commit 4fa823e8df
3 changed files with 23 additions and 55 deletions
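As the commit message notes, the clone-and-reupload workaround for boto3 closing uploaded files now lives upstream in django-storages, so the custom _save() override is dropped below and the backends subclass the renamed storages.backends.s3.S3Storage directly. A minimal sketch of the resulting backend shape, assuming the pinned django-storages 1.14.2 (names taken from the diff that follows):

# Sketch only: an S3-backed storage after the upgrade. The SpooledTemporaryFile
# workaround in _save() is no longer needed because the library now works
# around the boto3 behaviour itself, per the commit message.
from storages.backends.s3 import S3Storage


class ImagesStorage(S3Storage):  # pylint: disable=abstract-method
    """Storage class for Image files"""

    location = "images"
    default_acl = "public-read"
    file_overwrite = False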


@@ -1,55 +1,23 @@
 """Handles backends for storages"""
-import os
-from tempfile import SpooledTemporaryFile
 from django.core.files.storage import FileSystemStorage
-from storages.backends.s3boto3 import S3Boto3Storage
+from storages.backends.s3 import S3Storage
 from storages.backends.azure_storage import AzureStorage
 
 
-class StaticStorage(S3Boto3Storage):  # pylint: disable=abstract-method
+class StaticStorage(S3Storage):  # pylint: disable=abstract-method
     """Storage class for Static contents"""
 
     location = "static"
     default_acl = "public-read"
 
 
-class ImagesStorage(S3Boto3Storage):  # pylint: disable=abstract-method
+class ImagesStorage(S3Storage):  # pylint: disable=abstract-method
     """Storage class for Image files"""
 
     location = "images"
     default_acl = "public-read"
     file_overwrite = False
 
-    """
-    This is our custom version of S3Boto3Storage that fixes a bug in
-    boto3 where the passed in file is closed upon upload.
-    From:
-    https://github.com/matthewwithanm/django-imagekit/issues/391#issuecomment-275367006
-    https://github.com/boto/boto3/issues/929
-    https://github.com/matthewwithanm/django-imagekit/issues/391
-    """
-
-    def _save(self, name, content):
-        """
-        We create a clone of the content file as when this is passed to
-        boto3 it wrongly closes the file upon upload where as the storage
-        backend expects it to still be open
-        """
-        # Seek our content back to the start
-        content.seek(0, os.SEEK_SET)
-        # Create a temporary file that will write to disk after a specified
-        # size. This file will be automatically deleted when closed by
-        # boto3 or after exiting the `with` statement if the boto3 is fixed
-        with SpooledTemporaryFile() as content_autoclose:
-            # Write our original content into our copy that will be closed by boto3
-            content_autoclose.write(content.read())
-            # Upload the object which will auto close the
-            # content_autoclose instance
-            return super()._save(name, content_autoclose)
-
 
 class AzureStaticStorage(AzureStorage):  # pylint: disable=abstract-method
     """Storage class for Static contents"""
@@ -71,7 +39,7 @@ class ExportsFileStorage(FileSystemStorage):  # pylint: disable=abstract-method
     overwrite_files = False
 
 
-class ExportsS3Storage(S3Boto3Storage):  # pylint: disable=abstract-method
+class ExportsS3Storage(S3Storage):  # pylint: disable=abstract-method
     """Storage class for exports contents with S3"""
 
     location = "exports"


@@ -14,9 +14,9 @@ from django.urls import reverse
 from django.utils.decorators import method_decorator
 from django.shortcuts import redirect
 
-from storages.backends.s3boto3 import S3Boto3Storage
+from storages.backends.s3 import S3Storage
 
-from bookwyrm import models, storage_backends
+from bookwyrm import models
 from bookwyrm.models.bookwyrm_export_job import BookwyrmExportJob
 from bookwyrm import settings
@@ -220,17 +220,16 @@ class ExportUser(View):
 class ExportArchive(View):
     """Serve the archive file"""
 
-    # TODO: how do we serve s3 files?
     def get(self, request, archive_id):
         """download user export file"""
         export = BookwyrmExportJob.objects.get(task_id=archive_id, user=request.user)
 
-        if isinstance(export.export_data.storage, storage_backends.ExportsS3Storage):
+        if settings.USE_S3:
             # make custom_domain None so we can sign the url
             # see https://github.com/jschneier/django-storages/issues/944
-            storage = S3Boto3Storage(querystring_auth=True, custom_domain=None)
+            storage = S3Storage(querystring_auth=True, custom_domain=None)
             try:
-                url = S3Boto3Storage.url(
+                url = S3Storage.url(
                     storage,
                     f"/exports/{export.task_id}.tar.gz",
                     expire=settings.S3_SIGNED_URL_EXPIRY,
@@ -239,7 +238,10 @@ class ExportArchive(View):
                 raise Http404()
             return redirect(url)
 
-        if isinstance(export.export_data.storage, storage_backends.ExportsFileStorage):
-            try:
-                return HttpResponse(
-                    export.export_data,
+        if settings.USE_AZURE:
+            # not implemented
+            return HttpResponseServerError()
+
+        try:
+            return HttpResponse(
+                export.export_data,
@@ -250,5 +252,3 @@ class ExportArchive(View):
-                )
-            except FileNotFoundError:
-                raise Http404()
-
-        return HttpResponseServerError()
+            )
+        except FileNotFoundError:
+            raise Http404()
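For reference, the signed-URL pattern in the view above can also be written as a bound call on a storage instance; a minimal sketch under the same assumptions (django-storages 1.14.2, BookWyrm's own S3_SIGNED_URL_EXPIRY setting), with a placeholder object key:

# Sketch only: serve a private S3 export through a time-limited signed URL.
# custom_domain is cleared so the URL can be signed (django-storages issue #944),
# and querystring_auth=True puts the auth parameters in the query string.
from storages.backends.s3 import S3Storage

signing_storage = S3Storage(querystring_auth=True, custom_domain=None)
signed_url = signing_storage.url(
    "/exports/example-task-id.tar.gz",  # placeholder key; the view uses the export's task_id
    expire=900,  # seconds; the view passes settings.S3_SIGNED_URL_EXPIRY
)

The unbound S3Storage.url(storage, ...) call in the diff is equivalent; the bound form is shown here only for readability.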


@@ -14,7 +14,7 @@ django-oauth-toolkit==2.3.0
 django-pgtrigger==4.11.0
 django-redis==5.2.0
 django-sass-processor==1.2.2
-django-storages==1.13.2
+django-storages==1.14.2
 django-storages[azure]
 environs==11.0.0
 flower==2.0.1