Error handling for attachment batch delete process (#28184)

Co-authored-by: Claire <claire.github-309c@sitedethib.com>
Michael Stanclift, 2023-12-07 08:40:44 -06:00, committed by GitHub
parent ad34d33bfd
commit da3d8aff79

@@ -4,7 +4,8 @@ class AttachmentBatch
   # Maximum amount of objects you can delete in an S3 API call. It's
   # important to remember that this does not correspond to the number
   # of records in the batch, since records can have multiple attachments
-  LIMIT = 1_000
+  LIMIT = ENV.fetch('S3_BATCH_DELETE_LIMIT', 1000).to_i
+  MAX_RETRY = ENV.fetch('S3_BATCH_DELETE_RETRY', 3).to_i

   # Attributes generated and maintained by Paperclip (not all of them
   # are always used on every class, however)
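
The two constants above now read from the environment, so the batch size and retry count can be tuned per deployment without a code change; the defaults remain 1000 objects per call and 3 attempts. As a sketch (the values below are illustrative only), the knobs could be set in the instance's environment file, e.g. .env.production:

S3_BATCH_DELETE_LIMIT=500
S3_BATCH_DELETE_RETRY=5
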
@@ -95,6 +96,7 @@ class AttachmentBatch
       # objects can be processed at once, so we have to potentially
       # separate them into multiple calls.
+      retries = 0
       keys.each_slice(LIMIT) do |keys_slice|
         logger.debug { "Deleting #{keys_slice.size} objects" }
@@ -102,6 +104,17 @@ class AttachmentBatch
           objects: keys_slice.map { |key| { key: key } },
           quiet: true,
         })
+      rescue => e
+        retries += 1
+
+        if retries < MAX_RETRY
+          logger.debug "Retry #{retries}/#{MAX_RETRY} after #{e.message}"
+          sleep 2**retries
+          retry
+        else
+          logger.error "Batch deletion from S3 failed after #{e.message}"
+          raise e
+        end
       end
     end
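
Taken together, the change wraps each batched S3 delete call in a rescue that backs off exponentially (2, 4, 8, ... seconds) before giving up and re-raising, and because retries is initialised outside the each_slice loop, the MAX_RETRY budget is shared across all slices rather than reset per slice. Below is a minimal, self-contained Ruby sketch of the same pattern; delete_slice is a hypothetical stand-in for the S3 multi-object delete call whose arguments appear (truncated) in the last hunk, and the ENV knobs mirror the ones introduced here:

limit     = ENV.fetch('S3_BATCH_DELETE_LIMIT', 1000).to_i
max_retry = ENV.fetch('S3_BATCH_DELETE_RETRY', 3).to_i

# Hypothetical stand-in for the real S3 batch delete request
def delete_slice(keys_slice)
  puts "Deleting #{keys_slice.size} objects"
end

keys = ('a'..'z').to_a
retries = 0

keys.each_slice(limit) do |keys_slice|
  begin
    delete_slice(keys_slice)
  rescue => e
    retries += 1

    if retries < max_retry
      sleep 2**retries # back off 2s, then 4s, then 8s, ...
      retry            # re-run the begin block for this slice
    else
      raise e          # retry budget exhausted; surface the error to the caller
    end
  end
end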