Make GCS delete concurrency configurable

Simon Detheridge 2020-03-16 15:54:05 +00:00
parent 9b658dda18
commit b37c52fc3a
2 changed files with 8 additions and 3 deletions


@@ -209,9 +209,13 @@ async function deleteDirectory(bucketName, key) {
       .bucket(bucketName)
       .getFiles({ directory: key })
 
-    await asyncPool(50, files, async file => {
-      await deleteFile(bucketName, file.name)
-    })
+    await asyncPool(
+      settings.filestore.gcs.deleteConcurrency,
+      files,
+      async file => {
+        await deleteFile(bucketName, file.name)
+      }
+    )
   } catch (err) {
     const error = PersistorHelper.wrapError(
       err,

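For context: asyncPool here is presumably tiny-async-pool's asyncPool(poolLimit, array, iteratorFn), which keeps at most poolLimit iterator promises in flight and resolves once every item has been processed. A minimal standalone sketch of that behaviour (not part of this commit; the delay helper and the numbers are illustrative):

    const asyncPool = require('tiny-async-pool')

    // Resolve after ms milliseconds.
    const delay = ms => new Promise(resolve => setTimeout(resolve, ms))

    async function demo() {
      // With a pool limit of 2, at most two of the four tasks run concurrently;
      // the next task starts as soon as an earlier one settles.
      const results = await asyncPool(2, [1000, 5000, 3000, 2000], async ms => {
        await delay(ms)
        return ms
      })
      console.log(results) // results follow input order: [ 1000, 5000, 3000, 2000 ]
    }

    demo()

Changing settings.filestore.gcs.deleteConcurrency therefore only changes how many deleteFile calls run at once, not which files get deleted.
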

@@ -42,6 +42,7 @@ settings =
       projectId: process.env['GCS_PROJECT_ID']
       unlockBeforeDelete: process.env['GCS_UNLOCK_BEFORE_DELETE'] == "true" # unlock an event-based hold before deleting. default false
       deletedBucketSuffix: process.env['GCS_DELETED_BUCKET_SUFFIX'] # if present, copy file to another bucket on delete. default null
+      deleteConcurrency: parseInt(process.env['GCS_DELETE_CONCURRENCY']) || 50
 
     s3:
       if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']?
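
With the CoffeeScript setting above, the value falls back to 50 whenever GCS_DELETE_CONCURRENCY is unset or not numeric, since parseInt(undefined) is NaN and NaN || 50 evaluates to 50. A small sketch of the same pattern in plain JavaScript (the function name is illustrative, not from the codebase):

    // Mirror of the settings line: use the env var when it parses, else 50.
    function getDeleteConcurrency(env = process.env) {
      return parseInt(env.GCS_DELETE_CONCURRENCY) || 50
    }

    console.log(getDeleteConcurrency({}))                               // 50 (unset)
    console.log(getDeleteConcurrency({ GCS_DELETE_CONCURRENCY: '20' })) // 20
    console.log(getDeleteConcurrency({ GCS_DELETE_CONCURRENCY: '0' }))  // 50 ('0' parses to 0, which is falsy)

One consequence of the || fallback is that the concurrency cannot be set to 0; an explicit 0 or any non-numeric value ends up as the default of 50.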