diff --git a/services/docstore/app/js/DocArchiveManager.js b/services/docstore/app/js/DocArchiveManager.js
index b9fe93ac0f..13bf8db6cd 100644
--- a/services/docstore/app/js/DocArchiveManager.js
+++ b/services/docstore/app/js/DocArchiveManager.js
@@ -11,6 +11,7 @@ const pMap = require('p-map')
 
 const PARALLEL_JOBS = settings.parallelArchiveJobs
 const DESTROY_BATCH_SIZE = settings.destroyBatchSize
+const DESTROY_RETRY_COUNT = settings.destroyRetryCount
 
 module.exports = {
   archiveAllDocs: callbackify(archiveAllDocs),
@@ -204,14 +205,32 @@ async function destroyDoc(projectId, docId) {
   }
 
   if (doc.inS3) {
-    await PersistorManager.deleteObject(
-      settings.docstore.bucket,
-      `${projectId}/${docId}`
-    )
+    await destroyArchiveWithRetry(projectId, docId)
   }
   await MongoManager.destroyDoc(docId)
 }
 
+async function destroyArchiveWithRetry(projectId, docId) {
+  let attempt = 0
+  let lastError
+  while (attempt++ <= DESTROY_RETRY_COUNT) {
+    try {
+      await PersistorManager.deleteObject(
+        settings.docstore.bucket,
+        `${projectId}/${docId}`
+      )
+      return
+    } catch (err) {
+      lastError = err
+      logger.warn(
+        { projectId, docId, err, attempt },
+        'destroying archive failed'
+      )
+    }
+  }
+  throw lastError
+}
+
 async function _streamToString(stream) {
   const chunks = []
   return new Promise((resolve, reject) => {
diff --git a/services/docstore/config/settings.defaults.js b/services/docstore/config/settings.defaults.js
index 7f42154770..69fed07181 100644
--- a/services/docstore/config/settings.defaults.js
+++ b/services/docstore/config/settings.defaults.js
@@ -43,6 +43,7 @@ const Settings = {
   max_doc_length: parseInt(process.env.MAX_DOC_LENGTH) || 2 * 1024 * 1024, // 2mb
 
   destroyBatchSize: parseInt(process.env.DESTROY_BATCH_SIZE, 10) || 2000,
+  destroyRetryCount: parseInt(process.env.DESTROY_RETRY_COUNT || '3', 10),
   parallelArchiveJobs: parseInt(process.env.PARALLEL_ARCHIVE_JOBS, 10) || 5
 }
 
diff --git a/services/docstore/test/unit/js/DocArchiveManagerTests.js b/services/docstore/test/unit/js/DocArchiveManagerTests.js
index ceb490108f..a324cfe916 100644
--- a/services/docstore/test/unit/js/DocArchiveManagerTests.js
+++ b/services/docstore/test/unit/js/DocArchiveManagerTests.js
@@ -32,7 +32,10 @@ describe('DocArchiveManager', function () {
     Settings = {
       docstore: {
         bucket: 'wombat'
-      }
+      },
+      parallelArchiveJobs: 3,
+      destroyBatchSize: 10,
+      destroyRetryCount: 3
     }
     HashDigest = sinon.stub().returns(md5Sum)
     HashUpdate = sinon.stub().returns({ digest: HashDigest })
@@ -436,6 +439,41 @@ describe('DocArchiveManager', function () {
       it('should delete the doc in mongo', async function () {
         await DocArchiveManager.promises.destroyDoc(projectId, docId)
       })
+
+      describe('when the destroy request errors', function () {
+        beforeEach(function () {
+          mongoDocs[0].inS3 = true
+          PersistorManager.deleteObject.onFirstCall().rejects(new Error('1'))
+          PersistorManager.deleteObject.onSecondCall().rejects(new Error('2'))
+          PersistorManager.deleteObject.onThirdCall().resolves()
+        })
+
+        it('should retry', async function () {
+          await DocArchiveManager.promises.destroyDoc(projectId, docId)
+          expect(PersistorManager.deleteObject).to.have.been.calledWith(
+            Settings.docstore.bucket,
+            `${projectId}/${docId}`
+          )
+          expect(PersistorManager.deleteObject.callCount).to.equal(3)
+        })
+      })
+
+      describe('when the destroy request errors permanent', function () {
+        beforeEach(function () {
+          mongoDocs[0].inS3 = true
+          PersistorManager.deleteObject.rejects(new Error('permanent'))
+        })
+
+        it('should retry and fail eventually', async function () {
+          await expect(DocArchiveManager.promises.destroyDoc(projectId, docId))
+            .to.eventually.be.rejected
+          expect(PersistorManager.deleteObject).to.have.been.calledWith(
+            Settings.docstore.bucket,
+            `${projectId}/${docId}`
+          )
+          expect(PersistorManager.deleteObject.callCount).to.equal(4)
+        })
+      })
     })
 
     describe('when the doc is not in s3', function () {
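
Note on the retry semantics above (not part of the patch): the loop condition attempt++ <= DESTROY_RETRY_COUNT performs the initial attempt plus DESTROY_RETRY_COUNT retries, so the default of 3 allows up to four deleteObject calls, which is what the permanent-failure test asserts with callCount === 4. Below is a minimal standalone sketch of that loop; the deleteWithRetry helper name, the hard-coded retryCount, and the failing stub standing in for PersistorManager.deleteObject are illustrative only.

// Sketch only: isolates the retry loop from destroyArchiveWithRetry so the
// attempt count is easy to verify. retryCount mirrors settings.destroyRetryCount.
const retryCount = 3

async function deleteWithRetry(deleteFn) {
  let attempt = 0
  let lastError
  while (attempt++ <= retryCount) {
    try {
      await deleteFn() // stands in for PersistorManager.deleteObject(bucket, key)
      return
    } catch (err) {
      lastError = err // remember the most recent failure
    }
  }
  throw lastError // every attempt failed; surface the last error
}

// Usage: a stub that always rejects is attempted 1 + retryCount = 4 times.
let calls = 0
deleteWithRetry(async () => {
  calls++
  throw new Error('permanent')
}).catch(() => console.log('attempts:', calls)) // attempts: 4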