Mirror of https://github.com/overleaf/overleaf.git
[DocArchiveManager] retry the deletion of doc archives three times
parent 54c6cce902
commit 9ba8237850

3 changed files with 63 additions and 5 deletions
@@ -11,6 +11,7 @@ const pMap = require('p-map')
 
 const PARALLEL_JOBS = settings.parallelArchiveJobs
 const DESTROY_BATCH_SIZE = settings.destroyBatchSize
+const DESTROY_RETRY_COUNT = settings.destroyRetryCount
 
 module.exports = {
   archiveAllDocs: callbackify(archiveAllDocs),
@@ -204,14 +205,32 @@ async function destroyDoc(projectId, docId) {
   }
 
   if (doc.inS3) {
-    await PersistorManager.deleteObject(
-      settings.docstore.bucket,
-      `${projectId}/${docId}`
-    )
+    await destroyArchiveWithRetry(projectId, docId)
   }
   await MongoManager.destroyDoc(docId)
 }
 
+async function destroyArchiveWithRetry(projectId, docId) {
+  let attempt = 0
+  let lastError
+  while (attempt++ <= DESTROY_RETRY_COUNT) {
+    try {
+      await PersistorManager.deleteObject(
+        settings.docstore.bucket,
+        `${projectId}/${docId}`
+      )
+      return
+    } catch (err) {
+      lastError = err
+      logger.warn(
+        { projectId, docId, err, attempt },
+        'destroying archive failed'
+      )
+    }
+  }
+  throw lastError
+}
+
 async function _streamToString(stream) {
   const chunks = []
   return new Promise((resolve, reject) => {
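A note on the loop bound in destroyArchiveWithRetry: attempt++ is a post-increment, so the body of while (attempt++ <= DESTROY_RETRY_COUNT) runs for attempt values 1 through DESTROY_RETRY_COUNT + 1, i.e. one initial try plus DESTROY_RETRY_COUNT retries. With the default of 3 retries, a permanently failing delete is attempted 4 times, which matches the callCount of 4 asserted in the permanent-failure test further down. A self-contained sketch of the same pattern (withRetry and operation are illustrative names, not from the commit):

// Standalone sketch of the retry loop above; all names are illustrative.
async function withRetry(operation, retryCount) {
  let attempt = 0
  let lastError
  // Post-increment: the body runs for attempt = 1 .. retryCount + 1,
  // i.e. one initial attempt plus retryCount retries.
  while (attempt++ <= retryCount) {
    try {
      return await operation(attempt)
    } catch (err) {
      lastError = err
    }
  }
  throw lastError
}

// With retryCount = 3 the operation is invoked at most 4 times.
withRetry(async attempt => {
  if (attempt < 4) throw new Error(`attempt ${attempt} failed`)
  return 'ok'
}, 3).then(console.log) // logs 'ok' on the fourth attempt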
@@ -43,6 +43,7 @@ const Settings = {
   max_doc_length: parseInt(process.env.MAX_DOC_LENGTH) || 2 * 1024 * 1024, // 2mb
 
   destroyBatchSize: parseInt(process.env.DESTROY_BATCH_SIZE, 10) || 2000,
+  destroyRetryCount: parseInt(process.env.DESTROY_RETRY_COUNT || '3', 10),
   parallelArchiveJobs: parseInt(process.env.PARALLEL_ARCHIVE_JOBS, 10) || 5
 }
 
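Note where the fallback sits: destroyRetryCount applies its default inside parseInt (process.env.DESTROY_RETRY_COUNT || '3'), while the neighbouring settings apply it to the parsed result. The difference matters for an explicit DESTROY_RETRY_COUNT=0 (retries disabled): a parsed 0 is falsy, so the parseInt(...) || n style would silently replace it with the default. A quick comparison:

// Fallback inside parseInt: an explicit "0" from the environment survives.
parseInt('0' || '3', 10)        // => 0
parseInt(undefined || '3', 10)  // => 3 (variable unset)

// Fallback applied to the result: a parsed 0 is falsy and gets overridden.
parseInt('0', 10) || 3          // => 3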
@@ -32,7 +32,10 @@ describe('DocArchiveManager', function () {
     Settings = {
       docstore: {
         bucket: 'wombat'
-      }
+      },
+      parallelArchiveJobs: 3,
+      destroyBatchSize: 10,
+      destroyRetryCount: 3
     }
     HashDigest = sinon.stub().returns(md5Sum)
     HashUpdate = sinon.stub().returns({ digest: HashDigest })
@@ -436,6 +439,41 @@ describe('DocArchiveManager', function () {
     it('should delete the doc in mongo', async function () {
       await DocArchiveManager.promises.destroyDoc(projectId, docId)
     })
+
+    describe('when the destroy request errors', function () {
+      beforeEach(function () {
+        mongoDocs[0].inS3 = true
+        PersistorManager.deleteObject.onFirstCall().rejects(new Error('1'))
+        PersistorManager.deleteObject.onSecondCall().rejects(new Error('2'))
+        PersistorManager.deleteObject.onThirdCall().resolves()
+      })
+
+      it('should retry', async function () {
+        await DocArchiveManager.promises.destroyDoc(projectId, docId)
+        expect(PersistorManager.deleteObject).to.have.been.calledWith(
+          Settings.docstore.bucket,
+          `${projectId}/${docId}`
+        )
+        expect(PersistorManager.deleteObject.callCount).to.equal(3)
+      })
+    })
+
+    describe('when the destroy request errors permanent', function () {
+      beforeEach(function () {
+        mongoDocs[0].inS3 = true
+        PersistorManager.deleteObject.rejects(new Error('permanent'))
+      })
+
+      it('should retry and fail eventually', async function () {
+        await expect(DocArchiveManager.promises.destroyDoc(projectId, docId))
+          .to.eventually.be.rejected
+        expect(PersistorManager.deleteObject).to.have.been.calledWith(
+          Settings.docstore.bucket,
+          `${projectId}/${docId}`
+        )
+        expect(PersistorManager.deleteObject.callCount).to.equal(4)
+      })
+    })
   })
 
   describe('when the doc is not in s3', function () {
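The transient-failure test drives the retry path with sinon's per-call stubbing: the first two deleteObject calls reject, the third resolves, so destroyDoc succeeds after exactly three calls. A minimal self-contained illustration of that stubbing pattern (assumes only that sinon is installed; the demo harness is not from the commit):

// Per-call stubbing with sinon: programme each call's outcome in order.
const sinon = require('sinon')

async function demo() {
  const deleteObject = sinon.stub()
  deleteObject.onFirstCall().rejects(new Error('1'))
  deleteObject.onSecondCall().rejects(new Error('2'))
  deleteObject.onThirdCall().resolves()

  for (let call = 1; call <= 3; call++) {
    try {
      await deleteObject()
      console.log(`call ${call}: resolved`)
    } catch (err) {
      console.log(`call ${call}: rejected (${err.message})`)
    }
  }
}

demo() // calls 1 and 2 reject, call 3 resolves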