const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../app/js/DocArchiveManager.js'
const SandboxedModule = require('sandboxed-module')
const { ObjectId } = require('mongodb')
const Errors = require('../../../app/js/Errors')

describe('DocArchiveManager', function () {
  let DocArchiveManager,
    PersistorManager,
    MongoManager,
    RangeManager,
    Settings,
    Crypto,
    Streamifier,
    HashDigest,
    HashUpdate,
    archivedDocs,
    mongoDocs,
    docJson,
    md5Sum,
    projectId,
    readStream,
    stream
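
  // Fresh stubs are built for every test so call history and stubbed
  // behaviours don't leak between cases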
  beforeEach(function () {
    md5Sum = 'decafbad'

    RangeManager = {
      jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }),
    }
    Settings = {
      docstore: {
        bucket: 'wombat',
      },
      parallelArchiveJobs: 3,
      destroyBatchSize: 10,
      destroyRetryCount: 3,
    }
    HashDigest = sinon.stub().returns(md5Sum)
    HashUpdate = sinon.stub().returns({ digest: HashDigest })
    Crypto = {
      createHash: sinon.stub().returns({ update: HashUpdate }),
    }
    Streamifier = {
      createReadStream: sinon.stub().returns({ stream: 'readStream' }),
    }
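
    // Fixture docs: archivedDocs are all already in S3; mongoDocs mix live
    // docs (with lines) and archived stubs (inS3: true) at various revisions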
    projectId = ObjectId()
    archivedDocs = [
      {
        _id: ObjectId(),
        inS3: true,
        rev: 2,
      },
      {
        _id: ObjectId(),
        inS3: true,
        rev: 4,
      },
      {
        _id: ObjectId(),
        inS3: true,
        rev: 6,
      },
    ]
    mongoDocs = [
      {
        _id: ObjectId(),
        lines: ['one', 'two', 'three'],
        rev: 2,
      },
      {
        _id: ObjectId(),
        lines: ['aaa', 'bbb', 'ccc'],
        rev: 4,
      },
      {
        _id: ObjectId(),
        inS3: true,
        rev: 6,
      },
      {
        _id: ObjectId(),
        inS3: true,
        rev: 6,
      },
      {
        _id: ObjectId(),
        lines: ['111', '222', '333'],
        rev: 6,
      },
    ]
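
    // The JSON payload we expect archiveDoc to produce for mongoDocs[0]
    // (ranges is undefined here, so JSON.stringify omits it)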
    docJson = JSON.stringify({
      lines: mongoDocs[0].lines,
      ranges: mongoDocs[0].ranges,
      schema_v: 1,
    })
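
    // Stubbed S3 read stream: emits the archived JSON as a single data
    // chunk, then signals end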
    stream = {
      on: sinon.stub(),
      resume: sinon.stub(),
    }
    stream.on.withArgs('data').yields(Buffer.from(docJson, 'utf8'))
    stream.on.withArgs('end').yields()

    readStream = {
      stream: 'readStream',
    }

    PersistorManager = {
      getObjectStream: sinon.stub().resolves(stream),
      sendStream: sinon.stub().resolves(),
      getObjectMd5Hash: sinon.stub().resolves(md5Sum),
      deleteObject: sinon.stub().resolves(),
    }
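
    // The batch queries page through results: the first call resolves with a
    // batch of docs and the second with an empty array to end iteration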
    const getNonArchivedProjectDocs = sinon.stub()
    getNonArchivedProjectDocs
      .onCall(0)
      .resolves(mongoDocs.filter(doc => !doc.inS3))
    getNonArchivedProjectDocs.onCall(1).resolves([])

    const getArchivedProjectDocs = sinon.stub()
    getArchivedProjectDocs.onCall(0).resolves(archivedDocs)
    getArchivedProjectDocs.onCall(1).resolves([])

    MongoManager = {
      promises: {
        markDocAsArchived: sinon.stub().resolves(),
        upsertIntoDocCollection: sinon.stub().resolves(),
        getProjectsDocs: sinon.stub().resolves(mongoDocs),
        getNonDeletedArchivedProjectDocs: getArchivedProjectDocs,
        getNonArchivedProjectDocs,
        getArchivedProjectDocs,
        findDoc: sinon.stub().rejects(new Errors.NotFoundError()),
        destroyDoc: sinon.stub().resolves(),
      },
    }
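    // findDoc rejects with NotFoundError by default; the known fixture docs
    // resolve instead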
    for (const mongoDoc of mongoDocs.concat(archivedDocs)) {
      MongoManager.promises.findDoc
        .withArgs(projectId, mongoDoc._id, sinon.match.any)
        .resolves(mongoDoc)
    }
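
    // Load the module under test with every dependency replaced by the stubs
    // built above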
    DocArchiveManager = SandboxedModule.require(modulePath, {
      requires: {
        '@overleaf/settings': Settings,
        crypto: Crypto,
        streamifier: Streamifier,
        './MongoManager': MongoManager,
        './RangeManager': RangeManager,
        './PersistorManager': PersistorManager,
        './Errors': Errors,
      },
    })
  })

  describe('archiveDoc', function () {
    it('should resolve when passed a valid document', async function () {
      await expect(
        DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])
      ).to.eventually.be.fulfilled
    })

    it('should throw an error if the doc has no lines', async function () {
      const doc = mongoDocs[0]
      doc.lines = null

      await expect(
        DocArchiveManager.promises.archiveDoc(projectId, doc)
      ).to.eventually.be.rejectedWith('doc has no lines')
    })

    it('should add the schema version', async function () {
      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[1])
      expect(Streamifier.createReadStream).to.have.been.calledWith(
        sinon.match(/"schema_v":1/)
      )
    })

    it('should calculate the hex md5 sum of the content', async function () {
      const json = JSON.stringify({
        lines: mongoDocs[0].lines,
        ranges: mongoDocs[0].ranges,
        schema_v: 1,
      })

      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])

      expect(Crypto.createHash).to.have.been.calledWith('md5')
      expect(HashUpdate).to.have.been.calledWith(json)
      expect(HashDigest).to.have.been.calledWith('hex')
    })

    it('should pass the md5 hash to the object persistor for verification', async function () {
      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])

      expect(PersistorManager.sendStream).to.have.been.calledWith(
        sinon.match.any,
        sinon.match.any,
        sinon.match.any,
        { sourceMd5: md5Sum }
      )
    })

    it('should pass the correct bucket and key to the persistor', async function () {
      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])

      expect(PersistorManager.sendStream).to.have.been.calledWith(
        Settings.docstore.bucket,
        `${projectId}/${mongoDocs[0]._id}`
      )
    })

    it('should create a stream from the encoded json and send it', async function () {
      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])
      expect(Streamifier.createReadStream).to.have.been.calledWith(docJson)
      expect(PersistorManager.sendStream).to.have.been.calledWith(
        sinon.match.any,
        sinon.match.any,
        readStream
      )
    })

    it('should mark the doc as archived', async function () {
      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])
      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
        mongoDocs[0]._id,
        mongoDocs[0].rev
      )
    })
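
    // JSON.stringify is swapped for a stub so we can force null bytes into
    // the archived payload; the original is restored after each test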
    describe('with null bytes in the result', function () {
      const _stringify = JSON.stringify

      beforeEach(function () {
        JSON.stringify = sinon.stub().returns('{"bad": "\u0000"}')
      })

      afterEach(function () {
        JSON.stringify = _stringify
      })

      it('should return an error', async function () {
        await expect(
          DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])
        ).to.eventually.be.rejectedWith('null bytes detected')
      })
    })
  })

  describe('unarchiveDoc', function () {
    let docId

    describe('when the doc is in S3', function () {
      beforeEach(function () {
        MongoManager.promises.findDoc = sinon.stub().resolves({ inS3: true })
        docId = mongoDocs[0]._id
      })

      it('should resolve when passed a valid document', async function () {
        await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
          .to.eventually.be.fulfilled
      })

      it('should test md5 validity with the raw buffer', async function () {
        await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
        expect(HashUpdate).to.have.been.calledWithMatch(
          sinon.match.instanceOf(Buffer)
        )
      })

      it('should throw an error if the md5 does not match', async function () {
        PersistorManager.getObjectMd5Hash.resolves('badf00d')
        await expect(
          DocArchiveManager.promises.unarchiveDoc(projectId, docId)
        ).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError)
      })

      it('should update the doc lines in mongo', async function () {
        await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
        expect(
          MongoManager.promises.upsertIntoDocCollection
        ).to.have.been.calledWith(projectId, docId, {
          lines: mongoDocs[0].lines,
        })
      })

      it('should delete the doc in s3', async function () {
        await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
        expect(PersistorManager.deleteObject).to.have.been.calledWith(
          Settings.docstore.bucket,
          `${projectId}/${docId}`
        )
      })
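
      // Each case below rewrites docJson and the stubbed stream data so the
      // "S3" payload exercises a different archive schema version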
      describe('doc contents', function () {
        let mongoDoc, s3Doc

        describe('when the doc has the old schema', function () {
          beforeEach(function () {
            mongoDoc = {
              lines: ['doc', 'lines'],
            }
            s3Doc = ['doc', 'lines']
            docJson = JSON.stringify(s3Doc)
            stream.on.withArgs('data').yields(Buffer.from(docJson, 'utf8'))
          })

          it("should return the doc's lines", async function () {
            await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
            expect(
              MongoManager.promises.upsertIntoDocCollection
            ).to.have.been.calledWith(projectId, docId, mongoDoc)
          })
        })

        describe('with the new schema and ranges', function () {
          beforeEach(function () {
            s3Doc = {
              lines: ['doc', 'lines'],
              ranges: { json: 'ranges' },
              schema_v: 1,
            }
            mongoDoc = {
              lines: ['doc', 'lines'],
              ranges: { mongo: 'ranges' },
            }
            docJson = JSON.stringify(s3Doc)
            stream.on.withArgs('data').yields(Buffer.from(docJson, 'utf8'))
          })

          it('should return the doc lines and ranges', async function () {
            await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
            expect(
              MongoManager.promises.upsertIntoDocCollection
            ).to.have.been.calledWith(projectId, docId, mongoDoc)
          })
        })

        describe('with the new schema and no ranges', function () {
          beforeEach(function () {
            s3Doc = {
              lines: ['doc', 'lines'],
              schema_v: 1,
            }
            mongoDoc = {
              lines: ['doc', 'lines'],
            }
            docJson = JSON.stringify(s3Doc)
            stream.on.withArgs('data').yields(Buffer.from(docJson, 'utf8'))
          })

          it('should return only the doc lines', async function () {
            await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
            expect(
              MongoManager.promises.upsertIntoDocCollection
            ).to.have.been.calledWith(projectId, docId, mongoDoc)
          })
        })

        describe('with an unrecognised schema', function () {
          beforeEach(function () {
            s3Doc = {
              lines: ['doc', 'lines'],
              schema_v: 2,
            }
            docJson = JSON.stringify(s3Doc)
            stream.on.withArgs('data').yields(Buffer.from(docJson, 'utf8'))
          })

          it('should throw an error', async function () {
            await expect(
              DocArchiveManager.promises.unarchiveDoc(projectId, docId)
            ).to.eventually.be.rejectedWith(
              "I don't understand the doc format in s3"
            )
          })
        })
      })
    })

    it('should not do anything if the file is already unarchived', async function () {
      MongoManager.promises.findDoc.resolves({ inS3: false })
      await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
      expect(PersistorManager.getObjectStream).not.to.have.been.called
    })
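
    // Simulate a race: the doc looks archived on the first check but has
    // been unarchived by the time we look again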
    describe('when the file is removed while we are processing it', function () {
      beforeEach(function () {
        MongoManager.promises.findDoc = sinon.stub().resolves({ inS3: true })
        MongoManager.promises.findDoc.onSecondCall().resolves({ inS3: false })
      })

      it('should not throw an error if the file is unarchived before we fetch its hash', async function () {
        PersistorManager.getObjectMd5Hash = sinon
          .stub()
          .rejects(new Errors.NotFoundError())
        await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
          .to.eventually.be.fulfilled
        expect(PersistorManager.getObjectStream).not.to.have.been.called
      })

      it('should not throw an error if the file is unarchived before we download it', async function () {
        PersistorManager.getObjectStream = sinon
          .stub()
          .rejects(new Errors.NotFoundError())
        await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
          .to.eventually.be.fulfilled
        expect(MongoManager.promises.upsertIntoDocCollection).not.to.have.been
          .called
      })
    })

    it('should throw an error if the file is not found but is still listed as archived', async function () {
      PersistorManager.getObjectStream = sinon
        .stub()
        .rejects(new Errors.NotFoundError())
      await expect(
        DocArchiveManager.promises.unarchiveDoc(projectId, docId)
      ).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError)
    })
  })

  describe('destroyDoc', function () {
    let docId

    beforeEach(function () {
      docId = mongoDocs[0]._id
    })

    it('should resolve when passed a valid document', async function () {
      await expect(DocArchiveManager.promises.destroyDoc(projectId, docId)).to
        .eventually.be.fulfilled
    })

    it('should throw a not found error when there is no document', async function () {
      await expect(
        DocArchiveManager.promises.destroyDoc(projectId, 'wombat')
      ).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError)
    })

    describe('when the doc is in s3', function () {
      beforeEach(function () {
        mongoDocs[0].inS3 = true
      })

      it('should delete the document from s3, if it is in s3', async function () {
        await DocArchiveManager.promises.destroyDoc(projectId, docId)
        expect(PersistorManager.deleteObject).to.have.been.calledWith(
          Settings.docstore.bucket,
          `${projectId}/${docId}`
        )
      })

      it('should delete the doc in mongo', async function () {
        await DocArchiveManager.promises.destroyDoc(projectId, docId)
        expect(MongoManager.promises.destroyDoc).to.have.been.calledWith(docId)
      })

      describe('when the destroy request errors', function () {
        beforeEach(function () {
          mongoDocs[0].inS3 = true
          PersistorManager.deleteObject.onFirstCall().rejects(new Error('1'))
          PersistorManager.deleteObject.onSecondCall().rejects(new Error('2'))
          PersistorManager.deleteObject.onThirdCall().resolves()
        })

        it('should retry', async function () {
          await DocArchiveManager.promises.destroyDoc(projectId, docId)
          expect(PersistorManager.deleteObject).to.have.been.calledWith(
            Settings.docstore.bucket,
            `${projectId}/${docId}`
          )
          expect(PersistorManager.deleteObject.callCount).to.equal(3)
        })
      })

      describe('when the destroy request errors permanently', function () {
        beforeEach(function () {
          mongoDocs[0].inS3 = true
          PersistorManager.deleteObject.rejects(new Error('permanent'))
        })

        it('should retry and fail eventually', async function () {
          await expect(DocArchiveManager.promises.destroyDoc(projectId, docId))
            .to.eventually.be.rejected
          expect(PersistorManager.deleteObject).to.have.been.calledWith(
            Settings.docstore.bucket,
            `${projectId}/${docId}`
          )
          // one initial attempt plus destroyRetryCount (3) retries
          expect(PersistorManager.deleteObject.callCount).to.equal(4)
        })
      })
    })

    describe('when the doc is not in s3', function () {
      beforeEach(function () {
        mongoDocs[0].inS3 = false
      })

      it('should not delete the document from s3, if it is not in s3', async function () {
        await DocArchiveManager.promises.destroyDoc(projectId, docId)
        expect(PersistorManager.deleteObject).not.to.have.been.called
      })

      it('should delete the doc in mongo', async function () {
        await DocArchiveManager.promises.destroyDoc(projectId, docId)
        expect(MongoManager.promises.destroyDoc).to.have.been.calledWith(docId)
      })
    })
  })

  describe('archiveAllDocs', function () {
    it('should resolve with valid arguments', async function () {
      await expect(DocArchiveManager.promises.archiveAllDocs(projectId)).to
        .eventually.be.fulfilled
    })

    it('should archive all project docs which are not in s3', async function () {
      await DocArchiveManager.promises.archiveAllDocs(projectId)
      // not inS3
      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
        mongoDocs[0]._id
      )
      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
        mongoDocs[1]._id
      )
      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
        mongoDocs[4]._id
      )

      // inS3
      expect(
        MongoManager.promises.markDocAsArchived
      ).not.to.have.been.calledWith(mongoDocs[2]._id)
      expect(
        MongoManager.promises.markDocAsArchived
      ).not.to.have.been.calledWith(mongoDocs[3]._id)
    })
  })

  describe('unArchiveAllDocs', function () {
    it('should resolve with valid arguments', async function () {
      await expect(DocArchiveManager.promises.unArchiveAllDocs(projectId)).to
        .eventually.be.fulfilled
    })

    it('should unarchive all inS3 docs', async function () {
      await DocArchiveManager.promises.unArchiveAllDocs(projectId)

      for (const doc of archivedDocs) {
        expect(PersistorManager.getObjectStream).to.have.been.calledWith(
          Settings.docstore.bucket,
          `${projectId}/${doc._id}`
        )
      }
    })
  })

  describe('destroyAllDocs', function () {
    beforeEach(function () {
      MongoManager.promises.getProjectsDocs.onCall(0).resolves(mongoDocs)
      MongoManager.promises.getProjectsDocs.onCall(1).resolves([])
    })

    it('should resolve with valid arguments', async function () {
      await expect(DocArchiveManager.promises.destroyAllDocs(projectId)).to
        .eventually.be.fulfilled
    })

    it('should delete all docs that are in s3 from s3', async function () {
      await DocArchiveManager.promises.destroyAllDocs(projectId)

      // not inS3
      for (const index of [0, 1, 4]) {
        expect(PersistorManager.deleteObject).not.to.have.been.calledWith(
          Settings.docstore.bucket,
          `${projectId}/${mongoDocs[index]._id}`
        )
      }

      // inS3
      for (const index of [2, 3]) {
        expect(PersistorManager.deleteObject).to.have.been.calledWith(
          Settings.docstore.bucket,
          `${projectId}/${mongoDocs[index]._id}`
        )
      }
    })

    it('should destroy all docs in mongo', async function () {
      await DocArchiveManager.promises.destroyAllDocs(projectId)

      for (const mongoDoc of mongoDocs) {
        expect(MongoManager.promises.destroyDoc).to.have.been.calledWith(
          mongoDoc._id
        )
      }
    })
  })
})