Don't throw 404 errors when unarchiving if the doc is already unarchived
commit 31c757cce2 (parent 0386b871ee)

2 changed files with 183 additions and 119 deletions
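
In outline, unarchiveDoc now checks Mongo's inS3 flag before touching the object store, and treats a 404 from the store as benign when a re-check shows the doc was unarchived concurrently. A condensed sketch of that control flow, using the module names from the diff below (fetchFromStore is a hypothetical stand-in for the getObjectMd5Hash/getObjectStream calls; md5 verification, logging, and the Mongo write-back are elided):

// Condensed sketch, not the full implementation. fetchFromStore is a
// hypothetical helper standing in for the two S3 reads in the diff.
async function unarchiveDocSketch(projectId, docId) {
  const originalDoc = await MongoManager.findDoc(projectId, docId, { inS3: 1 })
  if (!originalDoc.inS3) {
    return // already unarchived, nothing to do
  }
  try {
    await fetchFromStore(projectId, docId) // the S3 reads from the diff
  } catch (err) {
    if (err instanceof Errors.NotFoundError) {
      // a 404 may mean another worker unarchived the doc in the meantime
      const doc = await MongoManager.findDoc(projectId, docId, { inS3: 1 })
      if (!doc.inS3) {
        return // the race was benign: someone else finished the unarchive
      }
    }
    throw err // still flagged as archived but missing from S3: a real error
  }
}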
@@ -93,15 +93,33 @@ async function unarchiveDoc(projectId, docId) {
     { project_id: projectId, doc_id: docId },
     'getting doc from persistor'
   )
+  const originalDoc = await MongoManager.findDoc(projectId, docId, { inS3: 1 })
+  if (!originalDoc.inS3) {
+    // return if it's not actually in S3 as there's nothing to do
+    return
+  }
   const key = `${projectId}/${docId}`
-  const sourceMd5 = await PersistorManager.getObjectMd5Hash(
-    settings.docstore.bucket,
-    key
-  )
-  const stream = await PersistorManager.getObjectStream(
-    settings.docstore.bucket,
-    key
-  )
+  let stream, sourceMd5
+  try {
+    sourceMd5 = await PersistorManager.getObjectMd5Hash(
+      settings.docstore.bucket,
+      key
+    )
+    stream = await PersistorManager.getObjectStream(
+      settings.docstore.bucket,
+      key
+    )
+  } catch (err) {
+    // if we get a 404, we could be in a race and something else has unarchived the doc already
+    if (err instanceof Errors.NotFoundError) {
+      const doc = await MongoManager.findDoc(projectId, docId, { inS3: 1 })
+      if (!doc.inS3) {
+        // the doc has been unarchived while we were looking for it, so no error
+        return
+      }
+    }
+    throw err
+  }
   stream.resume()
   const json = await _streamToString(stream)
   const md5 = crypto.createHash('md5').update(json).digest('hex')
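
Note the asymmetry in the catch block: the NotFoundError is only swallowed when the follow-up Mongo lookup confirms inS3 is no longer set. If the doc is still flagged as archived but missing from the bucket, the error is rethrown, so genuine data loss still surfaces as a 404 (the last test below covers exactly that case).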
@ -128,13 +128,13 @@ describe('DocArchiveManager', function () {
|
||||||
upsertIntoDocCollection: sinon.stub().resolves(),
|
upsertIntoDocCollection: sinon.stub().resolves(),
|
||||||
getProjectsDocs: sinon.stub().resolves(mongoDocs),
|
getProjectsDocs: sinon.stub().resolves(mongoDocs),
|
||||||
getArchivedProjectDocs: sinon.stub().resolves(archivedDocs),
|
getArchivedProjectDocs: sinon.stub().resolves(archivedDocs),
|
||||||
findDoc: sinon.stub().resolves(),
|
findDoc: sinon.stub().rejects(new Errors.NotFoundError()),
|
||||||
destroyDoc: sinon.stub().resolves()
|
destroyDoc: sinon.stub().resolves()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for (const mongoDoc of mongoDocs) {
|
for (const mongoDoc of mongoDocs.concat(archivedDocs)) {
|
||||||
MongoManager.promises.findDoc
|
MongoManager.promises.findDoc
|
||||||
.withArgs(projectId, mongoDoc._id)
|
.withArgs(projectId, mongoDoc._id, sinon.match.any)
|
||||||
.resolves(mongoDoc)
|
.resolves(mongoDoc)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
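
The stub setup above layers a per-argument override on top of a default behaviour: findDoc now rejects with NotFoundError unless it is called with the id of a known fixture doc, which mirrors how the real MongoManager behaves for unknown ids. A minimal illustration of the pattern (knownDoc and 'unknown-id' are hypothetical fixtures, not names from the test file):

// Default behaviour: reject like the real MongoManager for unknown ids.
const findDoc = sinon.stub().rejects(new Errors.NotFoundError())
// Per-argument override: known fixture docs resolve normally.
findDoc.withArgs(projectId, knownDoc._id, sinon.match.any).resolves(knownDoc)

await findDoc(projectId, knownDoc._id, { inS3: 1 }) // resolves with knownDoc
await findDoc(projectId, 'unknown-id', { inS3: 1 }) // rejects with NotFoundError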
@@ -253,13 +253,15 @@ describe('DocArchiveManager', function () {
   describe('unarchiveDoc', function () {
     let docId

+    describe('when the doc is in S3', function () {
     beforeEach(function () {
+      MongoManager.promises.findDoc = sinon.stub().resolves({ inS3: true })
       docId = mongoDocs[0]._id
     })

     it('should resolve when passed a valid document', async function () {
-      await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId)).to
-        .eventually.be.fulfilled
+      await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
+        .to.eventually.be.fulfilled
     })

     it('should throw an error if the md5 does not match', async function () {
@@ -273,7 +275,9 @@ describe('DocArchiveManager', function () {
       await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
       expect(
         MongoManager.promises.upsertIntoDocCollection
-      ).to.have.been.calledWith(projectId, docId, { lines: mongoDocs[0].lines })
+      ).to.have.been.calledWith(projectId, docId, {
+        lines: mongoDocs[0].lines
+      })
     })

     it('should delete the doc in s3', async function () {
@@ -370,6 +374,48 @@ describe('DocArchiveManager', function () {
       })
     })

+      it('should not do anything if the file is already unarchived', async function () {
+        MongoManager.promises.findDoc.resolves({ inS3: false })
+        await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+        expect(PersistorManager.getObjectStream).not.to.have.been.called
+      })
+
+      describe('when the file is removed while we are processing it', function () {
+        beforeEach(function () {
+          MongoManager.promises.findDoc = sinon.stub().resolves({ inS3: true })
+          MongoManager.promises.findDoc.onSecondCall().resolves({ inS3: false })
+        })
+
+        it('should not throw an error if the file is unarchived before we ask for its hash', async function () {
+          PersistorManager.getObjectMd5Hash = sinon
+            .stub()
+            .rejects(new Errors.NotFoundError())
+          await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
+            .to.eventually.be.fulfilled
+          expect(PersistorManager.getObjectStream).not.to.have.been.called
+        })
+
+        it('should not throw an error if the file is unarchived before we download it', async function () {
+          PersistorManager.getObjectStream = sinon
+            .stub()
+            .rejects(new Errors.NotFoundError())
+          await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
+            .to.eventually.be.fulfilled
+          expect(MongoManager.promises.upsertIntoDocCollection).not.to.have.been
+            .called
+        })
+      })
+
+      it('should throw an error if the file is not found but is still listed as archived', async function () {
+        PersistorManager.getObjectStream = sinon
+          .stub()
+          .rejects(new Errors.NotFoundError())
+        await expect(
+          DocArchiveManager.promises.unarchiveDoc(projectId, docId)
+        ).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError)
+      })
+    })
+
   describe('destroyDoc', function () {
     let docId

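
The race itself is simulated with sinon's onSecondCall(): the first findDoc call (the up-front inS3 check) reports the doc as archived so unarchiving proceeds, while the second call (the re-check inside the 404 handler) reports it as already unarchived. In isolation:

const findDoc = sinon.stub().resolves({ inS3: true })
findDoc.onSecondCall().resolves({ inS3: false })

await findDoc() // first call:  { inS3: true }  -> proceed with unarchive
await findDoc() // second call: { inS3: false } -> treat the 404 as benign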