Merge pull request #21631 from overleaf/jpa-live-s3
[object-persistor] s3SSEC: make compatible with AWS S3

GitOrigin-RevId: 3cd1f5ec43141f15defe081fc916d2ca2d78ca01
parent d5478c11ea
commit 65dc6bf940
3 changed files with 40 additions and 1 deletion
@@ -184,7 +184,7 @@ class S3Persistor extends AbstractPersistor {
         case 404: // NoSuchKey
           return reject(new NotFoundError('not found'))
         default:
-          return reject(new Error('non success status: ' + statusCode))
+          // handled by stream.on('error') handler below
       }
     })
     // The AWS SDK is forwarding any errors from the request to the stream.
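Why the default branch changed: with SSE-C against real AWS S3, a GET for a missing object can trigger two httpHeaders events on the same request, a 400 followed by the actual 404, before the SDK forwards an error to the stream. The old code rejected with a generic Error('non success status: 400') on the first event, so the caller never saw a NotFoundError. Below is a minimal sketch, not the actual S3Persistor implementation, of how the switch and the stream 'error' handler now cooperate; the function name, the NotFoundError placeholder, and the 200/206 handling are illustrative assumptions.

class NotFoundError extends Error {} // stand-in for the persistor's NotFoundError

// s3Client is an AWS.S3 instance from aws-sdk v2, which provides
// request.createReadStream() and the 'httpHeaders' event used below.
function getObjectStreamSketch(s3Client, bucketName, key) {
  return new Promise((resolve, reject) => {
    const request = s3Client.getObject({ Bucket: bucketName, Key: key })
    const stream = request.createReadStream()

    request.on('httpHeaders', statusCode => {
      switch (statusCode) {
        case 200: // OK
        case 206: // Partial Content (ranged GET)
          return resolve(stream)
        case 404: // NoSuchKey
          return reject(new NotFoundError('not found'))
        default:
        // AWS S3 may emit a 400 before the real 404 on an SSE-C request,
        // so do not reject here; a later httpHeaders event or the stream
        // 'error' handler below reports the failure.
      }
    })

    // The AWS SDK forwards request errors to the stream.
    stream.on('error', err => reject(err))
  })
}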
@@ -91,6 +91,14 @@ describe('S3PersistorTests', function () {

       createReadStream() {
         setTimeout(() => {
+          if (this.notFoundSSEC) {
+            // special case for AWS S3: 404 NoSuchKey wrapped in a 400. A single request received a single response, and multiple httpHeaders events are triggered. Don't ask.
+            this.emit('httpHeaders', 400)
+            this.emit('httpHeaders', 404)
+            ReadStream.emit('error', S3NotFoundError)
+            return
+          }
+
           if (this.err) return ReadStream.emit('error', this.err)
           this.emit('httpHeaders', this.statusCode)
           if (this.statusCode === 403) {
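The quirk this mock reproduces only shows up when the object is requested with SSE-C (customer-provided encryption key) headers. For context, here is a hedged sketch of what those parameters look like on an aws-sdk v2 getObject call; the helper name and the way the secret is obtained are illustrative and not the object-persistor's actual key handling.

const crypto = require('crypto')

// Illustrative helper: builds SSE-C parameters for a v2 S3 getObject call.
function ssecParams(bucketName, key, secret /* 32-byte Buffer */) {
  return {
    Bucket: bucketName,
    Key: key,
    SSECustomerAlgorithm: 'AES256',
    SSECustomerKey: secret,
    // S3 requires the base64-encoded MD5 of the customer key for validation.
    SSECustomerKeyMD5: crypto.createHash('md5').update(secret).digest('base64'),
  }
}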
@@ -344,6 +352,34 @@ describe('S3PersistorTests', function () {
         })
       })

+      describe("when the file doesn't exist -- SSEC", function () {
+        let error, stream
+
+        beforeEach(async function () {
+          S3GetObjectRequest.notFoundSSEC = 404
+          try {
+            stream = await S3Persistor.getObjectStream(bucket, key)
+          } catch (err) {
+            error = err
+          }
+        })
+        it('does not return a stream', function () {
+          expect(stream).not.to.exist
+        })
+
+        it('throws a NotFoundError', function () {
+          expect(error).to.be.an.instanceOf(Errors.NotFoundError)
+        })
+
+        it('wraps the error', function () {
+          expect(error.cause).to.exist
+        })
+
+        it('stores the bucket and key in the error', function () {
+          expect(error.info).to.include({ bucketName: bucket, key })
+        })
+      })
+
       describe('when access to the file is denied', function () {
         let error, stream

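The new test block asserts the shape of the rejection: a NotFoundError whose cause is the underlying S3 error and whose info carries { bucketName, key }. A hedged sketch of that wrapping follows, assuming NotFoundError is an @overleaf/o-error subclass as elsewhere in the object-persistor; the wrapNotFound helper is hypothetical.

const OError = require('@overleaf/o-error')

class NotFoundError extends OError {} // stand-in for Errors.NotFoundError

// Hypothetical helper; the persistor's real wrapping code may differ.
function wrapNotFound(s3Error, bucketName, key) {
  return new NotFoundError('not found', { bucketName, key }).withCause(s3Error)
}

// const err = wrapNotFound(s3NotFoundError, 'my-bucket', 'some/key')
// err.cause -> the original S3 error
// err.info  -> { bucketName: 'my-bucket', key: 'some/key' }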
@@ -467,6 +467,7 @@ describe('Filestore', function () {
     })

     describe('with a large file', function () {
+      this.timeout(1000 * 20)
       let fileId, fileUrl, largeFileContent, error

       beforeEach('upload large file', async function () {
@@ -1221,6 +1222,8 @@ describe('Filestore', function () {

     for (const { name, prev, cur, fail } of stages) {
       describe(name, function () {
+        this.timeout(1000 * 30)
+
         it('can read old writes', async function () {
           await checkWrites(fileKey1, prev, [prev, cur], fail)
           await checkWrites(fileKey2, prev, [prev, cur], fail) // check again after access
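Both Filestore hunks raise Mocha's timeout at the suite level rather than per test. A this.timeout(...) call inside describe applies to every hook and test in that suite, which is why the callbacks are regular functions rather than arrow functions (Mocha binds this to the suite context). A small illustrative example, with made-up suite and test names:

describe('slow S3 round-trips', function () {
  this.timeout(1000 * 30) // applies to every hook and test in this suite

  it('uploads and reads back a large object', async function () {
    // long-running setup and assertions go here
  })
})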