Re-add bucket-specific credentials support for S3

Simon Detheridge 2020-01-07 10:24:46 +00:00
parent 2bb2caf7b3
commit 6cc5d94f13
3 changed files with 166 additions and 32 deletions
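The per-bucket credentials sit alongside the default key and secret in the filestore settings. A minimal sketch of the expected settings shape, inferred from the unit-test setup in this commit (bucket names and credential values are placeholders):

// config/settings sketch -- bucket names and values are placeholders
module.exports = {
  filestore: {
    backend: 's3',
    s3: {
      // default credentials, used for any bucket without its own entry
      key: 'DEFAULT_ACCESS_KEY',
      secret: 'DEFAULT_SECRET',
      // optional per-bucket overrides, keyed by bucket name
      s3BucketCreds: {
        sl_user_files: {
          auth_key: 'USER_FILES_ACCESS_KEY',
          auth_secret: 'USER_FILES_SECRET'
        }
      }
    },
    stores: {
      user_files: 'sl_user_files'
    }
  }
}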

View file

@@ -23,6 +23,7 @@ class ReadError extends BackwardCompatibleError {}
class HealthCheckError extends BackwardCompatibleError {}
class ConversionsDisabledError extends BackwardCompatibleError {}
class ConversionError extends BackwardCompatibleError {}
class SettingsError extends BackwardCompatibleError {}
class FailedCommandError extends OError {
constructor(command, code, stdout, stderr) {
@@ -46,5 +47,6 @@ module.exports = {
WriteError,
ReadError,
ConversionError,
HealthCheckError
HealthCheckError,
SettingsError
}
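The new SettingsError is thrown by the S3 persistor below when neither bucket-specific nor default credentials are configured. A minimal sketch of how a caller might single it out (fetchOrExplain is a hypothetical helper, not part of this commit):

const { SettingsError } = require('./Errors')

// Hypothetical caller: separate configuration problems from ordinary I/O errors
async function fetchOrExplain(persistor, bucket, key) {
  try {
    return await persistor.promises.getFileStream(bucket, key)
  } catch (err) {
    if (err instanceof SettingsError) {
      // neither bucket-specific nor default S3 credentials were configured
      console.error('missing S3 credentials for bucket', bucket)
      return null
    }
    throw err
  }
}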

View file

@@ -12,7 +12,12 @@ const fs = require('fs')
const S3 = require('aws-sdk/clients/s3')
const { URL } = require('url')
const { callbackify } = require('util')
const { WriteError, ReadError, NotFoundError } = require('./Errors')
const {
WriteError,
ReadError,
NotFoundError,
SettingsError
} = require('./Errors')
module.exports = {
sendFile: callbackify(sendFile),
@@ -37,8 +42,6 @@ module.exports = {
}
}
const _client = new S3(_defaultOptions())
async function sendFile(bucketName, key, fsPath) {
let readStream
try {
@@ -61,7 +64,7 @@ async function sendStream(bucketName, key, readStream) {
metrics.count('s3.egress', meteredStream.bytes)
})
const response = await _client
const response = await _client(bucketName)
.upload({
Bucket: bucketName,
Key: key,
@@ -92,7 +95,9 @@ async function getFileStream(bucketName, key, opts) {
}
return new Promise((resolve, reject) => {
const stream = _client.getObject(params).createReadStream()
const stream = _client(bucketName)
.getObject(params)
.createReadStream()
const meteredStream = meter()
meteredStream.on('finish', () => {
@@ -115,7 +120,7 @@ async function deleteDirectory(bucketName, key) {
let response
try {
response = await _client
response = await _client(bucketName)
.listObjects({ Bucket: bucketName, Prefix: key })
.promise()
} catch (err) {
@@ -130,7 +135,7 @@ async function deleteDirectory(bucketName, key) {
const objects = response.Contents.map(item => ({ Key: item.Key }))
if (objects.length) {
try {
await _client
await _client(bucketName)
.deleteObjects({
Bucket: bucketName,
Delete: {
@@ -152,7 +157,7 @@ async function deleteDirectory(bucketName, key) {
async function getFileSize(bucketName, key) {
try {
const response = await _client
const response = await _client(bucketName)
.headObject({ Bucket: bucketName, Key: key })
.promise()
return response.ContentLength
@@ -168,7 +173,9 @@ async function getFileSize(bucketName, key) {
async function deleteFile(bucketName, key) {
try {
await _client.deleteObject({ Bucket: bucketName, Key: key }).promise()
await _client(bucketName)
.deleteObject({ Bucket: bucketName, Key: key })
.promise()
} catch (err) {
throw _wrapError(
err,
@@ -186,7 +193,9 @@ async function copyFile(bucketName, sourceKey, destKey) {
CopySource: `${bucketName}/${sourceKey}`
}
try {
await _client.copyObject(params).promise()
await _client(bucketName)
.copyObject(params)
.promise()
} catch (err) {
throw _wrapError(err, 'failed to copy file in S3', params, WriteError)
}
@@ -211,7 +220,7 @@ async function checkIfFileExists(bucketName, key) {
async function directorySize(bucketName, key) {
try {
const response = await _client
const response = await _client(bucketName)
.listObjects({ Bucket: bucketName, Prefix: key })
.promise()
@@ -240,9 +249,49 @@ function _wrapError(error, message, params, ErrorType) {
}
}
function _defaultOptions() {
const options = {
credentials: {
const _clients = {}
function _client(bucket) {
if (_clients[bucket]) {
return _clients[bucket]
}
if (
settings.filestore.s3.s3BucketCreds &&
settings.filestore.s3.s3BucketCreds[bucket]
) {
_clients[bucket] = new S3(
_clientOptions(settings.filestore.s3.s3BucketCreds[bucket])
)
return _clients[bucket]
}
// no specific credentials for the bucket
if (_clients.default) {
return _clients.default
}
if (settings.filestore.s3.key) {
_clients.default = new S3(_clientOptions())
return _clients.default
}
throw new SettingsError({
message: 'no bucket-specific or default credentials provided',
info: { bucket }
})
}
function _clientOptions(bucketCredentials) {
const options = {}
if (bucketCredentials) {
options.credentials = {
accessKeyId: bucketCredentials.auth_key,
secretAccessKey: bucketCredentials.auth_secret
}
} else {
options.credentials = {
accessKeyId: settings.filestore.s3.key,
secretAccessKey: settings.filestore.s3.secret
}
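Read together, _client(bucket) and _clientOptions(bucketCredentials) implement a memoised lookup: bucket-specific credentials win, then a shared default client built from settings.filestore.s3.key and secret, and otherwise a SettingsError. A condensed, self-contained sketch of that selection logic (clientFor is a stand-in name; endpoint and other client options the real helper may also set are left out):

const S3 = require('aws-sdk/clients/s3')

const clients = {} // S3 clients cached by bucket name, plus a shared 'default'

function clientFor(s3Settings, bucket) {
  if (clients[bucket]) {
    return clients[bucket]
  }
  const bucketCreds = s3Settings.s3BucketCreds && s3Settings.s3BucketCreds[bucket]
  if (bucketCreds) {
    // bucket-specific credentials take precedence
    clients[bucket] = new S3({
      credentials: {
        accessKeyId: bucketCreds.auth_key,
        secretAccessKey: bucketCreds.auth_secret
      }
    })
    return clients[bucket]
  }
  if (clients.default) {
    return clients.default
  }
  if (s3Settings.key) {
    // fall back to one shared client built from the default key/secret
    clients.default = new S3({
      credentials: {
        accessKeyId: s3Settings.key,
        secretAccessKey: s3Settings.secret
      }
    })
    return clients.default
  }
  // the real code throws SettingsError with the bucket name attached
  throw new Error(`no bucket-specific or default credentials provided for bucket ${bucket}`)
}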

View file

@@ -7,16 +7,12 @@ const SandboxedModule = require('sandboxed-module')
const Errors = require('../../../app/js/Errors')
describe('S3PersistorManagerTests', function() {
const settings = {
filestore: {
backend: 's3',
s3: {
secret: 'secret',
key: 'this_key'
},
stores: {
user_files: 'sl_user_files'
}
const defaultS3Key = 'frog'
const defaultS3Secret = 'prince'
const defaultS3Credentials = {
credentials: {
accessKeyId: defaultS3Key,
secretAccessKey: defaultS3Secret
}
}
const filename = '/wombat/potato.tex'
@@ -42,9 +38,23 @@ describe('S3PersistorManagerTests', function() {
S3ReadStream,
S3NotFoundError,
FileNotFoundError,
EmptyPromise
EmptyPromise,
settings
beforeEach(function() {
settings = {
filestore: {
backend: 's3',
s3: {
secret: defaultS3Secret,
key: defaultS3Key
},
stores: {
user_files: 'sl_user_files'
}
}
}
EmptyPromise = {
promise: sinon.stub().resolves()
}
@@ -131,12 +141,7 @@ describe('S3PersistorManagerTests', function() {
})
it('sets the AWS client up with credentials from settings', function() {
expect(S3).to.have.been.calledWith({
credentials: {
accessKeyId: settings.filestore.s3.key,
secretAccessKey: settings.filestore.s3.secret
}
})
expect(S3).to.have.been.calledWith(defaultS3Credentials)
})
it('fetches the right key from the right bucket', function() {
@@ -178,6 +183,84 @@ describe('S3PersistorManagerTests', function() {
})
})
describe('when there are alternative credentials', function() {
let stream
const alternativeSecret = 'giraffe'
const alternativeKey = 'hippo'
const alternativeS3Credentials = {
credentials: {
accessKeyId: alternativeKey,
secretAccessKey: alternativeSecret
}
}
beforeEach(async function() {
settings.filestore.s3.s3BucketCreds = {}
settings.filestore.s3.s3BucketCreds[bucket] = {
auth_key: alternativeKey,
auth_secret: alternativeSecret
}
stream = await S3PersistorManager.promises.getFileStream(bucket, key)
})
it('returns a stream', function() {
expect(stream).to.equal('s3Stream')
})
it('sets the AWS client up with the alternative credentials', function() {
expect(S3).to.have.been.calledWith(alternativeS3Credentials)
})
it('fetches the right key from the right bucket', function() {
expect(S3Client.getObject).to.have.been.calledWith({
Bucket: bucket,
Key: key
})
})
it('caches the credentials', async function() {
stream = await S3PersistorManager.promises.getFileStream(bucket, key)
expect(S3).to.have.been.calledOnceWith(alternativeS3Credentials)
})
it('uses the default credentials for an unknown bucket', async function() {
stream = await S3PersistorManager.promises.getFileStream(
'anotherBucket',
key
)
expect(S3).to.have.been.calledTwice
expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials)
expect(S3.secondCall).to.have.been.calledWith(defaultS3Credentials)
})
it('caches the default credentials', async function() {
stream = await S3PersistorManager.promises.getFileStream(
'anotherBucket',
key
)
stream = await S3PersistorManager.promises.getFileStream(
'anotherBucket',
key
)
expect(S3).to.have.been.calledTwice
expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials)
expect(S3.secondCall).to.have.been.calledWith(defaultS3Credentials)
})
it('throws an error if there are no credentials for the bucket', async function() {
delete settings.filestore.s3.key
delete settings.filestore.s3.secret
await expect(
S3PersistorManager.promises.getFileStream('anotherBucket', key)
).to.eventually.be.rejected.and.be.an.instanceOf(Errors.SettingsError)
})
})
describe("when the file doesn't exist", function() {
let error, stream