From 2bb2caf7b3fa8aac1a6148ddbb23f8b4c12d0efc Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 7 Jan 2020 09:46:53 +0000 Subject: [PATCH 1/4] Clean up settings tests --- .../filestore/test/unit/js/SettingsTests.js | 32 ++++++------------- 1 file changed, 9 insertions(+), 23 deletions(-) diff --git a/services/filestore/test/unit/js/SettingsTests.js b/services/filestore/test/unit/js/SettingsTests.js index 472c6d1179..4563449fde 100644 --- a/services/filestore/test/unit/js/SettingsTests.js +++ b/services/filestore/test/unit/js/SettingsTests.js @@ -1,33 +1,19 @@ -/* eslint-disable - camelcase, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const { assert } = require('chai') -const sinon = require('sinon') const chai = require('chai') -const should = chai.should() const { expect } = chai -const modulePath = '../../../app/js/BucketController.js' -describe('Settings', () => - describe('s3', () => - it('should use JSONified env var if present', function(done) { - const s3_settings = { +describe('Settings', function() { + describe('s3', function() { + it('should use JSONified env var if present', function() { + const s3Settings = { bucket1: { auth_key: 'bucket1_key', auth_secret: 'bucket1_secret' } } - process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3_settings) + process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3Settings) const settings = require('settings-sharelatex') - expect(settings.filestore.s3BucketCreds).to.deep.equal(s3_settings) - return done() - }))) + expect(settings.filestore.s3BucketCreds).to.deep.equal(s3Settings) + }) + }) +}) From 6cc5d94f13bb8c5328579e8226171f2c85addc38 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 7 Jan 2020 10:24:46 +0000 Subject: [PATCH 2/4] Re-add bucket-specific credentials support for S3 --- services/filestore/app/js/Errors.js | 4 +- .../filestore/app/js/S3PersistorManager.js | 77 +++++++++--- .../test/unit/js/S3PersistorManagerTests.js | 117 +++++++++++++++--- 3 files changed, 166 insertions(+), 32 deletions(-) diff --git a/services/filestore/app/js/Errors.js b/services/filestore/app/js/Errors.js index 65af6dc056..06091a13ba 100644 --- a/services/filestore/app/js/Errors.js +++ b/services/filestore/app/js/Errors.js @@ -23,6 +23,7 @@ class ReadError extends BackwardCompatibleError {} class HealthCheckError extends BackwardCompatibleError {} class ConversionsDisabledError extends BackwardCompatibleError {} class ConversionError extends BackwardCompatibleError {} +class SettingsError extends BackwardCompatibleError {} class FailedCommandError extends OError { constructor(command, code, stdout, stderr) { @@ -46,5 +47,6 @@ module.exports = { WriteError, ReadError, ConversionError, - HealthCheckError + HealthCheckError, + SettingsError } diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3PersistorManager.js index d0729b80b9..36c49d35bd 100644 --- a/services/filestore/app/js/S3PersistorManager.js +++ b/services/filestore/app/js/S3PersistorManager.js @@ -12,7 +12,12 @@ const fs = require('fs') const S3 = require('aws-sdk/clients/s3') const { URL } = require('url') const { callbackify } = require('util') -const { WriteError, ReadError, NotFoundError } = require('./Errors') +const { + WriteError, + ReadError, + NotFoundError, + 
SettingsError +} = require('./Errors') module.exports = { sendFile: callbackify(sendFile), @@ -37,8 +42,6 @@ module.exports = { } } -const _client = new S3(_defaultOptions()) - async function sendFile(bucketName, key, fsPath) { let readStream try { @@ -61,7 +64,7 @@ async function sendStream(bucketName, key, readStream) { metrics.count('s3.egress', meteredStream.bytes) }) - const response = await _client + const response = await _client(bucketName) .upload({ Bucket: bucketName, Key: key, @@ -92,7 +95,9 @@ async function getFileStream(bucketName, key, opts) { } return new Promise((resolve, reject) => { - const stream = _client.getObject(params).createReadStream() + const stream = _client(bucketName) + .getObject(params) + .createReadStream() const meteredStream = meter() meteredStream.on('finish', () => { @@ -115,7 +120,7 @@ async function deleteDirectory(bucketName, key) { let response try { - response = await _client + response = await _client(bucketName) .listObjects({ Bucket: bucketName, Prefix: key }) .promise() } catch (err) { @@ -130,7 +135,7 @@ async function deleteDirectory(bucketName, key) { const objects = response.Contents.map(item => ({ Key: item.Key })) if (objects.length) { try { - await _client + await _client(bucketName) .deleteObjects({ Bucket: bucketName, Delete: { @@ -152,7 +157,7 @@ async function deleteDirectory(bucketName, key) { async function getFileSize(bucketName, key) { try { - const response = await _client + const response = await _client(bucketName) .headObject({ Bucket: bucketName, Key: key }) .promise() return response.ContentLength @@ -168,7 +173,9 @@ async function getFileSize(bucketName, key) { async function deleteFile(bucketName, key) { try { - await _client.deleteObject({ Bucket: bucketName, Key: key }).promise() + await _client(bucketName) + .deleteObject({ Bucket: bucketName, Key: key }) + .promise() } catch (err) { throw _wrapError( err, @@ -186,7 +193,9 @@ async function copyFile(bucketName, sourceKey, destKey) { CopySource: `${bucketName}/${sourceKey}` } try { - await _client.copyObject(params).promise() + await _client(bucketName) + .copyObject(params) + .promise() } catch (err) { throw _wrapError(err, 'failed to copy file in S3', params, WriteError) } @@ -211,7 +220,7 @@ async function checkIfFileExists(bucketName, key) { async function directorySize(bucketName, key) { try { - const response = await _client + const response = await _client(bucketName) .listObjects({ Bucket: bucketName, Prefix: key }) .promise() @@ -240,9 +249,49 @@ function _wrapError(error, message, params, ErrorType) { } } -function _defaultOptions() { - const options = { - credentials: { +const _clients = {} + +function _client(bucket) { + if (_clients[bucket]) { + return _clients[bucket] + } + + if ( + settings.filestore.s3.s3BucketCreds && + settings.filestore.s3.s3BucketCreds[bucket] + ) { + _clients[bucket] = new S3( + _clientOptions(settings.filestore.s3.s3BucketCreds[bucket]) + ) + return _clients[bucket] + } + + // no specific credentials for the bucket + if (_clients.default) { + return _clients.default + } + + if (settings.filestore.s3.key) { + _clients.default = new S3(_clientOptions()) + return _clients.default + } + + throw new SettingsError({ + message: 'no bucket-specific or default credentials provided', + info: { bucket } + }) +} + +function _clientOptions(bucketCredentials) { + const options = {} + + if (bucketCredentials) { + options.credentials = { + accessKeyId: bucketCredentials.auth_key, + secretAccessKey: bucketCredentials.auth_secret + } + } else { + 
options.credentials = { accessKeyId: settings.filestore.s3.key, secretAccessKey: settings.filestore.s3.secret } diff --git a/services/filestore/test/unit/js/S3PersistorManagerTests.js b/services/filestore/test/unit/js/S3PersistorManagerTests.js index 2faa8b52c7..bdac7c8232 100644 --- a/services/filestore/test/unit/js/S3PersistorManagerTests.js +++ b/services/filestore/test/unit/js/S3PersistorManagerTests.js @@ -7,16 +7,12 @@ const SandboxedModule = require('sandboxed-module') const Errors = require('../../../app/js/Errors') describe('S3PersistorManagerTests', function() { - const settings = { - filestore: { - backend: 's3', - s3: { - secret: 'secret', - key: 'this_key' - }, - stores: { - user_files: 'sl_user_files' - } + const defaultS3Key = 'frog' + const defaultS3Secret = 'prince' + const defaultS3Credentials = { + credentials: { + accessKeyId: defaultS3Key, + secretAccessKey: defaultS3Secret } } const filename = '/wombat/potato.tex' @@ -42,9 +38,23 @@ describe('S3PersistorManagerTests', function() { S3ReadStream, S3NotFoundError, FileNotFoundError, - EmptyPromise + EmptyPromise, + settings beforeEach(function() { + settings = { + filestore: { + backend: 's3', + s3: { + secret: defaultS3Secret, + key: defaultS3Key + }, + stores: { + user_files: 'sl_user_files' + } + } + } + EmptyPromise = { promise: sinon.stub().resolves() } @@ -131,12 +141,7 @@ describe('S3PersistorManagerTests', function() { }) it('sets the AWS client up with credentials from settings', function() { - expect(S3).to.have.been.calledWith({ - credentials: { - accessKeyId: settings.filestore.s3.key, - secretAccessKey: settings.filestore.s3.secret - } - }) + expect(S3).to.have.been.calledWith(defaultS3Credentials) }) it('fetches the right key from the right bucket', function() { @@ -178,6 +183,84 @@ describe('S3PersistorManagerTests', function() { }) }) + describe('when there are alternative credentials', function() { + let stream + const alternativeSecret = 'giraffe' + const alternativeKey = 'hippo' + const alternativeS3Credentials = { + credentials: { + accessKeyId: alternativeKey, + secretAccessKey: alternativeSecret + } + } + + beforeEach(async function() { + settings.filestore.s3.s3BucketCreds = {} + settings.filestore.s3.s3BucketCreds[bucket] = { + auth_key: alternativeKey, + auth_secret: alternativeSecret + } + + stream = await S3PersistorManager.promises.getFileStream(bucket, key) + }) + + it('returns a stream', function() { + expect(stream).to.equal('s3Stream') + }) + + it('sets the AWS client up with the alternative credentials', function() { + expect(S3).to.have.been.calledWith(alternativeS3Credentials) + }) + + it('fetches the right key from the right bucket', function() { + expect(S3Client.getObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key + }) + }) + + it('caches the credentials', async function() { + stream = await S3PersistorManager.promises.getFileStream(bucket, key) + + expect(S3).to.have.been.calledOnceWith(alternativeS3Credentials) + }) + + it('uses the default credentials for an unknown bucket', async function() { + stream = await S3PersistorManager.promises.getFileStream( + 'anotherBucket', + key + ) + + expect(S3).to.have.been.calledTwice + expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials) + expect(S3.secondCall).to.have.been.calledWith(defaultS3Credentials) + }) + + it('caches the default credentials', async function() { + stream = await S3PersistorManager.promises.getFileStream( + 'anotherBucket', + key + ) + stream = await 
S3PersistorManager.promises.getFileStream( + 'anotherBucket', + key + ) + + expect(S3).to.have.been.calledTwice + expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials) + expect(S3.secondCall).to.have.been.calledWith(defaultS3Credentials) + }) + + it('throws an error if there are no credentials for the bucket', async function() { + delete settings.filestore.s3.key + delete settings.filestore.s3.secret + + await expect( + S3PersistorManager.promises.getFileStream('anotherBucket', key) + ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.SettingsError) + }) + }) + describe("when the file doesn't exist", function() { let error, stream From 80d41cf51bf7d3ed7f7202793c41454fc31b6b82 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 7 Jan 2020 15:05:51 +0000 Subject: [PATCH 3/4] Move bucket-specific file endpoint into FileController --- services/filestore/app.js | 7 +- services/filestore/app/js/BucketController.js | 48 --------- services/filestore/app/js/KeyBuilder.js | 7 ++ .../filestore/app/js/S3PersistorManager.js | 5 + .../test/acceptance/js/FilestoreTests.js | 46 +++++++- .../test/unit/js/BucketControllerTests.js | 100 ------------------ 6 files changed, 62 insertions(+), 151 deletions(-) delete mode 100644 services/filestore/app/js/BucketController.js delete mode 100644 services/filestore/test/unit/js/BucketControllerTests.js diff --git a/services/filestore/app.js b/services/filestore/app.js index 232c5b24bc..9256cb0029 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -9,7 +9,6 @@ const express = require('express') const bodyParser = require('body-parser') const fileController = require('./app/js/FileController') -const bucketController = require('./app/js/BucketController') const keyBuilder = require('./app/js/KeyBuilder') const healthCheckController = require('./app/js/HealthCheckController') @@ -114,7 +113,11 @@ app.get( fileController.directorySize ) -app.get('/bucket/:bucket/key/*', bucketController.getFile) +app.get( + '/bucket/:bucket/key/*', + keyBuilder.bucketFileKeyMiddleware, + fileController.getFile +) app.get('/heapdump', (req, res, next) => require('heapdump').writeSnapshot( diff --git a/services/filestore/app/js/BucketController.js b/services/filestore/app/js/BucketController.js deleted file mode 100644 index 46f69679aa..0000000000 --- a/services/filestore/app/js/BucketController.js +++ /dev/null @@ -1,48 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let BucketController -const settings = require('settings-sharelatex') -const logger = require('logger-sharelatex') -const FileHandler = require('./FileHandler') -const metrics = require('metrics-sharelatex') -const Errors = require('./Errors') - -module.exports = BucketController = { - getFile(req, res) { - const { bucket } = req.params - const key = req.params[0] - const credentials = - settings.filestore.s3BucketCreds != null - ? 
settings.filestore.s3BucketCreds[bucket] - : undefined - const options = { - key, - bucket, - credentials - } - metrics.inc(`${bucket}.getFile`) - logger.log({ key, bucket }, 'receiving request to get file from bucket') - return FileHandler.getFile(bucket, key, options, function(err, fileStream) { - if (err != null) { - logger.err({ err, key, bucket }, 'problem getting file from bucket') - if (err instanceof Errors.NotFoundError) { - return res.send(404) - } else { - return res.send(500) - } - } else { - logger.log({ key, bucket }, 'sending bucket file to response') - return fileStream.pipe(res) - } - }) - } -} diff --git a/services/filestore/app/js/KeyBuilder.js b/services/filestore/app/js/KeyBuilder.js index 8de7c0be2a..66cf563014 100644 --- a/services/filestore/app/js/KeyBuilder.js +++ b/services/filestore/app/js/KeyBuilder.js @@ -6,6 +6,7 @@ module.exports = { userFileKeyMiddleware, publicFileKeyMiddleware, publicProjectKeyMiddleware, + bucketFileKeyMiddleware, templateFileKeyMiddleware } @@ -48,6 +49,12 @@ function publicFileKeyMiddleware(req, res, next) { next() } +function bucketFileKeyMiddleware(req, res, next) { + req.bucket = req.params.bucket + req.key = req.params[0] + next() +} + function templateFileKeyMiddleware(req, res, next) { const { template_id: templateId, diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3PersistorManager.js index 36c49d35bd..00ed46379b 100644 --- a/services/filestore/app/js/S3PersistorManager.js +++ b/services/filestore/app/js/S3PersistorManager.js @@ -303,5 +303,10 @@ function _clientOptions(bucketCredentials) { options.sslEnabled = endpoint.protocol === 'https' } + // path-style access is only used for acceptance tests + if (settings.filestore.s3.pathStyle) { + options.s3ForcePathStyle = true + } + return options } diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 9260b1bd62..d7dfbce57c 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -7,6 +7,7 @@ const FilestoreApp = require('./FilestoreApp') const rp = require('request-promise-native').defaults({ resolveWithFullResponse: true }) +const S3 = require('aws-sdk/clients/s3') const Stream = require('stream') const request = require('request') const { promisify } = require('util') @@ -43,7 +44,8 @@ if (process.env.AWS_ACCESS_KEY_ID) { s3: { key: process.env.AWS_ACCESS_KEY_ID, secret: process.env.AWS_SECRET_ACCESS_KEY, - endpoint: process.env.AWS_S3_ENDPOINT + endpoint: process.env.AWS_S3_ENDPOINT, + pathStyle: true }, stores: { user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, @@ -288,6 +290,48 @@ describe('Filestore', function() { }) }) + if (backend === 'S3Persistor') { + describe('with a file in a specific bucket', function() { + let constantFileContents, fileId, fileUrl, bucketName + + beforeEach(async function() { + constantFileContents = `This is a file in a different S3 bucket ${Math.random()}` + fileId = Math.random().toString() + bucketName = Math.random().toString() + fileUrl = `${filestoreUrl}/bucket/${bucketName}/key/${fileId}` + + const s3ClientSettings = { + credentials: { + accessKeyId: 'fake', + secretAccessKey: 'fake' + }, + endpoint: process.env.AWS_S3_ENDPOINT, + sslEnabled: false, + s3ForcePathStyle: true + } + + const s3 = new S3(s3ClientSettings) + await s3 + .createBucket({ + Bucket: bucketName + }) + .promise() + await s3 + .upload({ + Bucket: bucketName, + Key: fileId, 
+ Body: constantFileContents + }) + .promise() + }) + + it('should get the file from the specified bucket', async function() { + const response = await rp.get(fileUrl) + expect(response.body).to.equal(constantFileContents) + }) + }) + } + describe('with a pdf file', function() { let fileId, fileUrl, localFileSize const localFileReadPath = Path.resolve( diff --git a/services/filestore/test/unit/js/BucketControllerTests.js b/services/filestore/test/unit/js/BucketControllerTests.js deleted file mode 100644 index ef74b3f6c0..0000000000 --- a/services/filestore/test/unit/js/BucketControllerTests.js +++ /dev/null @@ -1,100 +0,0 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const { assert } = require('chai') -const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() -const { expect } = chai -const modulePath = '../../../app/js/BucketController.js' -const SandboxedModule = require('sandboxed-module') - -describe('BucketController', function() { - beforeEach(function() { - this.PersistorManager = { - sendStream: sinon.stub(), - copyFile: sinon.stub(), - deleteFile: sinon.stub() - } - - this.settings = { - s3: { - buckets: { - user_files: 'user_files' - } - }, - filestore: { - backend: 's3', - s3: { - secret: 'secret', - key: 'this_key' - } - } - } - - this.FileHandler = { - getFile: sinon.stub(), - deleteFile: sinon.stub(), - insertFile: sinon.stub(), - getDirectorySize: sinon.stub() - } - this.LocalFileWriter = {} - this.controller = SandboxedModule.require(modulePath, { - requires: { - './LocalFileWriter': this.LocalFileWriter, - './FileHandler': this.FileHandler, - './PersistorManager': this.PersistorManager, - 'settings-sharelatex': this.settings, - 'metrics-sharelatex': { - inc() {} - }, - 'logger-sharelatex': { - log() {}, - err() {} - } - } - }) - this.project_id = 'project_id' - this.file_id = 'file_id' - this.bucket = 'user_files' - this.key = `${this.project_id}/${this.file_id}` - this.req = { - query: {}, - params: { - bucket: this.bucket, - 0: this.key - }, - headers: {} - } - this.res = { setHeader() {} } - return (this.fileStream = {}) - }) - - return describe('getFile', function() { - it('should pipe the stream', function(done) { - this.FileHandler.getFile.callsArgWith(3, null, this.fileStream) - this.fileStream.pipe = res => { - res.should.equal(this.res) - return done() - } - return this.controller.getFile(this.req, this.res) - }) - - return it('should send a 500 if there is a problem', function(done) { - this.FileHandler.getFile.callsArgWith(3, 'error') - this.res.send = code => { - code.should.equal(500) - return done() - } - return this.controller.getFile(this.req, this.res) - }) - }) -}) From 3bf51cac67a5675352fd051e6378e6a3209310f9 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 8 Jan 2020 09:17:30 +0000 Subject: [PATCH 4/4] Improve naming on internal '_client' method and use Map over object --- .../filestore/app/js/S3PersistorManager.js | 33 ++++++++++--------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3PersistorManager.js index 00ed46379b..4ccc1642a4 100644 --- a/services/filestore/app/js/S3PersistorManager.js +++ 
b/services/filestore/app/js/S3PersistorManager.js
@@ -64,7 +64,7 @@ async function sendStream(bucketName, key, readStream) {
     metrics.count('s3.egress', meteredStream.bytes)
   })
 
-  const response = await _client(bucketName)
+  const response = await _getClientForBucket(bucketName)
     .upload({
       Bucket: bucketName,
       Key: key,
@@ -95,7 +95,7 @@ async function getFileStream(bucketName, key, opts) {
   }
 
   return new Promise((resolve, reject) => {
-    const stream = _client(bucketName)
+    const stream = _getClientForBucket(bucketName)
       .getObject(params)
       .createReadStream()
 
@@ -120,7 +120,7 @@ async function deleteDirectory(bucketName, key) {
   let response
 
   try {
-    response = await _client(bucketName)
+    response = await _getClientForBucket(bucketName)
       .listObjects({ Bucket: bucketName, Prefix: key })
       .promise()
   } catch (err) {
@@ -135,7 +135,7 @@ async function deleteDirectory(bucketName, key) {
   const objects = response.Contents.map(item => ({ Key: item.Key }))
   if (objects.length) {
     try {
-      await _client(bucketName)
+      await _getClientForBucket(bucketName)
        .deleteObjects({
          Bucket: bucketName,
          Delete: {
@@ -157,7 +157,7 @@ async function deleteDirectory(bucketName, key) {
 
 async function getFileSize(bucketName, key) {
   try {
-    const response = await _client(bucketName)
+    const response = await _getClientForBucket(bucketName)
       .headObject({ Bucket: bucketName, Key: key })
       .promise()
     return response.ContentLength
@@ -173,7 +173,7 @@ async function getFileSize(bucketName, key) {
 
 async function deleteFile(bucketName, key) {
   try {
-    await _client(bucketName)
+    await _getClientForBucket(bucketName)
       .deleteObject({ Bucket: bucketName, Key: key })
       .promise()
   } catch (err) {
@@ -193,7 +193,7 @@ async function copyFile(bucketName, sourceKey, destKey) {
     CopySource: `${bucketName}/${sourceKey}`
   }
   try {
-    await _client(bucketName)
+    await _getClientForBucket(bucketName)
       .copyObject(params)
       .promise()
   } catch (err) {
@@ -220,7 +220,7 @@ async function checkIfFileExists(bucketName, key) {
 
 async function directorySize(bucketName, key) {
   try {
-    const response = await _client(bucketName)
+    const response = await _getClientForBucket(bucketName)
       .listObjects({ Bucket: bucketName, Prefix: key })
       .promise()
 
@@ -249,9 +249,10 @@ function _wrapError(error, message, params, ErrorType) {
   }
 }
 
-const _clients = {}
+const _clients = new Map()
+let _defaultClient
 
-function _client(bucket) {
-  if (_clients[bucket]) {
-    return _clients[bucket]
+function _getClientForBucket(bucket) {
+  if (_clients.has(bucket)) {
+    return _clients.get(bucket)
   }
@@ -261,19 +262,20 @@ function _client(bucket) {
     settings.filestore.s3.s3BucketCreds &&
     settings.filestore.s3.s3BucketCreds[bucket]
   ) {
-    _clients[bucket] = new S3(
-      _clientOptions(settings.filestore.s3.s3BucketCreds[bucket])
-    )
-    return _clients[bucket]
+    const client = new S3(
+      _buildClientOptions(settings.filestore.s3.s3BucketCreds[bucket])
+    )
+    _clients.set(bucket, client)
+    return client
   }
 
   // no specific credentials for the bucket
-  if (_clients.default) {
-    return _clients.default
+  if (_defaultClient) {
+    return _defaultClient
   }
 
   if (settings.filestore.s3.key) {
-    _clients.default = new S3(_clientOptions())
-    return _clients.default
+    _defaultClient = new S3(_buildClientOptions())
+    return _defaultClient
   }
   throw new SettingsError({
@@ -282,7 +284,7 @@ function _client(bucket) {
   })
 }
 
-function _clientOptions(bucketCredentials) {
+function _buildClientOptions(bucketCredentials) {
   const options = {}
 
   if (bucketCredentials) {
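
Note: the sketch below is illustrative only and is not part of the patch series.
It shows the shape of the S3_BUCKET_CREDENTIALS environment variable that this
series introduces: the JSON structure (bucket name mapped to auth_key and
auth_secret) is taken from the settings test in patch 1 and the per-bucket
client lookup in patches 2 and 4. The bucket name reuses the sl_user_files
store from the test settings; the credential values are placeholders.

    // Hypothetical deployment configuration (values are placeholders).
    // Each entry maps a bucket name to credentials used only for that bucket;
    // the settings layer parses this JSON into per-bucket credentials.
    process.env.S3_BUCKET_CREDENTIALS = JSON.stringify({
      sl_user_files: {
        auth_key: 'bucket-specific-access-key-id',
        auth_secret: 'bucket-specific-secret-access-key'
      }
    })

    // Buckets without an entry fall back to the default client built from
    // settings.filestore.s3.key / settings.filestore.s3.secret; if neither a
    // bucket-specific nor a default credential is configured, the persistor
    // throws Errors.SettingsError (see _getClientForBucket above).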