
Merge pull request from overleaf/spd-decaf-cleanup-11

Restore bucket-specific settings and clean up related endpoint
Commit 9d52c521c4 — Simon Detheridge, 2020-01-08 09:29:41 +00:00 (committed via GitHub)
9 changed files with 238 additions and 206 deletions
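
The change re-introduces per-bucket S3 credentials, looked up from settings at request time. Below is a minimal sketch of the configuration shape this restores, inferred from the settings keys read in the diffs that follow; the surrounding settings-file layout is an assumption, not part of this commit:

// Assumed shape of the bucket-credentials configuration. The persistor
// reads settings.filestore.s3.s3BucketCreds[bucket].auth_key and
// .auth_secret; the settings test expects the JSON parsed from the
// S3_BUCKET_CREDENTIALS env var, e.g.
// '{"bucket1":{"auth_key":"bucket1_key","auth_secret":"bucket1_secret"}}'
const bucketCreds = JSON.parse(process.env.S3_BUCKET_CREDENTIALS || '{}')

const settings = {
  filestore: {
    s3: {
      key: process.env.AWS_ACCESS_KEY_ID, // default credentials
      secret: process.env.AWS_SECRET_ACCESS_KEY,
      s3BucketCreds: bucketCreds // per-bucket overrides
    }
  }
}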

File: app.js

@@ -9,7 +9,6 @@ const express = require('express')
const bodyParser = require('body-parser')
const fileController = require('./app/js/FileController')
const bucketController = require('./app/js/BucketController')
const keyBuilder = require('./app/js/KeyBuilder')
const healthCheckController = require('./app/js/HealthCheckController')
@@ -114,7 +113,11 @@ app.get(
fileController.directorySize
)
-app.get('/bucket/:bucket/key/*', bucketController.getFile)
+app.get(
+  '/bucket/:bucket/key/*',
+  keyBuilder.bucketFileKeyMiddleware,
+  fileController.getFile
+)
app.get('/heapdump', (req, res, next) =>
require('heapdump').writeSnapshot(
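
The dedicated BucketController is gone: the bucket route now reuses the generic file controller behind a small key-building middleware (see the KeyBuilder hunk below). A minimal sketch of the pattern, with a hypothetical handler standing in for fileController.getFile:

const express = require('express')
const app = express()

// same body as keyBuilder.bucketFileKeyMiddleware in the diff below
function bucketFileKeyMiddleware(req, res, next) {
  req.bucket = req.params.bucket // the ':bucket' segment of the URL
  req.key = req.params[0] // everything matched by the trailing '*'
  next()
}

// hypothetical stand-in for fileController.getFile
app.get('/bucket/:bucket/key/*', bucketFileKeyMiddleware, (req, res) => {
  res.json({ bucket: req.bucket, key: req.key })
})

app.listen(3000)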

File: app/js/BucketController.js (deleted)

@@ -1,48 +0,0 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let BucketController
const settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const FileHandler = require('./FileHandler')
const metrics = require('metrics-sharelatex')
const Errors = require('./Errors')
module.exports = BucketController = {
getFile(req, res) {
const { bucket } = req.params
const key = req.params[0]
const credentials =
settings.filestore.s3BucketCreds != null
? settings.filestore.s3BucketCreds[bucket]
: undefined
const options = {
key,
bucket,
credentials
}
metrics.inc(`${bucket}.getFile`)
logger.log({ key, bucket }, 'receiving request to get file from bucket')
return FileHandler.getFile(bucket, key, options, function(err, fileStream) {
if (err != null) {
logger.err({ err, key, bucket }, 'problem getting file from bucket')
if (err instanceof Errors.NotFoundError) {
return res.send(404)
} else {
return res.send(500)
}
} else {
logger.log({ key, bucket }, 'sending bucket file to response')
return fileStream.pipe(res)
}
})
}
}

File: app/js/Errors.js

@@ -23,6 +23,7 @@ class ReadError extends BackwardCompatibleError {}
class HealthCheckError extends BackwardCompatibleError {}
class ConversionsDisabledError extends BackwardCompatibleError {}
class ConversionError extends BackwardCompatibleError {}
class SettingsError extends BackwardCompatibleError {}
class FailedCommandError extends OError {
constructor(command, code, stdout, stderr) {
@@ -46,5 +47,6 @@ module.exports = {
WriteError,
ReadError,
ConversionError,
-  HealthCheckError
+  HealthCheckError,
+  SettingsError
}
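
SettingsError follows the existing BackwardCompatibleError convention of taking a message plus structured info, which is how the S3 persistor later in this commit constructs it. A sketch of raising it; requireBucketCredentials is a hypothetical helper for illustration, and the require path is assumed:

const { SettingsError } = require('./app/js/Errors')

// hypothetical helper showing the new error's construction
function requireBucketCredentials(settings, bucket) {
  const s3 = settings.filestore.s3
  const creds = s3.s3BucketCreds && s3.s3BucketCreds[bucket]
  if (!creds && !s3.key) {
    throw new SettingsError({
      message: 'no bucket-specific or default credentials provided',
      info: { bucket }
    })
  }
  return creds // may be undefined, meaning: use the default client
}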

File: app/js/KeyBuilder.js

@@ -6,6 +6,7 @@ module.exports = {
userFileKeyMiddleware,
publicFileKeyMiddleware,
publicProjectKeyMiddleware,
bucketFileKeyMiddleware,
templateFileKeyMiddleware
}
@@ -48,6 +49,12 @@ function publicFileKeyMiddleware(req, res, next) {
next()
}
function bucketFileKeyMiddleware(req, res, next) {
req.bucket = req.params.bucket
req.key = req.params[0]
next()
}
function templateFileKeyMiddleware(req, res, next) {
const {
template_id: templateId,

File: app/js/S3PersistorManager.js

@@ -12,7 +12,12 @@ const fs = require('fs')
const S3 = require('aws-sdk/clients/s3')
const { URL } = require('url')
const { callbackify } = require('util')
-const { WriteError, ReadError, NotFoundError } = require('./Errors')
+const {
+  WriteError,
+  ReadError,
+  NotFoundError,
+  SettingsError
+} = require('./Errors')
module.exports = {
sendFile: callbackify(sendFile),
@@ -37,8 +42,6 @@ module.exports = {
}
}
-const _client = new S3(_defaultOptions())
async function sendFile(bucketName, key, fsPath) {
let readStream
try {
@@ -61,7 +64,7 @@ async function sendStream(bucketName, key, readStream) {
metrics.count('s3.egress', meteredStream.bytes)
})
-  const response = await _client
+  const response = await _getClientForBucket(bucketName)
.upload({
Bucket: bucketName,
Key: key,
@@ -92,7 +95,9 @@ async function getFileStream(bucketName, key, opts) {
}
return new Promise((resolve, reject) => {
-    const stream = _client.getObject(params).createReadStream()
+    const stream = _getClientForBucket(bucketName)
+      .getObject(params)
+      .createReadStream()
const meteredStream = meter()
meteredStream.on('finish', () => {
@@ -115,7 +120,7 @@ async function deleteDirectory(bucketName, key) {
let response
try {
-    response = await _client
+    response = await _getClientForBucket(bucketName)
.listObjects({ Bucket: bucketName, Prefix: key })
.promise()
} catch (err) {
@@ -130,7 +135,7 @@ async function deleteDirectory(bucketName, key) {
const objects = response.Contents.map(item => ({ Key: item.Key }))
if (objects.length) {
try {
-      await _client
+      await _getClientForBucket(bucketName)
.deleteObjects({
Bucket: bucketName,
Delete: {
@@ -152,7 +157,7 @@ async function deleteDirectory(bucketName, key) {
async function getFileSize(bucketName, key) {
try {
-    const response = await _client
+    const response = await _getClientForBucket(bucketName)
.headObject({ Bucket: bucketName, Key: key })
.promise()
return response.ContentLength
@@ -168,7 +173,9 @@ async function getFileSize(bucketName, key) {
async function deleteFile(bucketName, key) {
try {
-    await _client.deleteObject({ Bucket: bucketName, Key: key }).promise()
+    await _getClientForBucket(bucketName)
+      .deleteObject({ Bucket: bucketName, Key: key })
+      .promise()
} catch (err) {
throw _wrapError(
err,
@@ -186,7 +193,9 @@ async function copyFile(bucketName, sourceKey, destKey) {
CopySource: `${bucketName}/${sourceKey}`
}
try {
-    await _client.copyObject(params).promise()
+    await _getClientForBucket(bucketName)
+      .copyObject(params)
+      .promise()
} catch (err) {
throw _wrapError(err, 'failed to copy file in S3', params, WriteError)
}
@@ -211,7 +220,7 @@ async function checkIfFileExists(bucketName, key) {
async function directorySize(bucketName, key) {
try {
-    const response = await _client
+    const response = await _getClientForBucket(bucketName)
.listObjects({ Bucket: bucketName, Prefix: key })
.promise()
@@ -240,9 +249,50 @@ function _wrapError(error, message, params, ErrorType) {
}
}
-function _defaultOptions() {
-  const options = {
-    credentials: {
const _clients = new Map()
let _defaultClient
function _getClientForBucket(bucket) {
if (_clients[bucket]) {
return _clients[bucket]
}
if (
settings.filestore.s3.s3BucketCreds &&
settings.filestore.s3.s3BucketCreds[bucket]
) {
_clients[bucket] = new S3(
_buildClientOptions(settings.filestore.s3.s3BucketCreds[bucket])
)
return _clients[bucket]
}
// no specific credentials for the bucket
if (_defaultClient) {
return _defaultClient
}
if (settings.filestore.s3.key) {
_defaultClient = new S3(_buildClientOptions())
return _defaultClient
}
throw new SettingsError({
message: 'no bucket-specific or default credentials provided',
info: { bucket }
})
}
function _buildClientOptions(bucketCredentials) {
const options = {}
if (bucketCredentials) {
options.credentials = {
accessKeyId: bucketCredentials.auth_key,
secretAccessKey: bucketCredentials.auth_secret
}
} else {
options.credentials = {
accessKeyId: settings.filestore.s3.key,
secretAccessKey: settings.filestore.s3.secret
}
@@ -254,5 +304,10 @@ function _defaultOptions() {
options.sslEnabled = endpoint.protocol === 'https'
}
// path-style access is only used for acceptance tests
if (settings.filestore.s3.pathStyle) {
options.s3ForcePathStyle = true
}
return options
}
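
The per-bucket clients are memoised so each set of credentials creates exactly one S3 instance (the unit tests later in this commit assert this). One detail worth noting: _clients is a Map but is indexed with bracket syntax, which stores ordinary object properties rather than Map entries; that works, but bypasses the Map API. A distilled sketch of the caching pattern using Map.get/set, with error handling omitted:

const S3 = require('aws-sdk/clients/s3')

const clients = new Map()

// distilled version of _getClientForBucket above; settings shape as in
// the commit, default-client construction simplified
function clientForBucket(settings, bucket) {
  if (clients.has(bucket)) {
    return clients.get(bucket)
  }
  const creds =
    settings.filestore.s3.s3BucketCreds &&
    settings.filestore.s3.s3BucketCreds[bucket]
  const client = creds
    ? new S3({
        credentials: {
          accessKeyId: creds.auth_key,
          secretAccessKey: creds.auth_secret
        }
      })
    : new S3() // default client; the real code builds options from settings
  clients.set(bucket, client)
  return client
}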

File: test/acceptance/js/FilestoreTests.js

@@ -7,6 +7,7 @@ const FilestoreApp = require('./FilestoreApp')
const rp = require('request-promise-native').defaults({
resolveWithFullResponse: true
})
const S3 = require('aws-sdk/clients/s3')
const Stream = require('stream')
const request = require('request')
const { promisify } = require('util')
@@ -43,7 +44,8 @@ if (process.env.AWS_ACCESS_KEY_ID) {
s3: {
key: process.env.AWS_ACCESS_KEY_ID,
secret: process.env.AWS_SECRET_ACCESS_KEY,
-      endpoint: process.env.AWS_S3_ENDPOINT
+      endpoint: process.env.AWS_S3_ENDPOINT,
+      pathStyle: true
},
stores: {
user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME,
@@ -288,6 +290,48 @@ describe('Filestore', function() {
})
})
if (backend === 'S3Persistor') {
describe('with a file in a specific bucket', function() {
let constantFileContents, fileId, fileUrl, bucketName
beforeEach(async function() {
constantFileContents = `This is a file in a different S3 bucket ${Math.random()}`
fileId = Math.random().toString()
bucketName = Math.random().toString()
fileUrl = `${filestoreUrl}/bucket/${bucketName}/key/${fileId}`
const s3ClientSettings = {
credentials: {
accessKeyId: 'fake',
secretAccessKey: 'fake'
},
endpoint: process.env.AWS_S3_ENDPOINT,
sslEnabled: false,
s3ForcePathStyle: true
}
const s3 = new S3(s3ClientSettings)
await s3
.createBucket({
Bucket: bucketName
})
.promise()
await s3
.upload({
Bucket: bucketName,
Key: fileId,
Body: constantFileContents
})
.promise()
})
it('should get the file from the specified bucket', async function() {
const response = await rp.get(fileUrl)
expect(response.body).to.equal(constantFileContents)
})
})
}
describe('with a pdf file', function() {
let fileId, fileUrl, localFileSize
const localFileReadPath = Path.resolve(
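
The new acceptance test above exercises the restored endpoint end-to-end against an S3-compatible test server, hence the fake credentials and forced path-style addressing. A short usage sketch of the endpoint itself; the URL shape matches the test, and filestoreUrl, bucket and key are whatever a deployment uses:

const rp = require('request-promise-native')

// fetch an object through filestore's restored bucket endpoint
async function fetchFromBucket(filestoreUrl, bucket, key) {
  return rp.get(`${filestoreUrl}/bucket/${bucket}/key/${key}`)
}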

File: test/unit/js/BucketControllerTests.js (deleted)

@@ -1,100 +0,0 @@
/* eslint-disable
no-return-assign,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const { assert } = require('chai')
const sinon = require('sinon')
const chai = require('chai')
const should = chai.should()
const { expect } = chai
const modulePath = '../../../app/js/BucketController.js'
const SandboxedModule = require('sandboxed-module')
describe('BucketController', function() {
beforeEach(function() {
this.PersistorManager = {
sendStream: sinon.stub(),
copyFile: sinon.stub(),
deleteFile: sinon.stub()
}
this.settings = {
s3: {
buckets: {
user_files: 'user_files'
}
},
filestore: {
backend: 's3',
s3: {
secret: 'secret',
key: 'this_key'
}
}
}
this.FileHandler = {
getFile: sinon.stub(),
deleteFile: sinon.stub(),
insertFile: sinon.stub(),
getDirectorySize: sinon.stub()
}
this.LocalFileWriter = {}
this.controller = SandboxedModule.require(modulePath, {
requires: {
'./LocalFileWriter': this.LocalFileWriter,
'./FileHandler': this.FileHandler,
'./PersistorManager': this.PersistorManager,
'settings-sharelatex': this.settings,
'metrics-sharelatex': {
inc() {}
},
'logger-sharelatex': {
log() {},
err() {}
}
}
})
this.project_id = 'project_id'
this.file_id = 'file_id'
this.bucket = 'user_files'
this.key = `${this.project_id}/${this.file_id}`
this.req = {
query: {},
params: {
bucket: this.bucket,
0: this.key
},
headers: {}
}
this.res = { setHeader() {} }
return (this.fileStream = {})
})
return describe('getFile', function() {
it('should pipe the stream', function(done) {
this.FileHandler.getFile.callsArgWith(3, null, this.fileStream)
this.fileStream.pipe = res => {
res.should.equal(this.res)
return done()
}
return this.controller.getFile(this.req, this.res)
})
return it('should send a 500 if there is a problem', function(done) {
this.FileHandler.getFile.callsArgWith(3, 'error')
this.res.send = code => {
code.should.equal(500)
return done()
}
return this.controller.getFile(this.req, this.res)
})
})
})

File: test/unit/js/S3PersistorManagerTests.js

@@ -7,16 +7,12 @@ const SandboxedModule = require('sandboxed-module')
const Errors = require('../../../app/js/Errors')
describe('S3PersistorManagerTests', function() {
-  const settings = {
-    filestore: {
-      backend: 's3',
-      s3: {
-        secret: 'secret',
-        key: 'this_key'
-      },
-      stores: {
-        user_files: 'sl_user_files'
-      }
+  const defaultS3Key = 'frog'
+  const defaultS3Secret = 'prince'
+  const defaultS3Credentials = {
+    credentials: {
+      accessKeyId: defaultS3Key,
+      secretAccessKey: defaultS3Secret
+    }
+  }
const filename = '/wombat/potato.tex'
@@ -42,9 +38,23 @@ describe('S3PersistorManagerTests', function() {
S3ReadStream,
S3NotFoundError,
FileNotFoundError,
-    EmptyPromise
+    EmptyPromise,
+    settings
beforeEach(function() {
settings = {
filestore: {
backend: 's3',
s3: {
secret: defaultS3Secret,
key: defaultS3Key
},
stores: {
user_files: 'sl_user_files'
}
}
}
EmptyPromise = {
promise: sinon.stub().resolves()
}
@@ -131,12 +141,7 @@ describe('S3PersistorManagerTests', function() {
})
it('sets the AWS client up with credentials from settings', function() {
-      expect(S3).to.have.been.calledWith({
-        credentials: {
-          accessKeyId: settings.filestore.s3.key,
-          secretAccessKey: settings.filestore.s3.secret
-        }
-      })
+      expect(S3).to.have.been.calledWith(defaultS3Credentials)
})
it('fetches the right key from the right bucket', function() {
@@ -178,6 +183,84 @@ describe('S3PersistorManagerTests', function() {
})
})
describe('when there are alternative credentials', function() {
let stream
const alternativeSecret = 'giraffe'
const alternativeKey = 'hippo'
const alternativeS3Credentials = {
credentials: {
accessKeyId: alternativeKey,
secretAccessKey: alternativeSecret
}
}
beforeEach(async function() {
settings.filestore.s3.s3BucketCreds = {}
settings.filestore.s3.s3BucketCreds[bucket] = {
auth_key: alternativeKey,
auth_secret: alternativeSecret
}
stream = await S3PersistorManager.promises.getFileStream(bucket, key)
})
it('returns a stream', function() {
expect(stream).to.equal('s3Stream')
})
it('sets the AWS client up with the alternative credentials', function() {
expect(S3).to.have.been.calledWith(alternativeS3Credentials)
})
it('fetches the right key from the right bucket', function() {
expect(S3Client.getObject).to.have.been.calledWith({
Bucket: bucket,
Key: key
})
})
it('caches the credentials', async function() {
stream = await S3PersistorManager.promises.getFileStream(bucket, key)
expect(S3).to.have.been.calledOnceWith(alternativeS3Credentials)
})
it('uses the default credentials for an unknown bucket', async function() {
stream = await S3PersistorManager.promises.getFileStream(
'anotherBucket',
key
)
expect(S3).to.have.been.calledTwice
expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials)
expect(S3.secondCall).to.have.been.calledWith(defaultS3Credentials)
})
it('caches the default credentials', async function() {
stream = await S3PersistorManager.promises.getFileStream(
'anotherBucket',
key
)
stream = await S3PersistorManager.promises.getFileStream(
'anotherBucket',
key
)
expect(S3).to.have.been.calledTwice
expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials)
expect(S3.secondCall).to.have.been.calledWith(defaultS3Credentials)
})
it('throws an error if there are no credentials for the bucket', async function() {
delete settings.filestore.s3.key
delete settings.filestore.s3.secret
await expect(
S3PersistorManager.promises.getFileStream('anotherBucket', key)
).to.eventually.be.rejected.and.be.an.instanceOf(Errors.SettingsError)
})
})
describe("when the file doesn't exist", function() {
let error, stream

File: test/unit/js/SettingsTests.js

@@ -1,33 +1,19 @@
-/* eslint-disable
-  camelcase,
-  no-unused-vars,
-*/
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS102: Remove unnecessary code created because of implicit returns
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
const { assert } = require('chai')
const sinon = require('sinon')
const chai = require('chai')
const should = chai.should()
const { expect } = chai
const modulePath = '../../../app/js/BucketController.js'
-describe('Settings', () =>
-  describe('s3', () =>
-    it('should use JSONified env var if present', function(done) {
-      const s3_settings = {
+describe('Settings', function() {
+  describe('s3', function() {
+    it('should use JSONified env var if present', function() {
+      const s3Settings = {
bucket1: {
auth_key: 'bucket1_key',
auth_secret: 'bucket1_secret'
}
}
-      process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3_settings)
+      process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3Settings)
const settings = require('settings-sharelatex')
-      expect(settings.filestore.s3BucketCreds).to.deep.equal(s3_settings)
-      return done()
-    })))
+      expect(settings.filestore.s3BucketCreds).to.deep.equal(s3Settings)
+    })
+  })
+})