Move bucket-specific file endpoint into FileController

This commit is contained in:
Simon Detheridge 2020-01-07 15:05:51 +00:00
parent 6cc5d94f13
commit 80d41cf51b
6 changed files with 62 additions and 151 deletions

View file

@ -9,7 +9,6 @@ const express = require('express')
const bodyParser = require('body-parser') const bodyParser = require('body-parser')
const fileController = require('./app/js/FileController') const fileController = require('./app/js/FileController')
const bucketController = require('./app/js/BucketController')
const keyBuilder = require('./app/js/KeyBuilder') const keyBuilder = require('./app/js/KeyBuilder')
const healthCheckController = require('./app/js/HealthCheckController') const healthCheckController = require('./app/js/HealthCheckController')
@ -114,7 +113,11 @@ app.get(
fileController.directorySize fileController.directorySize
) )
app.get('/bucket/:bucket/key/*', bucketController.getFile) app.get(
'/bucket/:bucket/key/*',
keyBuilder.bucketFileKeyMiddleware,
fileController.getFile
)
app.get('/heapdump', (req, res, next) => app.get('/heapdump', (req, res, next) =>
require('heapdump').writeSnapshot( require('heapdump').writeSnapshot(

View file

@ -1,48 +0,0 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let BucketController
const settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const FileHandler = require('./FileHandler')
const metrics = require('metrics-sharelatex')
const Errors = require('./Errors')
module.exports = BucketController = {
getFile(req, res) {
const { bucket } = req.params
const key = req.params[0]
const credentials =
settings.filestore.s3BucketCreds != null
? settings.filestore.s3BucketCreds[bucket]
: undefined
const options = {
key,
bucket,
credentials
}
metrics.inc(`${bucket}.getFile`)
logger.log({ key, bucket }, 'receiving request to get file from bucket')
return FileHandler.getFile(bucket, key, options, function(err, fileStream) {
if (err != null) {
logger.err({ err, key, bucket }, 'problem getting file from bucket')
if (err instanceof Errors.NotFoundError) {
return res.send(404)
} else {
return res.send(500)
}
} else {
logger.log({ key, bucket }, 'sending bucket file to response')
return fileStream.pipe(res)
}
})
}
}

View file

@ -6,6 +6,7 @@ module.exports = {
userFileKeyMiddleware, userFileKeyMiddleware,
publicFileKeyMiddleware, publicFileKeyMiddleware,
publicProjectKeyMiddleware, publicProjectKeyMiddleware,
bucketFileKeyMiddleware,
templateFileKeyMiddleware templateFileKeyMiddleware
} }
@ -48,6 +49,12 @@ function publicFileKeyMiddleware(req, res, next) {
next() next()
} }
// Populate req.bucket / req.key for bucket-addressed routes
// (/bucket/:bucket/key/*): the wildcard match is the object key.
function bucketFileKeyMiddleware(req, res, next) {
  const { bucket } = req.params
  req.bucket = bucket
  req.key = req.params[0]
  next()
}
function templateFileKeyMiddleware(req, res, next) { function templateFileKeyMiddleware(req, res, next) {
const { const {
template_id: templateId, template_id: templateId,

View file

@ -303,5 +303,10 @@ function _clientOptions(bucketCredentials) {
options.sslEnabled = endpoint.protocol === 'https' options.sslEnabled = endpoint.protocol === 'https'
} }
// path-style access is only used for acceptance tests
if (settings.filestore.s3.pathStyle) {
options.s3ForcePathStyle = true
}
return options return options
} }

View file

@ -7,6 +7,7 @@ const FilestoreApp = require('./FilestoreApp')
const rp = require('request-promise-native').defaults({ const rp = require('request-promise-native').defaults({
resolveWithFullResponse: true resolveWithFullResponse: true
}) })
const S3 = require('aws-sdk/clients/s3')
const Stream = require('stream') const Stream = require('stream')
const request = require('request') const request = require('request')
const { promisify } = require('util') const { promisify } = require('util')
@ -43,7 +44,8 @@ if (process.env.AWS_ACCESS_KEY_ID) {
s3: { s3: {
key: process.env.AWS_ACCESS_KEY_ID, key: process.env.AWS_ACCESS_KEY_ID,
secret: process.env.AWS_SECRET_ACCESS_KEY, secret: process.env.AWS_SECRET_ACCESS_KEY,
endpoint: process.env.AWS_S3_ENDPOINT endpoint: process.env.AWS_S3_ENDPOINT,
pathStyle: true
}, },
stores: { stores: {
user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME,
@ -288,6 +290,48 @@ describe('Filestore', function() {
}) })
}) })
// Bucket-addressed downloads only exist on the S3 backend.
if (backend === 'S3Persistor') {
  describe('with a file in a specific bucket', function() {
    let constantFileContents, fileId, fileUrl, bucketName

    beforeEach(async function() {
      // Randomised names so repeated runs never collide with leftover state.
      constantFileContents = `This is a file in a different S3 bucket ${Math.random()}`
      fileId = Math.random().toString()
      bucketName = Math.random().toString()
      fileUrl = `${filestoreUrl}/bucket/${bucketName}/key/${fileId}`

      // Seed the bucket by talking to the (fake) S3 endpoint directly,
      // bypassing the filestore service under test.
      const s3ClientSettings = {
        credentials: {
          accessKeyId: 'fake',
          secretAccessKey: 'fake'
        },
        endpoint: process.env.AWS_S3_ENDPOINT,
        sslEnabled: false,
        s3ForcePathStyle: true
      }
      const s3 = new S3(s3ClientSettings)

      await s3.createBucket({ Bucket: bucketName }).promise()
      await s3
        .upload({ Bucket: bucketName, Key: fileId, Body: constantFileContents })
        .promise()
    })

    it('should get the file from the specified bucket', async function() {
      const response = await rp.get(fileUrl)
      expect(response.body).to.equal(constantFileContents)
    })
  })
}
describe('with a pdf file', function() { describe('with a pdf file', function() {
let fileId, fileUrl, localFileSize let fileId, fileUrl, localFileSize
const localFileReadPath = Path.resolve( const localFileReadPath = Path.resolve(

View file

@ -1,100 +0,0 @@
/* eslint-disable
no-return-assign,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const { assert } = require('chai')
const sinon = require('sinon')
const chai = require('chai')
const should = chai.should()
const { expect } = chai
const modulePath = '../../../app/js/BucketController.js'
const SandboxedModule = require('sandboxed-module')
describe('BucketController', function() {
beforeEach(function() {
this.PersistorManager = {
sendStream: sinon.stub(),
copyFile: sinon.stub(),
deleteFile: sinon.stub()
}
this.settings = {
s3: {
buckets: {
user_files: 'user_files'
}
},
filestore: {
backend: 's3',
s3: {
secret: 'secret',
key: 'this_key'
}
}
}
this.FileHandler = {
getFile: sinon.stub(),
deleteFile: sinon.stub(),
insertFile: sinon.stub(),
getDirectorySize: sinon.stub()
}
this.LocalFileWriter = {}
this.controller = SandboxedModule.require(modulePath, {
requires: {
'./LocalFileWriter': this.LocalFileWriter,
'./FileHandler': this.FileHandler,
'./PersistorManager': this.PersistorManager,
'settings-sharelatex': this.settings,
'metrics-sharelatex': {
inc() {}
},
'logger-sharelatex': {
log() {},
err() {}
}
}
})
this.project_id = 'project_id'
this.file_id = 'file_id'
this.bucket = 'user_files'
this.key = `${this.project_id}/${this.file_id}`
this.req = {
query: {},
params: {
bucket: this.bucket,
0: this.key
},
headers: {}
}
this.res = { setHeader() {} }
return (this.fileStream = {})
})
return describe('getFile', function() {
it('should pipe the stream', function(done) {
this.FileHandler.getFile.callsArgWith(3, null, this.fileStream)
this.fileStream.pipe = res => {
res.should.equal(this.res)
return done()
}
return this.controller.getFile(this.req, this.res)
})
return it('should send a 500 if there is a problem', function(done) {
this.FileHandler.getFile.callsArgWith(3, 'error')
this.res.send = code => {
code.should.equal(500)
return done()
}
return this.controller.getFile(this.req, this.res)
})
})
})