Merge pull request #28 from sharelatex/mm-conversions-endpoint

Add endpoint for arbitrary bucket fetch
Henry Oswald 2018-07-10 14:45:33 +01:00 committed by GitHub
commit 00fa99564e
8 changed files with 184 additions and 13 deletions
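
The new route (added to the router in the first hunk below) takes the bucket name as a path segment and everything after /key/ as the object key. A minimal sketch of fetching a file through it, assuming filestore listens on localhost:3000 (hypothetical host, port, bucket, and key):

    fs = require "fs"
    request = require "request"
    # GET /bucket/:bucket/key/*  ->  bucket = "my-bucket", key = "path/to/file.pdf"
    request("http://localhost:3000/bucket/my-bucket/key/path/to/file.pdf")
      .pipe fs.createWriteStream("/tmp/file.pdf")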

@@ -4,6 +4,7 @@ logger.initialize("filestore")
settings = require("settings-sharelatex")
request = require("request")
fileController = require("./app/js/FileController")
bucketController = require("./app/js/BucketController")
keyBuilder = require("./app/js/KeyBuilder")
healthCheckController = require("./app/js/HealthCheckController")
domain = require("domain")
@@ -86,6 +87,8 @@ app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey,
app.get "/project/:project_id/size", keyBuilder.publicProjectKey, fileController.directorySize
app.get "/bucket/:bucket/key/*", bucketController.getFile
app.get "/heapdump", (req, res)->
require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename)->
res.send filename
@@ -105,8 +108,6 @@ app.get '/status', (req, res)->
app.get "/health_check", healthCheckController.check
app.get '*', (req, res)->
res.send 404

@@ -0,0 +1,29 @@
settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
FileHandler = require("./FileHandler")
metrics = require("metrics-sharelatex")
Errors = require('./Errors')
module.exports = BucketController =
getFile: (req, res)->
{bucket} = req.params
key = req.params[0]
credentials = settings.filestore.s3BucketCreds?[bucket]
options = {
key: key,
bucket: bucket,
credentials: credentials
}
metrics.inc "#{bucket}.getFile"
logger.log key:key, bucket:bucket, "receiving request to get file from bucket"
FileHandler.getFile bucket, key, options, (err, fileStream)->
if err?
logger.err err:err, key:key, bucket:bucket, "problem getting file from bucket"
if err instanceof Errors.NotFoundError
return res.send 404
else
return res.send 500
else
logger.log key:key, bucket:bucket, "sending bucket file to response"
fileStream.pipe res

@@ -7,7 +7,7 @@ KeyBuilder = require("./KeyBuilder")
async = require("async")
ImageOptimiser = require("./ImageOptimiser")
-module.exports =
+module.exports = FileHandler =
insertFile: (bucket, key, stream, callback)->
convertedKey = KeyBuilder.getConvertedFolderKey key
@@ -23,7 +23,8 @@ module.exports =
], callback
getFile: (bucket, key, opts = {}, callback)->
-logger.log bucket:bucket, key:key, opts:opts, "getting file"
+# In this call, opts can contain credentials
+logger.log bucket:bucket, key:key, opts:@_scrubSecrets(opts), "getting file"
if !opts.format? and !opts.style?
@_getStandardFile bucket, key, opts, callback
else
@@ -32,7 +33,7 @@ module.exports =
_getStandardFile: (bucket, key, opts, callback)->
PersistorManager.getFileStream bucket, key, opts, (err, fileStream)->
if err?
-logger.err bucket:bucket, key:key, opts:opts, "error getting fileStream"
+logger.err bucket:bucket, key:key, opts:FileHandler._scrubSecrets(opts), "error getting fileStream"
callback err, fileStream
_getConvertedFile: (bucket, key, opts, callback)->
@@ -71,7 +72,7 @@ module.exports =
return callback(err)
done = (err, destPath)->
if err?
-logger.err err:err, bucket:bucket, originalKey:originalKey, opts:opts, "error converting file"
+logger.err err:err, bucket:bucket, originalKey:originalKey, opts:FileHandler._scrubSecrets(opts), "error converting file"
return callback(err)
LocalFileWriter.deleteFile originalFsPath, ->
callback(err, destPath, originalFsPath)
@@ -98,3 +99,8 @@ module.exports =
if err?
logger.err bucket:bucket, project_id:project_id, "error getting size"
callback err, size
_scrubSecrets: (opts)->
safe = Object.assign {}, opts
delete safe.credentials
safe
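
_scrubSecrets copies opts and drops only the credentials key, so per-bucket secrets stay out of the log lines above while the original opts still carries credentials on to the persistor. A sketch of the effect (hypothetical values):

    opts = { bucket: "user_files", key: "project_id/file_id", credentials: { auth_key: "k" } }
    FileHandler._scrubSecrets(opts)   # => { bucket: "user_files", key: "project_id/file_id" }
    opts.credentials                  # still present on the original object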

@@ -68,8 +68,8 @@ module.exports =
callback = _.once callback
logger.log bucketName:bucketName, key:key, "getting file from s3"
s3Client = knox.createClient
-key: settings.filestore.s3.key
-secret: settings.filestore.s3.secret
+key: opts.credentials?.auth_key || settings.filestore.s3.key
+secret: opts.credentials?.auth_secret || settings.filestore.s3.secret
bucket: bucketName
s3Stream = s3Client.get(key, headers)
s3Stream.end()
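
The || fallback above means a per-bucket credential pair, when supplied in opts, wins over the global S3 settings. A sketch with hypothetical values:

    withCreds = credentials: { auth_key: "k1", auth_secret: "s1" }
    withoutCreds = {}
    withCreds.credentials?.auth_key || settings.filestore.s3.key     # => "k1" (per-bucket wins)
    withoutCreds.credentials?.auth_key || settings.filestore.s3.key  # => global default key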

@@ -23,10 +23,19 @@ module.exports =
user_files: Path.resolve(__dirname + "/../user_files")
public_files: Path.resolve(__dirname + "/../public_files")
template_files: Path.resolve(__dirname + "/../template_files")
-# if you are using S3, then fill in your S3 details below
+# if you are using S3, then fill in your S3 details below,
+# or use env var with the same structure.
 # s3:
-#   key: ""
-#   secret: ""
+#   key: "" # default
+#   secret: "" # default
+#
+# s3BucketCreds:
+#   bucketname1: # secrets for bucketname1
+#     auth_key: ""
+#     auth_secret: ""
+#   bucketname2: # secrets for bucketname2...
+s3BucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS']
path:
uploadFolder: Path.resolve(__dirname + "/../uploads")
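
For illustration, S3_BUCKET_CREDENTIALS holds a JSON object keyed by bucket name, matching the commented structure above; a hypothetical value and the lookup BucketController performs on it:

    # S3_BUCKET_CREDENTIALS='{"bucketname1": {"auth_key": "key1", "auth_secret": "secret1"}}'
    credentials = settings.filestore.s3BucketCreds?["bucketname1"]
    # => { auth_key: "key1", auth_secret: "secret1" }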

@@ -0,0 +1,69 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../app/js/BucketController.js"
SandboxedModule = require('sandboxed-module')
describe "BucketController", ->
beforeEach ->
@PersistorManager =
sendStream: sinon.stub()
copyFile: sinon.stub()
deleteFile: sinon.stub()
@settings =
s3:
buckets:
user_files:"user_files"
filestore:
backend: "s3"
s3:
secret: "secret"
key: "this_key"
@FileHandler =
getFile: sinon.stub()
deleteFile: sinon.stub()
insertFile: sinon.stub()
getDirectorySize: sinon.stub()
@LocalFileWriter = {}
@controller = SandboxedModule.require modulePath, requires:
"./LocalFileWriter":@LocalFileWriter
"./FileHandler": @FileHandler
"./PersistorManager":@PersistorManager
"settings-sharelatex": @settings
"logger-sharelatex":
log:->
err:->
@project_id = "project_id"
@file_id = "file_id"
@bucket = "user_files"
@key = "#{@project_id}/#{@file_id}"
@req =
query:{}
params:
bucket: @bucket
0: @key
headers: {}
@res =
setHeader: ->
@fileStream = {}
describe "getFile", ->
it "should pipe the stream", (done)->
@FileHandler.getFile.callsArgWith(3, null, @fileStream)
@fileStream.pipe = (res)=>
res.should.equal @res
done()
@controller.getFile @req, @res
it "should send a 500 if there is a problem", (done)->
@FileHandler.getFile.callsArgWith(3, "error")
@res.send = (code)=>
code.should.equal 500
done()
@controller.getFile @req, @res

@@ -55,6 +55,41 @@ describe "S3PersistorManagerTests", ->
@stubbedKnoxClient.get.calledWith(@key).should.equal true
done()
it "should use default auth", (done)->
@stubbedKnoxClient.get.returns(
on:->
end:->
)
@S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback
clientParams =
key: @settings.filestore.s3.key
secret: @settings.filestore.s3.secret
bucket: @bucketName
@knox.createClient.calledWith(clientParams).should.equal true
done()
describe "with supplied auth", ->
beforeEach ->
@S3PersistorManager = SandboxedModule.require modulePath, requires: @requires
@credentials =
auth_key: "that_key"
auth_secret: "that_secret"
@opts =
credentials: @credentials
it "should use supplied auth", (done)->
@stubbedKnoxClient.get.returns(
on:->
end:->
)
@S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback
clientParams =
key: @credentials.auth_key
secret: @credentials.auth_secret
bucket: @bucketName
@knox.createClient.calledWith(clientParams).should.equal true
done()
describe "with start and end options", ->
beforeEach ->
@opts =

@@ -0,0 +1,22 @@
chai = require('chai')
expect = chai.expect
describe "Settings", ->
describe "s3", ->
it "should use JSONified env var if present", (done)->
  # env var name and shape must match settings.defaults: bucket name -> auth pair
  s3_settings =
    bucket1:
      auth_key: 'bucket1_key'
      auth_secret: 'bucket1_secret'
  process.env['S3_BUCKET_CREDENTIALS'] = JSON.stringify s3_settings
  settings = require('settings-sharelatex')
  expect(settings.filestore.s3BucketCreds).to.deep.equal s3_settings
  done()