Merge branch 'master' into ho-docker

This commit is contained in:
Henry Oswald 2018-09-14 16:49:12 +01:00
commit aacb54d54c
10 changed files with 765 additions and 558 deletions

View file

@ -5,12 +5,16 @@ logger.initialize("filestore")
settings = require("settings-sharelatex")
request = require("request")
fileController = require("./app/js/FileController")
bucketController = require("./app/js/BucketController")
keyBuilder = require("./app/js/KeyBuilder")
healthCheckController = require("./app/js/HealthCheckController")
domain = require("domain")
appIsOk = true
app = express()
if settings.sentry?.dsn?
logger.initializeErrorReporting(settings.sentry.dsn)
Metrics = require "metrics-sharelatex"
Metrics.initialize("filestore")
Metrics.open_sockets.monitor(logger)
@ -78,6 +82,8 @@ app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey,
app.get "/project/:project_id/size", keyBuilder.publicProjectKey, fileController.directorySize
app.get "/bucket/:bucket/key/*", bucketController.getFile
# Debug endpoint: write a V8 heap snapshot to /tmp and reply with its path.
app.get "/heapdump", (req, res)->
	require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename)->
		# Previously err was ignored, so a failed snapshot replied with `undefined`.
		return res.send 500 if err?
		res.send filename
@ -95,8 +101,6 @@ app.get '/status', (req, res)->
app.get "/health_check", healthCheckController.check
app.get '*', (req, res)->

View file

@ -0,0 +1,29 @@
settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
FileHandler = require("./FileHandler")
metrics = require("metrics-sharelatex")
Errors = require('./Errors')
# Streams objects out of arbitrary S3 buckets, optionally using
# per-bucket credentials configured in settings.filestore.s3BucketCreds.
module.exports = BucketController =

	# GET /bucket/:bucket/key/* — the wildcard path segment arrives as params[0].
	getFile: (req, res)->
		bucket = req.params.bucket
		key = req.params[0]
		options =
			key: key
			bucket: bucket
			credentials: settings.filestore.s3BucketCreds?[bucket]
		metrics.inc "#{bucket}.getFile"
		logger.log key:key, bucket:bucket, "receiving request to get file from bucket"
		FileHandler.getFile bucket, key, options, (err, fileStream)->
			if !err?
				logger.log key:key, bucket:bucket, "sending bucket file to response"
				return fileStream.pipe res
			logger.err err:err, key:key, bucket:bucket, "problem getting file from bucket"
			# Missing objects are an expected outcome; everything else is a server error.
			status = if err instanceof Errors.NotFoundError then 404 else 500
			res.send status

View file

@ -7,7 +7,7 @@ KeyBuilder = require("./KeyBuilder")
async = require("async")
ImageOptimiser = require("./ImageOptimiser")
module.exports =
module.exports = FileHandler =
insertFile: (bucket, key, stream, callback)->
convertedKey = KeyBuilder.getConvertedFolderKey key
@ -23,7 +23,8 @@ module.exports =
], callback
getFile: (bucket, key, opts = {}, callback)->
logger.log bucket:bucket, key:key, opts:opts, "getting file"
# In this call, opts can contain credentials
logger.log bucket:bucket, key:key, opts:@_scrubSecrets(opts), "getting file"
if !opts.format? and !opts.style?
@_getStandardFile bucket, key, opts, callback
else
@ -32,7 +33,7 @@ module.exports =
_getStandardFile: (bucket, key, opts, callback)->
PersistorManager.getFileStream bucket, key, opts, (err, fileStream)->
if err?
logger.err bucket:bucket, key:key, opts:opts, "error getting fileStream"
logger.err bucket:bucket, key:key, opts:FileHandler._scrubSecrets(opts), "error getting fileStream"
callback err, fileStream
_getConvertedFile: (bucket, key, opts, callback)->
@ -71,7 +72,7 @@ module.exports =
return callback(err)
done = (err, destPath)->
if err?
logger.err err:err, bucket:bucket, originalKey:originalKey, opts:opts, "error converting file"
logger.err err:err, bucket:bucket, originalKey:originalKey, opts:FileHandler._scrubSecrets(opts), "error converting file"
return callback(err)
LocalFileWriter.deleteFile originalFsPath, ->
callback(err, destPath, originalFsPath)
@ -100,3 +101,8 @@ module.exports =
if err?
logger.err bucket:bucket, project_id:project_id, "error getting size"
callback err, size
# Shallow-copy opts minus its `credentials` key, so per-bucket
# secrets never end up in log lines.
_scrubSecrets: (opts)->
	cleaned = {}
	for own k, v of opts when k isnt 'credentials'
		cleaned[k] = v
	cleaned

View file

@ -68,8 +68,8 @@ module.exports =
callback = _.once callback
logger.log bucketName:bucketName, key:key, "getting file from s3"
s3Client = knox.createClient
key: settings.filestore.s3.key
secret: settings.filestore.s3.secret
key: opts.credentials?.auth_key || settings.filestore.s3.key
secret: opts.credentials?.auth_secret || settings.filestore.s3.secret
bucket: bucketName
s3Stream = s3Client.get(key, headers)
s3Stream.end()

View file

@ -11,7 +11,7 @@ settings =
# Choices are
# s3 - Amazon S3
# fs - local filesystem
if process.env['AWS_KEY']?
if process.env['AWS_KEY']? or process.env['S3_BUCKET_CREDENTIALS']?
backend: "s3"
s3:
key: process.env['AWS_KEY']
@ -20,6 +20,18 @@ settings =
user_files: process.env['AWS_S3_USER_FILES_BUCKET_NAME']
template_files: process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME']
public_files: process.env['AWS_S3_PUBLIC_FILES_BUCKET_NAME']
# if you are using S3, then fill in your S3 details below,
# or use env var with the same structure.
# s3:
# key: "" # default
# secret: "" # default
#
# s3BucketCreds:
# bucketname1: # secrets for bucketname1
# auth_key: ""
# auth_secret: ""
# bucketname2: # secrets for bucketname2...
s3BucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS']?
else
backend: "fs"
stores:
@ -31,10 +43,9 @@ settings =
public_files: Path.resolve(__dirname + "/../public_files")
template_files: Path.resolve(__dirname + "/../template_files")
path:
uploadFolder: Path.resolve(__dirname + "/../uploads")
commands:
# Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"]
convertCommandPrefix: []

File diff suppressed because it is too large Load diff

View file

@ -29,8 +29,8 @@
"fs-extra": "^1.0.0",
"heapdump": "^0.3.2",
"knox": "~0.9.1",
"logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.1.0",
"metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.8.1",
"logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.7",
"node-transloadit": "0.0.4",
"node-uuid": "~1.4.1",
"pngcrush": "0.0.3",
@ -40,6 +40,7 @@
"response": "0.14.0",
"rimraf": "2.2.8",
"settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0",
"stream-browserify": "^2.0.1",
"stream-buffers": "~0.2.5",
"underscore": "~1.5.2",
"mocha": "5.2.0",

View file

@ -0,0 +1,69 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../app/js/BucketController.js"
SandboxedModule = require('sandboxed-module')
# Unit tests for BucketController.getFile.
describe "BucketController", ->

	beforeEach ->
		# Stub out every collaborator; they are injected via SandboxedModule below.
		@PersistorManager =
			sendStream: sinon.stub()
			copyFile: sinon.stub()
			deleteFile:sinon.stub()
		@settings =
			s3:
				buckets:
					user_files:"user_files"
			filestore:
				backend: "s3"
				s3:
					secret: "secret"
					key: "this_key"
		@FileHandler =
			getFile: sinon.stub()
			deleteFile: sinon.stub()
			insertFile: sinon.stub()
			getDirectorySize: sinon.stub()
		@LocalFileWriter = {}
		@controller = SandboxedModule.require modulePath, requires:
			"./LocalFileWriter":@LocalFileWriter
			"./FileHandler": @FileHandler
			"./PersistorManager":@PersistorManager
			"settings-sharelatex": @settings
			"logger-sharelatex":
				log:->
				err:->
		@project_id = "project_id"
		@file_id = "file_id"
		@bucket = "user_files"
		@key = "#{@project_id}/#{@file_id}"
		# Express-style request: the route's wildcard segment lands in params[0].
		@req =
			query:{}
			params:
				bucket: @bucket
				0: @key
			headers: {}
		@res =
			setHeader: ->
		@fileStream = {}

	describe "getFile", ->

		it "should pipe the stream", (done)->
			# Index 3 is FileHandler.getFile's callback; succeed with the stream.
			@FileHandler.getFile.callsArgWith(3, null, @fileStream)
			@fileStream.pipe = (res)=>
				res.should.equal @res
				done()
			@controller.getFile @req, @res

		it "should send a 500 if there is a problem", (done)->
			# Fail the FileHandler callback with a generic (non-NotFound) error.
			@FileHandler.getFile.callsArgWith(3, "error")
			@res.send = (code)=>
				code.should.equal 500
				done()
			@controller.getFile @req, @res

View file

@ -53,6 +53,41 @@ describe "S3PersistorManagerTests", ->
@stubbedKnoxClient.get.calledWith(@key).should.equal true
done()
it "should use default auth", (done)->
	# Return a knox-stream-shaped stub so the code under test can attach handlers.
	@stubbedKnoxClient.get.returns(
		on:->
		end:->
	)
	@S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback
	# With no opts.credentials, the client must fall back to the settings keys.
	clientParams =
		key: @settings.filestore.s3.key
		secret: @settings.filestore.s3.secret
		bucket: @bucketName
	@knox.createClient.calledWith(clientParams).should.equal true
	done()
describe "with supplied auth", ->
	beforeEach ->
		@S3PersistorManager = SandboxedModule.require modulePath, requires: @requires
		# Per-bucket credentials, as read from settings.filestore.s3BucketCreds.
		@credentials =
			auth_key: "that_key"
			auth_secret: "that_secret"
		@opts =
			credentials: @credentials

	it "should use supplied auth", (done)->
		# Return a knox-stream-shaped stub so the code under test can attach handlers.
		@stubbedKnoxClient.get.returns(
			on:->
			end:->
		)
		@S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback
		# opts.credentials must take precedence over the default settings keys.
		clientParams =
			key: @credentials.auth_key
			secret: @credentials.auth_secret
			bucket: @bucketName
		@knox.createClient.calledWith(clientParams).should.equal true
		done()
describe "with start and end options", ->
beforeEach ->
@opts =

View file

@ -0,0 +1,19 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect

# Verifies that settings-sharelatex parses S3_BUCKET_CREDENTIALS from the
# environment into settings.filestore.s3BucketCreds.
describe "Settings", ->
	describe "s3", ->
		it "should use JSONified env var if present", (done)->
			s3_settings =
				bucket1:
					auth_key: 'bucket1_key'
					auth_secret: 'bucket1_secret'
			# The env var is read at require time, so set it before loading settings.
			process.env['S3_BUCKET_CREDENTIALS'] = JSON.stringify s3_settings
			settings = require("settings-sharelatex")
			expect(settings.filestore.s3BucketCreds).to.deep.equal s3_settings
			# Clean up so the credentials do not leak into other test files.
			delete process.env['S3_BUCKET_CREDENTIALS']
			done()