Merge pull request #11 from cwoac/FSPersistorManager

Fs persistor manager - merge from cwoac
Henry Oswald 2014-03-05 10:39:26 +00:00
commit 1e1c14e5fe
8 changed files with 288 additions and 41 deletions

View file

@@ -0,0 +1,70 @@
logger = require("logger-sharelatex")
fs = require("fs")
LocalFileWriter = require("./LocalFileWriter")

# keys look like "project_id/file_id"; flatten them to a single file name
filterName = (key) ->
  return key.replace /\//, "_"

module.exports =
  sendFile: ( location, target, source, callback = (err)->) ->
    filteredTarget = filterName target
    logger.log location:location, target:filteredTarget, source:source, "sending file"
    fs.rename source, "#{location}/#{filteredTarget}", (err) ->
      if err?
        logger.err err:err, location:location, target:filteredTarget, source:source, "Error on put of file"
      callback err

  sendStream: ( location, target, sourceStream, callback = (err)->) ->
    logger.log location:location, target:target, "sending file stream"
    sourceStream.on "error", (err)->
      logger.err location:location, target:target, err:err, "error on stream to send"
    LocalFileWriter.writeStream sourceStream, null, (err, fsPath)=>
      if err?
        logger.err location:location, target:target, fsPath:fsPath, err:err, "something went wrong writing stream to disk"
        return callback err
      @sendFile location, target, fsPath, callback

  getFileStream: (location, name, callback = (err, res)->)->
    filteredName = filterName name
    logger.log location:location, name:filteredName, "getting file"
    sourceStream = fs.createReadStream "#{location}/#{filteredName}"
    sourceStream.on 'error', (err) ->
      logger.err err:err, location:location, name:name, "Error reading from file"
      callback err
    callback null, sourceStream

  copyFile: (location, fromName, toName, callback = (err)->)->
    filteredFromName = filterName fromName
    filteredToName = filterName toName
    logger.log location:location, fromName:filteredFromName, toName:filteredToName, "copying file"
    sourceStream = fs.createReadStream "#{location}/#{filteredFromName}"
    sourceStream.on 'error', (err) ->
      logger.err err:err, location:location, key:filteredFromName, "Error reading from file"
      callback err
    targetStream = fs.createWriteStream "#{location}/#{filteredToName}"
    targetStream.on 'error', (err) ->
      logger.err err:err, location:location, key:filteredToName, "Error writing to file"
      callback err
    sourceStream.pipe targetStream

  deleteFile: (location, name, callback)->
    filteredName = filterName name
    logger.log location:location, name:filteredName, "delete file"
    fs.unlink "#{location}/#{filteredName}", (err) ->
      if err?
        logger.err err:err, location:location, name:filteredName, "Error on delete."
      callback err

  deleteDirectory: (location, name, callback = (err)->)->
    filteredName = filterName name
    fs.rmdir "#{location}/#{filteredName}", (err) ->
      if err?
        logger.err err:err, location:location, name:filteredName, "Error on rmdir."
      callback err

  checkIfFileExists: (location, name, callback = (err,exists)->)->
    filteredName = filterName name
    logger.log location:location, name:filteredName, "checking if file exists"
    fs.exists "#{location}/#{filteredName}", (exists) ->
      logger.log location:location, name:filteredName, exists:exists, "checked if file exists"
      callback null, exists
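
For orientation, a minimal usage sketch of the new FS backend follows; the storage directory, ids and temp file below are illustrative, not part of this commit:

# Illustrative only -- directory, ids and source path are made up for this sketch.
FSPersistorManager = require "./FSPersistorManager"

location = "/var/lib/sharelatex/user_files"   # assumed directory, must already exist
key = "530f2407e7ef165704000007/530f838b46d9a9e859000008"

# filterName flattens the key, so the file is stored inside location as
# 530f2407e7ef165704000007_530f838b46d9a9e859000008
FSPersistorManager.sendFile location, key, "/tmp/upload.bin", (err) ->
  return console.error err if err?
  FSPersistorManager.getFileStream location, key, (err, stream) ->
    stream.pipe process.stdout unless err?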

View file

@@ -20,13 +20,13 @@ module.exports =
  userFileKey: (req, res, next)->
    {project_id, file_id} = req.params
    req.key = "#{project_id}/#{file_id}"
-   req.bucket = settings.s3.buckets.user_files
+   req.bucket = settings.filestore.stores.user_files
    next()

  templateFileKey: (req, res, next)->
    {template_id, format, version} = req.params
    req.key = "#{template_id}/#{version}/#{format}"
-   req.bucket = settings.s3.buckets.template_files
+   req.bucket = settings.filestore.stores.template_files
    req.version = version
    opts = req.query
    next()
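
As a quick illustration of the effect of this change (ids and store value are made up, not from this commit):

# Illustrative only -- userFileKey under the new settings shape.
# With settings.filestore.stores.user_files = "sl_user_files" (a made-up store name) and
# req.params = { project_id: "530f2407e7ef165704000007", file_id: "530f838b46d9a9e859000008" },
# the middleware sets:
#   req.key    = "530f2407e7ef165704000007/530f838b46d9a9e859000008"
#   req.bucket = "sl_user_files"   # previously read from settings.s3.buckets.user_files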

View file

@@ -1,14 +1,15 @@
settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
-S3PersistorManager = require("./S3PersistorManager")

# assume s3 if none specified
-settings.filestoreBackend ||= "s3"
+settings.filestore.backend ||= "s3"

-logger.log backend:settings.filestoreBackend, "Loading backend"
-module.exports = switch settings.filestoreBackend
+logger.log backend:settings.filestore.backend, "Loading backend"
+module.exports = switch settings.filestore.backend
  when "s3"
-   S3PersistorManager
+   require("./S3PersistorManager")
+ when "fs"
+   require("./FSPersistorManager")
  else
-   throw new Error( "Unknown filestore backend: #{settings.filestoreBackend}" )
+   throw new Error( "Unknown filestore backend: #{settings.filestore.backend}" )
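
Callers keep requiring PersistorManager and never see which backend was chosen; a rough sketch of selecting the filesystem backend (values are illustrative):

# Illustrative only -- the backend is chosen via settings before this module is required.
settings = require "settings-sharelatex"
settings.filestore.backend = "fs"   # or "s3" (the default)

PersistorManager = require "./PersistorManager"
# PersistorManager now exposes the FSPersistorManager API (sendFile, getFileStream, ...);
# an unknown value such as "magic" raises "Unknown filestore backend: magic".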

View file

@@ -24,8 +24,8 @@ printSockets()
buildDefaultOptions = (bucketName, method, key)->
  return {
    aws:
-     key: settings.s3.key
-     secret: settings.s3.secret
+     key: settings.filestore.s3.key
+     secret: settings.filestore.s3.secret
      bucket: bucketName
    method: method
    timeout: thirtySeconds
@@ -36,8 +36,8 @@ module.exports =
  sendFile: (bucketName, key, fsPath, callback)->
    s3Client = knox.createClient
-     key: settings.s3.key
-     secret: settings.s3.secret
+     key: settings.filestore.s3.key
+     secret: settings.filestore.s3.secret
      bucket: bucketName
    putEventEmiter = s3Client.putFile fsPath, key, (err, res)->
      if err?
@@ -70,8 +70,8 @@ module.exports =
  getFileStream: (bucketName, key, callback = (err, res)->)->
    logger.log bucketName:bucketName, key:key, "getting file from s3"
    s3Client = knox.createClient
-     key: settings.s3.key
-     secret: settings.s3.secret
+     key: settings.filestore.s3.key
+     secret: settings.filestore.s3.secret
      bucket: bucketName
    s3Stream = s3Client.get(key)
    s3Stream.end()
@@ -84,8 +84,8 @@ module.exports =
  copyFile: (bucketName, sourceKey, destKey, callback)->
    logger.log bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "copying file in s3"
    s3Client = knox.createClient
-     key: settings.s3.key
-     secret: settings.s3.secret
+     key: settings.filestore.s3.key
+     secret: settings.filestore.s3.secret
      bucket: bucketName
    s3Client.copyFile sourceKey, destKey, (err)->
      if err?
@@ -102,8 +102,8 @@ module.exports =
  deleteDirectory: (bucketName, key, callback)->
    s3Client = knox.createClient
-     key: settings.s3.key
-     secret: settings.s3.secret
+     key: settings.filestore.s3.key
+     secret: settings.filestore.s3.secret
      bucket: bucketName
    s3Client.list prefix:key, (err, data)->
      keys = _.map data.Contents, (entry)->
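
With the credentials moved under settings.filestore.s3, the options built for each S3 request keep the same shape; sketched below with placeholder values (the bucket and key are made up):

# Placeholder values -- shows the shape of the options, not real credentials.
opts = buildDefaultOptions "sl_user_files", "delete", "project_id/file_id"
# opts.aws.key    comes from settings.filestore.s3.key     (was settings.s3.key)
# opts.aws.secret comes from settings.filestore.s3.secret  (was settings.s3.secret)
# opts.aws.bucket == "sl_user_files"
# opts.method     == "delete"
# opts.timeout    == thirtySeconds (asserted as 30 * 1000 in the tests below)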

View file

@@ -4,11 +4,26 @@ module.exports =
    port: 3009
    host: "localhost"

- # which persistor to use for file storage
- # current options are:
- # "s3" - Amazon S3
- # if no persistor is chosen, s3 will be used by default
- filestoreBackend: "s3"
+ filestore:
+   # which backend persistor to use.
+   # choices are
+   # s3 - Amazon S3
+   # fs - local filesystem
+   backend: "s3"
+   stores:
+     # where to store user and template binary files
+     #
+     # For Amazon S3 this is the bucket name to store binary files in.
+     # Must contain the full url like: <bucketname>.s3.amazonaws.com
+     #
+     # For the local filesystem this is the directory to store the files in.
+     # Must contain the full path, e.g. "/var/lib/sharelatex/data"
+     # This path must exist, must not be tmpfs, and must be writable by the user ShareLaTeX runs as.
+     user_files: ""
+   s3:
+     # if you are using S3, then fill in your S3 details below
+     key: ""
+     secret: ""

  # ShareLaTeX stores binary files like images in S3.
  # Fill in your Amazon S3 credentials below.
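
A deployment that wants the new filesystem backend would override these defaults in its settings file; roughly (the directory is only an example and must exist and be writable):

# Example override only -- the path is illustrative.
module.exports =
  filestore:
    backend: "fs"
    stores:
      user_files: "/var/lib/sharelatex/data/user_files"

Note that existing S3 deployments also need to move their key and secret from settings.s3 to settings.filestore.s3, matching the code changes above.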

View file

@@ -0,0 +1,157 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../app/js/FSPersistorManager.js"
SandboxedModule = require('sandboxed-module')
fs = require("fs")

describe "FSPersistorManagerTests", ->

  beforeEach ->
    @Fs =
      rename:sinon.stub()
      createReadStream:sinon.stub()
      createWriteStream:sinon.stub()
      unlink:sinon.stub()
      rmdir:sinon.stub()
      exists:sinon.stub()
    @LocalFileWriter =
      writeStream: sinon.stub()
    @requires =
      "./LocalFileWriter":@LocalFileWriter
      "fs":@Fs
      "logger-sharelatex":
        log:->
        err:->
    @location = "/tmp"
    @name1 = "530f2407e7ef165704000007/530f838b46d9a9e859000008"
    @name1Filtered = "530f2407e7ef165704000007_530f838b46d9a9e859000008"
    @name2 = "second_file"
    @error = "error_message"
    @FSPersistorManager = SandboxedModule.require modulePath, requires: @requires

  describe "sendFile", ->
    it "should put the file", (done) ->
      @Fs.rename.callsArgWith(2,@error)
      @FSPersistorManager.sendFile @location, @name1, @name2, (err)=>
        @Fs.rename.calledWith( @name2, "#{@location}/#{@name1Filtered}" ).should.equal true
        err.should.equal @error
        done()

  describe "sendStream", ->
    beforeEach ->
      @FSPersistorManager.sendFile = sinon.stub().callsArgWith(3)
      @LocalFileWriter.writeStream.callsArgWith(2, null, @name1)
      @SourceStream =
        on:->

    it "should send stream to LocalFileWriter", (done)->
      @FSPersistorManager.sendStream @location, @name1, @SourceStream, =>
        @LocalFileWriter.writeStream.calledWith(@SourceStream).should.equal true
        done()

    it "should return the error from LocalFileWriter", (done)->
      @LocalFileWriter.writeStream.callsArgWith(2, @error)
      @FSPersistorManager.sendStream @location, @name1, @SourceStream, (err)=>
        err.should.equal @error
        done()

    it "should send the file to the filestore", (done)->
      @LocalFileWriter.writeStream.callsArgWith(2)
      @FSPersistorManager.sendStream @location, @name1, @SourceStream, (err)=>
        @FSPersistorManager.sendFile.called.should.equal true
        done()

  describe "getFileStream", ->
    it "should use correct file location", (done) ->
      @Fs.createReadStream.returns(
        on:->
      )
      @FSPersistorManager.getFileStream @location, @name1, (err,res)=>
        @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal true
        done()

  describe "copyFile", ->
    beforeEach ->
      @ReadStream =
        on:->
        pipe:sinon.stub()
      @WriteStream =
        on:->
      @Fs.createReadStream.returns(@ReadStream)
      @Fs.createWriteStream.returns(@WriteStream)

    it "Should open the source for reading", (done) ->
      @FSPersistorManager.copyFile @location, @name1, @name2, ->
      @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal true
      done()

    it "Should open the target for writing", (done) ->
      @FSPersistorManager.copyFile @location, @name1, @name2, ->
      @Fs.createWriteStream.calledWith("#{@location}/#{@name2}").should.equal true
      done()

    it "Should pipe the source to the target", (done) ->
      @FSPersistorManager.copyFile @location, @name1, @name2, ->
      @ReadStream.pipe.calledWith(@WriteStream).should.equal true
      done()

  describe "deleteFile", ->
    beforeEach ->
      @Fs.unlink.callsArgWith(1,@error)

    it "Should call unlink with correct options", (done) ->
      @FSPersistorManager.deleteFile @location, @name1, (err) =>
        @Fs.unlink.calledWith("#{@location}/#{@name1Filtered}").should.equal true
        done()

    it "Should propagate the error", (done) ->
      @FSPersistorManager.deleteFile @location, @name1, (err) =>
        err.should.equal @error
        done()

  describe "deleteDirectory", ->
    beforeEach ->
      @Fs.rmdir.callsArgWith(1,@error)

    it "Should call rmdir with correct options", (done) ->
      @FSPersistorManager.deleteDirectory @location, @name1, (err) =>
        @Fs.rmdir.calledWith("#{@location}/#{@name1Filtered}").should.equal true
        done()

    it "Should propagate the error", (done) ->
      @FSPersistorManager.deleteDirectory @location, @name1, (err) =>
        err.should.equal @error
        done()

  describe "checkIfFileExists", ->
    beforeEach ->
      @Fs.exists.callsArgWith(1,true)

    it "Should call exists with correct options", (done) ->
      @FSPersistorManager.checkIfFileExists @location, @name1, (exists) =>
        @Fs.exists.calledWith("#{@location}/#{@name1Filtered}").should.equal true
        done()

    # fs.exists simply returns false on any error, so...
    it "should not return an error", (done) ->
      @FSPersistorManager.checkIfFileExists @location, @name1, (err,exists) =>
        expect(err).to.be.null
        done()

    it "Should return true for existing files", (done) ->
      @Fs.exists.callsArgWith(1,true)
      @FSPersistorManager.checkIfFileExists @location, @name1, (err,exists) =>
        exists.should.be.true
        done()

    it "Should return false for non-existing files", (done) ->
      @Fs.exists.callsArgWith(1,false)
      @FSPersistorManager.checkIfFileExists @location, @name1, (err,exists) =>
        exists.should.be.false
        done()

View file

@@ -22,7 +22,8 @@ describe "PersistorManagerTests", ->
  describe "test s3 mixin", ->
    beforeEach ->
      @settings =
-       filestoreBackend: "s3"
+       filestore:
+         backend: "s3"
      @requires =
        "./S3PersistorManager": @S3PersistorManager
        "settings-sharelatex": @settings
@@ -81,7 +82,8 @@ describe "PersistorManagerTests", ->
  describe "test invalid mixins", ->
    it "should not load an invalid wrapper", (done) ->
      @settings =
-       filestoreBackend:"magic"
+       filestore:
+         backend:"magic"
      @requires =
        "./S3PersistorManager": @S3PersistorManager
        "settings-sharelatex": @settings

View file

@@ -10,10 +10,12 @@ describe "S3PersistorManagerTests", ->
  beforeEach ->
    @settings =
-     s3:
-       secret: "secret"
-       key: "this_key"
-     buckets:
-       user_files:"sl_user_files"
+     filestore:
+       backend: "s3"
+       s3:
+         secret: "secret"
+         key: "this_key"
+       stores:
+         user_files:"sl_user_files"
    @stubbedKnoxClient =
      putFile:sinon.stub()
@@ -138,7 +140,7 @@ describe "S3PersistorManagerTests", ->
      @S3PersistorManager.deleteFile @bucketName, @key, (err)=>
        opts = @request.args[0][0]
-       assert.deepEqual(opts.aws, {key:@settings.s3.key, secret:@settings.s3.secret, bucket:@bucketName})
+       assert.deepEqual(opts.aws, {key:@settings.filestore.s3.key, secret:@settings.filestore.s3.secret, bucket:@bucketName})
        opts.method.should.equal "delete"
        opts.timeout.should.equal (30*1000)
        opts.uri.should.equal "https://#{@bucketName}.s3.amazonaws.com/#{@key}"
@@ -162,7 +164,7 @@ describe "S3PersistorManagerTests", ->
      @S3PersistorManager.checkIfFileExists @bucketName, @key, (err)=>
        opts = @request.args[0][0]
-       assert.deepEqual(opts.aws, {key:@settings.s3.key, secret:@settings.s3.secret, bucket:@bucketName})
+       assert.deepEqual(opts.aws, {key:@settings.filestore.s3.key, secret:@settings.filestore.s3.secret, bucket:@bucketName})
        opts.method.should.equal "head"
        opts.timeout.should.equal (30*1000)
        opts.uri.should.equal "https://#{@bucketName}.s3.amazonaws.com/#{@key}"