Merge pull request #8 from cwoac/localfs

Allow multiple backends
Henry Oswald 2014-02-25 16:37:16 +00:00
commit d41eca68a2
7 changed files with 159 additions and 53 deletions
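
In short: every module that previously required "./s3Wrapper" directly now requires a new "./fsWrapper" module, which picks the concrete storage backend named by settings.filestoreBackend. Judging from the calls exercised in the diffs below, any backend plugged in this way is expected to expose roughly the following interface. This is a minimal sketch only: the method names come from this diff, while the stub bodies are placeholders rather than the real s3Wrapper implementation.

# Sketch of the backend interface implied by the calls in this diff.
# Stub bodies are placeholders, not the real s3Wrapper code.
module.exports =
  # stream a stored object back to the caller
  getFileStream: (bucket, key, callback) -> callback(null, null)
  # report whether an object exists under the given key
  checkIfFileExists: (bucket, key, callback) -> callback(null, false)
  # delete a single object, or a whole folder of converted files
  deleteFile: (bucket, key, callback) -> callback()
  deleteDirectory: (bucket, key, callback) -> callback()
  # upload from a readable stream, or from a local file path
  sendStreamToS3: (bucket, key, readStream, callback) -> callback()
  sendFileToS3: (bucket, key, fsPath, callback) -> callback()
  # server-side copy between two keys in the same bucket
  copyFile: (bucket, sourceKey, destKey, callback) -> callback()
  # also proxied through the wrapper (see the fsWrapper unit tests below)
  insertFile: (bucket, key, stream, callback) -> callback()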

View file

@@ -1,4 +1,4 @@
s3Wrapper = require("./s3Wrapper")
fsWrapper = require("./fsWrapper")
settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
FileHandler = require("./FileHandler")
@@ -37,9 +37,9 @@ module.exports =
oldProject_id = req.body.source.project_id
oldFile_id = req.body.source.file_id
logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "receiving request to copy file"
s3Wrapper.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)->
fsWrapper.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)->
if err?
logger.log err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file in s3Wrapper"
logger.log err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file"
res.send 500
else
res.send 200
@@ -50,7 +50,7 @@ module.exports =
logger.log key:key, bucket:bucket, "reciving request to delete file"
FileHandler.deleteFile bucket, key, (err)->
if err?
logger.log err:err, key:key, bucket:bucket, "something went wrong deleting file in s3Wrapper"
logger.log err:err, key:key, bucket:bucket, "something went wrong deleting file"
res.send 500
else
res.send 204

View file

@@ -1,5 +1,5 @@
settings = require("settings-sharelatex")
s3Wrapper = require("./s3Wrapper")
fsWrapper = require("./fsWrapper")
LocalFileWriter = require("./LocalFileWriter")
logger = require("logger-sharelatex")
FileConverter = require("./FileConverter")
@@ -12,15 +12,15 @@ module.exports =
insertFile: (bucket, key, stream, callback)->
convetedKey = KeyBuilder.getConvertedFolderKey(key)
s3Wrapper.deleteDirectory bucket, convetedKey, ->
s3Wrapper.sendStreamToS3 bucket, key, stream, ->
fsWrapper.deleteDirectory bucket, convetedKey, ->
fsWrapper.sendStreamToS3 bucket, key, stream, ->
callback()
deleteFile: (bucket, key, callback)->
convetedKey = KeyBuilder.getConvertedFolderKey(bucket, key)
async.parallel [
(done)-> s3Wrapper.deleteFile bucket, key, done
(done)-> s3Wrapper.deleteFile bucket, convetedKey, done
(done)-> fsWrapper.deleteFile bucket, key, done
(done)-> fsWrapper.deleteFile bucket, convetedKey, done
], callback
getFile: (bucket, key, opts = {}, callback)->
@@ -31,16 +31,16 @@ module.exports =
@_getConvertedFile bucket, key, opts, callback
_getStandardFile: (bucket, key, opts, callback)->
s3Wrapper.getFileStream bucket, key, (err, fileStream)->
fsWrapper.getFileStream bucket, key, (err, fileStream)->
if err?
logger.err bucket:bucket, key:key, opts:opts, "error getting fileStream"
callback err, fileStream
_getConvertedFile: (bucket, key, opts, callback)->
convetedKey = KeyBuilder.addCachingToKey(key, opts)
s3Wrapper.checkIfFileExists bucket, convetedKey, (err, exists)=>
fsWrapper.checkIfFileExists bucket, convetedKey, (err, exists)=>
if exists
s3Wrapper.getFileStream bucket, convetedKey, callback
fsWrapper.getFileStream bucket, convetedKey, callback
else
@_getConvertedFileAndCache bucket, key, convetedKey, opts, callback
@@ -53,13 +53,13 @@ module.exports =
if err?
logger.err err:err, fsPath:fsPath, bucket:bucket, key:key, opts:opts, "something went wrong optimising png file"
return callback(err)
s3Wrapper.sendFileToS3 bucket, convetedKey, fsPath, (err)->
fsWrapper.sendFileToS3 bucket, convetedKey, fsPath, (err)->
if err?
logger.err err:err, bucket:bucket, key:key, convetedKey:convetedKey, opts:opts, "something went wrong seing file to s3"
logger.err err:err, bucket:bucket, key:key, convetedKey:convetedKey, opts:opts, "something went wrong sending the file"
return callback(err)
s3Wrapper.getFileStream bucket, convetedKey, callback
fsWrapper.getFileStream bucket, convetedKey, callback
_convertFile: (bucket, origonalKey, opts, callback)->
_convertFile: (bucket, origonalKey, opts, callback)->
@_writeS3FileToDisk bucket, origonalKey, (err, origonalFsPath)->
if opts.format?
FileConverter.convert origonalFsPath, opts.format, callback
@@ -72,9 +72,6 @@ module.exports =
_writeS3FileToDisk: (bucket, key, callback)->
s3Wrapper.getFileStream bucket, key, (err, fileStream)->
fsWrapper.getFileStream bucket, key, (err, fileStream)->
LocalFileWriter.writeStream fileStream, key, callback

View file

@@ -0,0 +1,10 @@
settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
s3Wrapper = require("./s3Wrapper")
logger.log backend:settings.filestoreBackend,"Loading backend"
module.exports = switch settings.filestoreBackend
when "s3"
s3Wrapper
else
throw new Error( "Unknown filestore backend: #{settings.filestoreBackend}" )
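
The switch above only wires up the existing s3Wrapper, and the else branch rejects anything it does not recognise; presumably further backends get added as extra when clauses. A purely hypothetical sketch of what registering a second backend could look like ("fs" and "./LocalFsWrapper" are illustrative names and are not part of this commit):

# Hypothetical extension of the backend switch; the "fs" value and the
# "./LocalFsWrapper" module are illustrative only, not introduced here.
module.exports = switch settings.filestoreBackend
  when "s3"
    s3Wrapper
  when "fs"
    require("./LocalFsWrapper")
  else
    throw new Error("Unknown filestore backend: #{settings.filestoreBackend}")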

View file

@@ -4,6 +4,11 @@ module.exports =
port: 3009
host: "localhost"
# which backend to use
# current options are:
# "s3" - Amazon S3
filestoreBackend: "s3"
# ShareLaTeX stores binary files like images in S3.
# Fill in your Amazon S3 credentials below.
s3:
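
For reference, a sketch of how the new option sits alongside the existing S3 settings. Only the backend-related keys are shown; the bucket name is taken from the unit tests further down, and the real credential fields inside the s3 block are left as they are in the existing file.

# Settings sketch; only the keys relevant to backend selection are shown.
module.exports =
  # which backend to use; "s3" is the only option wired up in this commit
  filestoreBackend: "s3"
  s3:
    buckets:
      user_files: "user_files"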

View file

@@ -9,9 +9,8 @@ SandboxedModule = require('sandboxed-module')
describe "FileController", ->
beforeEach ->
@s3Wrapper =
@FsWrapper =
sendStreamToS3: sinon.stub()
getAndPipe: sinon.stub()
copyFile: sinon.stub()
deleteFile:sinon.stub()
@@ -27,7 +26,7 @@ describe "FileController", ->
@controller = SandboxedModule.require modulePath, requires:
"./LocalFileWriter":@LocalFileWriter
"./FileHandler": @FileHandler
"./s3Wrapper":@s3Wrapper
"./fsWrapper":@FsWrapper
"settings-sharelatex": @settings
"logger-sharelatex":
log:->
@@ -36,14 +35,14 @@ describe "FileController", ->
@file_id = "file_id"
@bucket = "user_files"
@key = "#{@project_id}/#{@file_id}"
@req =
@req =
key:@key
bucket:@bucket
query:{}
params:
params:
project_id:@project_id
file_id:@file_id
@res =
@res =
setHeader: ->
@fileStream = {}
@@ -74,7 +73,7 @@ describe "FileController", ->
describe "insertFile", ->
it "should send bucket name key and res to s3Wrapper", (done)->
it "should send bucket name key and res to FsWrapper", (done)->
@FileHandler.insertFile.callsArgWith(3)
@res.send = =>
@FileHandler.insertFile.calledWith(@bucket, @key, @req).should.equal true
@@ -91,17 +90,17 @@ describe "FileController", ->
project_id: @oldProject_id
file_id: @oldFile_id
it "should send bucket name and both keys to s3Wrapper", (done)->
@s3Wrapper.copyFile.callsArgWith(3)
it "should send bucket name and both keys to FsWrapper", (done)->
@FsWrapper.copyFile.callsArgWith(3)
@res.send = (code)=>
code.should.equal 200
@s3Wrapper.copyFile.calledWith(@bucket, "#{@oldProject_id}/#{@oldFile_id}", @key).should.equal true
@FsWrapper.copyFile.calledWith(@bucket, "#{@oldProject_id}/#{@oldFile_id}", @key).should.equal true
done()
@controller.copyFile @req, @res
it "should send a 500 if there was an error", (done)->
@s3Wrapper.copyFile.callsArgWith(3, "error")
@FsWrapper.copyFile.callsArgWith(3, "error")
@res.send = (code)=>
code.should.equal 500
done()

View file

@@ -1,4 +1,3 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
@@ -14,7 +13,7 @@ describe "FileHandler", ->
s3:
buckets:
user_files:"user_files"
@s3Wrapper =
@FsWrapper =
getFileStream: sinon.stub()
checkIfFileExists: sinon.stub()
deleteFile: sinon.stub()
@@ -27,14 +26,14 @@ describe "FileHandler", ->
convert: sinon.stub()
thumbnail: sinon.stub()
preview: sinon.stub()
@keyBuilder =
@keyBuilder =
addCachingToKey: sinon.stub()
getConvertedFolderKey: sinon.stub()
@ImageOptimiser =
compressPng: sinon.stub()
@handler = SandboxedModule.require modulePath, requires:
"settings-sharelatex": @settings
"./s3Wrapper":@s3Wrapper
"./fsWrapper":@FsWrapper
"./LocalFileWriter":@LocalFileWriter
"./FileConverter":@FileConverter
"./KeyBuilder": @keyBuilder
@@ -51,33 +50,33 @@ describe "FileHandler", ->
describe "insertFile", ->
beforeEach ->
@stream = {}
@s3Wrapper.deleteDirectory.callsArgWith(2)
@s3Wrapper.sendStreamToS3.callsArgWith(3)
@FsWrapper.deleteDirectory.callsArgWith(2)
@FsWrapper.sendStreamToS3.callsArgWith(3)
it "should send file to s3", (done)->
@handler.insertFile @bucket, @key, @stream, =>
@s3Wrapper.sendStreamToS3.calledWith(@bucket, @key, @stream).should.equal true
@FsWrapper.sendStreamToS3.calledWith(@bucket, @key, @stream).should.equal true
done()
it "should delete the convetedKey folder", (done)->
@keyBuilder.getConvertedFolderKey.returns(@stubbedConvetedKey)
@handler.insertFile @bucket, @key, @stream, =>
@s3Wrapper.deleteDirectory.calledWith(@bucket, @stubbedConvetedKey).should.equal true
@FsWrapper.deleteDirectory.calledWith(@bucket, @stubbedConvetedKey).should.equal true
done()
describe "deleteFile", ->
beforeEach ->
@keyBuilder.getConvertedFolderKey.returns(@stubbedConvetedKey)
@s3Wrapper.deleteFile.callsArgWith(2)
@FsWrapper.deleteFile.callsArgWith(2)
it "should tell the s3 wrapper to delete the file", (done)->
@handler.deleteFile @bucket, @key, =>
@s3Wrapper.deleteFile.calledWith(@bucket, @key).should.equal true
@FsWrapper.deleteFile.calledWith(@bucket, @key).should.equal true
done()
it "should tell the s3 wrapper to delete the cached foler", (done)->
@handler.deleteFile @bucket, @key, =>
@s3Wrapper.deleteFile.calledWith(@bucket, @stubbedConvetedKey).should.equal true
@FsWrapper.deleteFile.calledWith(@bucket, @stubbedConvetedKey).should.equal true
done()
describe "getFile", ->
@@ -103,11 +102,11 @@ describe "FileHandler", ->
beforeEach ->
@fileStream = {on:->}
@s3Wrapper.getFileStream.callsArgWith(2, "err", @fileStream)
@FsWrapper.getFileStream.callsArgWith(2, "err", @fileStream)
it "should get the stream from s3 ", (done)->
@handler.getFile @bucket, @key, null, =>
@s3Wrapper.getFileStream.calledWith(@bucket, @key).should.equal true
@FsWrapper.getFileStream.calledWith(@bucket, @key).should.equal true
done()
it "should return the stream and error", (done)->
@@ -119,14 +118,14 @@ describe "FileHandler", ->
describe "_getConvertedFile", ->
it "should getFileStream if it does exists", (done)->
@s3Wrapper.checkIfFileExists.callsArgWith(2, null, true)
@s3Wrapper.getFileStream.callsArgWith(2)
@FsWrapper.checkIfFileExists.callsArgWith(2, null, true)
@FsWrapper.getFileStream.callsArgWith(2)
@handler._getConvertedFile @bucket, @key, {}, =>
@s3Wrapper.getFileStream.calledWith(@bucket).should.equal true
@FsWrapper.getFileStream.calledWith(@bucket).should.equal true
done()
it "should call _getConvertedFileAndCache if it does exists", (done)->
@s3Wrapper.checkIfFileExists.callsArgWith(2, null, false)
@FsWrapper.checkIfFileExists.callsArgWith(2, null, false)
@handler._getConvertedFileAndCache = sinon.stub().callsArgWith(4)
@handler._getConvertedFile @bucket, @key, {}, =>
@handler._getConvertedFileAndCache.calledWith(@bucket, @key).should.equal true
@@ -135,15 +134,15 @@ describe "FileHandler", ->
describe "_getConvertedFileAndCache", ->
it "should _convertFile ", (done)->
@s3Wrapper.sendFileToS3 = sinon.stub().callsArgWith(3)
@s3Wrapper.getFileStream = sinon.stub().callsArgWith(2)
@FsWrapper.sendFileToS3 = sinon.stub().callsArgWith(3)
@FsWrapper.getFileStream = sinon.stub().callsArgWith(2)
@convetedKey = @key+"converted"
@handler._convertFile = sinon.stub().callsArgWith(3, null, @stubbedPath)
@ImageOptimiser.compressPng = sinon.stub().callsArgWith(1)
@handler._getConvertedFileAndCache @bucket, @key, @convetedKey, {}, =>
@handler._convertFile.called.should.equal true
@s3Wrapper.sendFileToS3.calledWith(@bucket, @convetedKey, @stubbedPath).should.equal true
@s3Wrapper.getFileStream.calledWith(@bucket, @convetedKey).should.equal true
@FsWrapper.sendFileToS3.calledWith(@bucket, @convetedKey, @stubbedPath).should.equal true
@FsWrapper.getFileStream.calledWith(@bucket, @convetedKey).should.equal true
@ImageOptimiser.compressPng.calledWith(@stubbedPath).should.equal true
done()
@@ -174,4 +173,4 @@ describe "FileHandler", ->

View file

@@ -0,0 +1,96 @@
logger = require("logger-sharelatex")
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../app/js/fsWrapper.js"
SandboxedModule = require('sandboxed-module')
describe "fsWrapperTests", ->
beforeEach ->
@s3Wrapper =
getFileStream: sinon.stub()
checkIfFileExists: sinon.stub()
deleteFile: sinon.stub()
deleteDirectory: sinon.stub()
sendStreamToS3: sinon.stub()
insertFile: sinon.stub()
describe "test s3 mixin", ->
beforeEach ->
@settings =
filestoreBackend: "s3"
@requires =
"./s3Wrapper": @s3Wrapper
"settings-sharelatex": @settings
"logger-sharelatex":
log:->
err:->
@fsWrapper = SandboxedModule.require modulePath, requires: @requires
it "should load getFileStream", (done) ->
@fsWrapper.should.respondTo("getFileStream")
@fsWrapper.getFileStream()
@s3Wrapper.getFileStream.calledOnce.should.equal true
done()
it "should load checkIfFileExists", (done) ->
@fsWrapper.checkIfFileExists()
@s3Wrapper.checkIfFileExists.calledOnce.should.equal true
done()
it "should load deleteFile", (done) ->
@fsWrapper.deleteFile()
@s3Wrapper.deleteFile.calledOnce.should.equal true
done()
it "should load deleteDirectory", (done) ->
@fsWrapper.deleteDirectory()
@s3Wrapper.deleteDirectory.calledOnce.should.equal true
done()
it "should load sendStreamToS3", (done) ->
@fsWrapper.sendStreamToS3()
@s3Wrapper.sendStreamToS3.calledOnce.should.equal true
done()
it "should load insertFile", (done) ->
@fsWrapper.insertFile()
@s3Wrapper.insertFile.calledOnce.should.equal true
done()
describe "test invalid mixins", ->
it "should not load a null wrapper", (done) ->
@settings =
@requires =
"./s3Wrapper": @s3Wrapper
"settings-sharelatex": @settings
"logger-sharelatex":
log:->
err:->
try
@fsWrapper=SandboxedModule.require modulePath, requires: @requires
catch error
assert.equal("Unknown filestore backend: null",error.message)
done()
it "should not load an invalid wrapper", (done) ->
@settings =
filestoreBackend:"magic"
@requires =
"./s3Wrapper": @s3Wrapper
"settings-sharelatex": @settings
"logger-sharelatex":
log:->
err:->
try
@fsWrapper=SandboxedModule.require modulePath, requires: @requires
catch error
assert.equal("Unknown filestore backend: magic",error.message)
done()