create mixin to allow switching of backend

Oliver Matthews 2014-02-25 09:10:22 +00:00
parent 9963f01482
commit f68080d56c
6 changed files with 34 additions and 23 deletions

View file

@@ -1,4 +1,4 @@
-s3Wrapper = require("./s3Wrapper")
+fsWrapper = require("./fsWrapper")
 settings = require("settings-sharelatex")
 logger = require("logger-sharelatex")
 FileHandler = require("./FileHandler")
@@ -37,9 +37,9 @@ module.exports =
 		oldProject_id = req.body.source.project_id
 		oldFile_id = req.body.source.file_id
 		logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "reciving request to copy file"
-		s3Wrapper.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)->
+		fsWrapper.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)->
 			if err?
-				logger.log err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file in s3Wrapper"
+				logger.log err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file"
 				res.send 500
 			else
 				res.send 200
@@ -50,7 +50,7 @@ module.exports =
 		logger.log key:key, bucket:bucket, "reciving request to delete file"
 		FileHandler.deleteFile bucket, key, (err)->
 			if err?
-				logger.log err:err, key:key, bucket:bucket, "something went wrong deleting file in s3Wrapper"
+				logger.log err:err, key:key, bucket:bucket, "something went wrong deleting file"
 				res.send 500
 			else
 				res.send 204

View file

@@ -1,5 +1,5 @@
 settings = require("settings-sharelatex")
-s3Wrapper = require("./s3Wrapper")
+fsWrapper = require("./fsWrapper")
 LocalFileWriter = require("./LocalFileWriter")
 logger = require("logger-sharelatex")
 FileConverter = require("./FileConverter")
@@ -12,15 +12,15 @@ module.exports =
 	insertFile: (bucket, key, stream, callback)->
 		convetedKey = KeyBuilder.getConvertedFolderKey(key)
-		s3Wrapper.deleteDirectory bucket, convetedKey, ->
-			s3Wrapper.sendStreamToS3 bucket, key, stream, ->
+		fsWrapper.deleteDirectory bucket, convetedKey, ->
+			fsWrapper.sendStreamToS3 bucket, key, stream, ->
 				callback()
 	deleteFile: (bucket, key, callback)->
 		convetedKey = KeyBuilder.getConvertedFolderKey(bucket, key)
 		async.parallel [
-			(done)-> s3Wrapper.deleteFile bucket, key, done
-			(done)-> s3Wrapper.deleteFile bucket, convetedKey, done
+			(done)-> fsWrapper.deleteFile bucket, key, done
+			(done)-> fsWrapper.deleteFile bucket, convetedKey, done
 		], callback
 	getFile: (bucket, key, opts = {}, callback)->
@@ -31,16 +31,16 @@ module.exports =
 			@_getConvertedFile bucket, key, opts, callback
 	_getStandardFile: (bucket, key, opts, callback)->
-		s3Wrapper.getFileStream bucket, key, (err, fileStream)->
+		fsWrapper.getFileStream bucket, key, (err, fileStream)->
 			if err?
 				logger.err bucket:bucket, key:key, opts:opts, "error getting fileStream"
 			callback err, fileStream
 	_getConvertedFile: (bucket, key, opts, callback)->
 		convetedKey = KeyBuilder.addCachingToKey(key, opts)
-		s3Wrapper.checkIfFileExists bucket, convetedKey, (err, exists)=>
+		fsWrapper.checkIfFileExists bucket, convetedKey, (err, exists)=>
 			if exists
-				s3Wrapper.getFileStream bucket, convetedKey, callback
+				fsWrapper.getFileStream bucket, convetedKey, callback
 			else
 				@_getConvertedFileAndCache bucket, key, convetedKey, opts, callback
@@ -53,13 +53,13 @@ module.exports =
 			if err?
 				logger.err err:err, fsPath:fsPath, bucket:bucket, key:key, opts:opts, "something went wrong optimising png file"
 				return callback(err)
-			s3Wrapper.sendFileToS3 bucket, convetedKey, fsPath, (err)->
+			fsWrapper.sendFileToS3 bucket, convetedKey, fsPath, (err)->
 				if err?
-					logger.err err:err, bucket:bucket, key:key, convetedKey:convetedKey, opts:opts, "something went wrong seing file to s3"
+					logger.err err:err, bucket:bucket, key:key, convetedKey:convetedKey, opts:opts, "something went wrong sending the file"
 					return callback(err)
-				s3Wrapper.getFileStream bucket, convetedKey, callback
+				fsWrapper.getFileStream bucket, convetedKey, callback
 	_convertFile: (bucket, origonalKey, opts, callback)->
 		@_writeS3FileToDisk bucket, origonalKey, (err, origonalFsPath)->
 			if opts.format?
 				FileConverter.convert origonalFsPath, opts.format, callback
@@ -72,9 +72,6 @@ module.exports =
 	_writeS3FileToDisk: (bucket, key, callback)->
-		s3Wrapper.getFileStream bucket, key, (err, fileStream)->
+		fsWrapper.getFileStream bucket, key, (err, fileStream)->
 			LocalFileWriter.writeStream fileStream, key, callback

View file

@@ -0,0 +1,9 @@
+settings = require("settings-sharelatex")
+wrappedFs = switch settings.filestoreWrapper
+	when "s3" then require("./s3Wrapper")
+	else null
+if !wrappedFs
+	throw new Error( "Unknown filestore wrapper #{settings.filestoreWrapper}" )
+module.exports[name] = method for name,method of wrappedFs
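Note: this new fsWrapper module is a thin mixin that copies every method of the selected backend onto its own exports, so FileController and FileHandler keep calling the familiar s3Wrapper API without depending on it directly. As a rough sketch only (not part of this commit; "./diskWrapper" is a hypothetical module name), adding a second backend that exposes the same methods would just mean another branch in the switch:

	# sketch only: a hypothetical extra backend, assuming it implements the
	# same interface as s3Wrapper (getFileStream, sendStreamToS3, copyFile, ...)
	wrappedFs = switch settings.filestoreWrapper
		when "s3"   then require("./s3Wrapper")
		when "disk" then require("./diskWrapper")   # hypothetical, does not exist in this commit
		else null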

View file

@@ -4,6 +4,11 @@ module.exports =
 			port: 3009
 			host: "localhost"
 
+	# which backend to use
+	# current options are:
+	# "s3" - Amazon S3
+	filestoreWrapper: "s3"
+
 	# ShareLaTeX stores binary files like images in S3.
 	# Fill in your Amazon S3 credentials below.
 	s3:
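Note: with the mixin above, "s3" is the only value of filestoreWrapper that resolves to a backend; any other value makes fsWrapper throw at require time. A minimal sketch of a deployment's settings override (the exact file layout and s3 credential keys are assumptions, not shown in this commit):

	# sketch only: local settings override, assuming the usual
	# settings-sharelatex module.exports layout
	module.exports =
		filestoreWrapper: "s3"        # currently the only supported backend
		s3:
			key: "YOUR_AWS_KEY"       # placeholder credentials
			secret: "YOUR_AWS_SECRET" # placeholder credentials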

View file

@@ -27,7 +27,7 @@ describe "FileController", ->
 		@controller = SandboxedModule.require modulePath, requires:
 			"./LocalFileWriter":@LocalFileWriter
 			"./FileHandler": @FileHandler
-			"./s3Wrapper":@s3Wrapper
+			"./fsWrapper":@s3Wrapper
 			"settings-sharelatex": @settings
 			"logger-sharelatex":
 				log:->

View file

@@ -34,7 +34,7 @@ describe "FileHandler", ->
 			compressPng: sinon.stub()
 		@handler = SandboxedModule.require modulePath, requires:
 			"settings-sharelatex": @settings
-			"./s3Wrapper":@s3Wrapper
+			"./fsWrapper":@s3Wrapper
 			"./LocalFileWriter":@LocalFileWriter
 			"./FileConverter":@FileConverter
 			"./KeyBuilder": @keyBuilder
@@ -174,4 +174,4 @@ describe "FileHandler", ->