Merge pull request #11 from cwoac/FSPersistorManager
Fs persistor manager - merge from cwoac
Commit: 1e1c14e5fe
8 changed files with 288 additions and 41 deletions
services/filestore/app/coffee/FSPersistorManager.coffee (new file, 70 lines)
@@ -0,0 +1,70 @@
logger = require("logger-sharelatex")
fs = require("fs")
LocalFileWriter = require("./LocalFileWriter")

filterName = (key) ->
  return key.replace /\//, "_"

module.exports =
  sendFile: ( location, target, source, callback = (err)->) ->
    filteredTarget = filterName target
    logger.log location:location, target:filteredTarget, source:source, "sending file"
    fs.rename source, "#{location}/#{filteredTarget}", (err) ->
      if err?
        logger.err err:err, location:location, target:filteredTarget, source:source, "Error on put of file"
      callback err

  sendStream: ( location, target, sourceStream, callback = (err)->) ->
    logger.log location:location, target:target, "sending file stream"
    sourceStream.on "error", (err)->
      logger.err location:location, target:target, err:err, "error on stream to send"
    LocalFileWriter.writeStream sourceStream, null, (err, fsPath)=>
      if err?
        logger.err location:location, target:target, fsPath:fsPath, err:err, "something went wrong writing stream to disk"
        return callback err
      @sendFile location, target, fsPath, callback

  getFileStream: (location, name, callback = (err, res)->)->
    filteredName = filterName name
    logger.log location:location, name:filteredName, "getting file"
    sourceStream = fs.createReadStream "#{location}/#{filteredName}"
    sourceStream.on 'error', (err) ->
      logger.err err:err, location:location, name:name, "Error reading from file"
      callback err
    callback null, sourceStream

  copyFile: (location, fromName, toName, callback = (err)->)->
    filteredFromName = filterName fromName
    filteredToName = filterName toName
    logger.log location:location, fromName:filteredFromName, toName:filteredToName, "copying file"
    sourceStream = fs.createReadStream "#{location}/#{filteredFromName}"
    sourceStream.on 'error', (err) ->
      logger.err err:err, location:location, key:filteredFromName, "Error reading from file"
      callback err
    targetStream = fs.createWriteStream "#{location}/#{filteredToName}"
    targetStream.on 'error', (err) ->
      logger.err err:err, location:location, key:filteredToName, "Error writing to file"
      callback err
    sourceStream.pipe targetStream

  deleteFile: (location, name, callback)->
    filteredName = filterName name
    logger.log location:location, name:filteredName, "delete file"
    fs.unlink "#{location}/#{filteredName}", (err) ->
      logger.err err:err, location:location, name:filteredName, "Error on delete."
      callback err

  deleteDirectory: (location, name, callback = (err)->)->
    filteredName = filterName name
    fs.rmdir "#{location}/#{filteredName}", (err) ->
      logger.err err:err, location:location, name:filteredName, "Error on rmdir."
      callback err

  checkIfFileExists:(location, name, callback = (err,exists)->)->
    filteredName = filterName name
    logger.log location:location, name:filteredName, "checking if file exists"
    fs.exists "#{location}/#{filteredName}", (exists) ->
      logger.log location:location, name:filteredName, exists:exists, "checked if file exists"
      callback null, exists
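For orientation, the sketch below shows how this persistor could be driven directly. The data directory, key, and temporary upload path are invented for the example and do not come from this change.

FSPersistorManager = require "./FSPersistorManager"

# Hypothetical values: a writable data directory, a project/file key, and a
# temp file produced by an upload. filterName flattens the "/" in the key,
# so the stored file ends up at /var/lib/sharelatex/data/<project>_<file>.
dataDir = "/var/lib/sharelatex/data"
key     = "530f2407e7ef165704000007/530f838b46d9a9e859000008"

FSPersistorManager.sendFile dataDir, key, "/tmp/upload-1234", (err) ->
  return console.error "could not store file", err if err?
  # Read it back as a stream using the same key.
  FSPersistorManager.getFileStream dataDir, key, (err, stream) ->
    stream.pipe process.stdout unless err?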
services/filestore/app/coffee/KeyBuilder.coffee
@@ -20,13 +20,13 @@ module.exports =
  userFileKey: (req, res, next)->
    {project_id, file_id} = req.params
    req.key = "#{project_id}/#{file_id}"
-   req.bucket = settings.s3.buckets.user_files
+   req.bucket = settings.filestore.stores.user_files
    next()

  templateFileKey: (req, res, next)->
    {template_id, format, version} = req.params
    req.key = "#{template_id}/#{version}/#{format}"
-   req.bucket = settings.s3.buckets.template_files
+   req.bucket = settings.filestore.stores.template_files
    req.version = version
    opts = req.query
    next()
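To illustrate what the middleware now computes, here is a small sketch with hypothetical values (the IDs are reused from the unit tests further down):

# Minimal illustration of userFileKey; the settings and request are made up.
settings = filestore: stores: user_files: "sl_user_files"
req = params: { project_id: "530f2407e7ef165704000007", file_id: "530f838b46d9a9e859000008" }

{project_id, file_id} = req.params
req.key = "#{project_id}/#{file_id}"                 # "530f2407e7ef165704000007/530f838b46d9a9e859000008"
req.bucket = settings.filestore.stores.user_files    # a bucket name for s3, a directory path for fs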
services/filestore/app/coffee/PersistorManager.coffee
@@ -1,14 +1,15 @@
settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
-S3PersistorManager = require("./S3PersistorManager")

# assume s3 if none specified
-settings.filestoreBackend ||= "s3"
+settings.filestore.backend ||= "s3"

-logger.log backend:settings.filestoreBackend, "Loading backend"
-module.exports = switch settings.filestoreBackend
+logger.log backend:settings.filestore.backend, "Loading backend"
+module.exports = switch settings.filestore.backend
  when "s3"
-   S3PersistorManager
+   require("./S3PersistorManager")
+ when "fs"
+   require("./FSPersistorManager")
  else
-   throw new Error( "Unknown filestore backend: #{settings.filestoreBackend}" )
+   throw new Error( "Unknown filestore backend: #{settings.filestore.backend}" )
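Both backends export the same functions (sendFile, sendStream, getFileStream, copyFile, deleteFile, deleteDirectory, checkIfFileExists), so callers require PersistorManager and never name a concrete backend. The sketch below shows such a caller; the function and variable names are hypothetical and not part of this change.

settings = require "settings-sharelatex"
PersistorManager = require "./PersistorManager"

# Works the same whether settings.filestore.backend is "s3" or "fs": the first
# argument is a bucket name in the s3 case and a directory path in the fs case.
streamUserFile = (project_id, file_id, res, callback = (err) ->) ->
  key = "#{project_id}/#{file_id}"
  PersistorManager.getFileStream settings.filestore.stores.user_files, key, (err, stream) ->
    return callback err if err?
    stream.pipe res
    callback()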
services/filestore/app/coffee/S3PersistorManager.coffee
@@ -24,8 +24,8 @@ printSockets()
buildDefaultOptions = (bucketName, method, key)->
  return {
    aws:
-     key: settings.s3.key
-     secret: settings.s3.secret
+     key: settings.filestore.s3.key
+     secret: settings.filestore.s3.secret
      bucket: bucketName
    method: method
    timeout: thirtySeconds

@@ -36,8 +36,8 @@ module.exports =
  sendFile: (bucketName, key, fsPath, callback)->
    s3Client = knox.createClient
-     key: settings.s3.key
-     secret: settings.s3.secret
+     key: settings.filestore.s3.key
+     secret: settings.filestore.s3.secret
      bucket: bucketName
    putEventEmiter = s3Client.putFile fsPath, key, (err, res)->
      if err?

@@ -70,8 +70,8 @@ module.exports =
  getFileStream: (bucketName, key, callback = (err, res)->)->
    logger.log bucketName:bucketName, key:key, "getting file from s3"
    s3Client = knox.createClient
-     key: settings.s3.key
-     secret: settings.s3.secret
+     key: settings.filestore.s3.key
+     secret: settings.filestore.s3.secret
      bucket: bucketName
    s3Stream = s3Client.get(key)
    s3Stream.end()

@@ -84,8 +84,8 @@ module.exports =
  copyFile: (bucketName, sourceKey, destKey, callback)->
    logger.log bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "copying file in s3"
    s3Client = knox.createClient
-     key: settings.s3.key
-     secret: settings.s3.secret
+     key: settings.filestore.s3.key
+     secret: settings.filestore.s3.secret
      bucket: bucketName
    s3Client.copyFile sourceKey, destKey, (err)->
      if err?

@@ -102,8 +102,8 @@ module.exports =
  deleteDirectory: (bucketName, key, callback)->
    s3Client = knox.createClient
-     key: settings.s3.key
-     secret: settings.s3.secret
+     key: settings.filestore.s3.key
+     secret: settings.filestore.s3.secret
      bucket: bucketName
    s3Client.list prefix:key, (err, data)->
      keys = _.map data.Contents, (entry)->
config/settings.defaults.coffee
@@ -4,11 +4,26 @@ module.exports =
  port: 3009
  host: "localhost"

- # which persistor to use for file storage
- # current options are:
- # "s3" - Amazon S3
- # if no persistor is chosen, s3 will be used by default
- filestoreBackend: "s3"
+ filestore:
+   # which backend persistor to use.
+   # choices are
+   # s3 - Amazon S3
+   # fs - local filesystem
+   backend: "s3"
+   stores:
+     # where to store user and template binary files
+     #
+     # For Amazon S3 this is the bucket name to store binary files in
+     # Must contain full url like: <bucketname>.s3.amazonaws.com
+     #
+     # For local filesystem this is the directory to store the files in.
+     # Must contain full path, e.g. "/var/lib/sharelatex/data"
+     # This path must exist, not be tmpfs and be writable to by the user sharelatex is run as.
+     user_files: ""
+   s3:
+     # if you are using S3, then fill in your S3 details below
+     key: ""
+     secret: ""

  # ShareLaTeX stores binary files like images in S3.
  # Fill in your Amazon S3 credentials below.
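For comparison with the defaults above, a local-filesystem configuration could look like the sketch below. The paths are placeholders, and the template_files entry is an assumption based on KeyBuilder's use of settings.filestore.stores.template_files; it does not appear in the defaults shown in this diff.

filestore:
  backend: "fs"
  stores:
    # Directories must already exist, not be tmpfs, and be writable by the user running ShareLaTeX.
    user_files: "/var/lib/sharelatex/user_files"
    template_files: "/var/lib/sharelatex/template_files"   # assumed key, mirrors KeyBuilder above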
test/unit/coffee/FSPersistorManagerTests.coffee (new file, 157 lines)
@@ -0,0 +1,157 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should
expect = chai.expect
modulePath = "../../../app/js/FSPersistorManager.js"
SandboxedModule = require('sandboxed-module')
fs = require("fs")

describe "FSPersistorManagerTests", ->

  beforeEach ->
    @Fs =
      rename:sinon.stub()
      createReadStream:sinon.stub()
      createWriteStream:sinon.stub()
      unlink:sinon.stub()
      rmdir:sinon.stub()
      exists:sinon.stub()
    @LocalFileWriter =
      writeStream: sinon.stub()
    @requires =
      "./LocalFileWriter":@LocalFileWriter
      "fs":@Fs
      "logger-sharelatex":
        log:->
        err:->
    @location = "/tmp"
    @name1 = "530f2407e7ef165704000007/530f838b46d9a9e859000008"
    @name1Filtered = "530f2407e7ef165704000007_530f838b46d9a9e859000008"
    @name2 = "second_file"
    @error = "error_message"
    @FSPersistorManager = SandboxedModule.require modulePath, requires: @requires

  describe "sendFile", ->
    it "should put the file", (done) ->
      @Fs.rename.callsArgWith(2, @error)
      @FSPersistorManager.sendFile @location, @name1, @name2, (err)=>
        @Fs.rename.calledWith( @name2, "#{@location}/#{@name1Filtered}" ).should.equal true
        err.should.equal @error
        done()

  describe "sendStream", ->
    beforeEach ->
      @FSPersistorManager.sendFile = sinon.stub().callsArgWith(3)
      @LocalFileWriter.writeStream.callsArgWith(2, null, @name1)
      @SourceStream =
        on:->

    it "should send stream to LocalFileWriter", (done)->
      @FSPersistorManager.sendStream @location, @name1, @SourceStream, =>
        @LocalFileWriter.writeStream.calledWith(@SourceStream).should.equal true
        done()

    it "should return the error from LocalFileWriter", (done)->
      @LocalFileWriter.writeStream.callsArgWith(2, @error)
      @FSPersistorManager.sendStream @location, @name1, @SourceStream, (err)=>
        err.should.equal @error
        done()

    it "should send the file to the filestore", (done)->
      @LocalFileWriter.writeStream.callsArgWith(2)
      @FSPersistorManager.sendStream @location, @name1, @SourceStream, (err)=>
        @FSPersistorManager.sendFile.called.should.equal true
        done()

  describe "getFileStream", ->
    it "should use correct file location", (done) ->
      @Fs.createReadStream.returns(
        on:->
      )
      @FSPersistorManager.getFileStream @location, @name1, (err, res)=>
        @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal true
        done()

  describe "copyFile", ->
    beforeEach ->
      @ReadStream =
        on:->
        pipe:sinon.stub()
      @WriteStream =
        on:->
      @Fs.createReadStream.returns(@ReadStream)
      @Fs.createWriteStream.returns(@WriteStream)

    it "Should open the source for reading", (done) ->
      @FSPersistorManager.copyFile @location, @name1, @name2, ->
      @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal true
      done()

    it "Should open the target for writing", (done) ->
      @FSPersistorManager.copyFile @location, @name1, @name2, ->
      @Fs.createWriteStream.calledWith("#{@location}/#{@name2}").should.equal true
      done()

    it "Should pipe the source to the target", (done) ->
      @FSPersistorManager.copyFile @location, @name1, @name2, ->
      @ReadStream.pipe.calledWith(@WriteStream).should.equal true
      done()

  describe "deleteFile", ->
    beforeEach ->
      @Fs.unlink.callsArgWith(1, @error)

    it "Should call unlink with correct options", (done) ->
      @FSPersistorManager.deleteFile @location, @name1, (err) =>
        @Fs.unlink.calledWith("#{@location}/#{@name1Filtered}").should.equal true
        done()

    it "Should propagate the error", (done) ->
      @FSPersistorManager.deleteFile @location, @name1, (err) =>
        err.should.equal @error
        done()

  describe "deleteDirectory", ->
    beforeEach ->
      @Fs.rmdir.callsArgWith(1, @error)

    it "Should call rmdir with correct options", (done) ->
      @FSPersistorManager.deleteDirectory @location, @name1, (err) =>
        @Fs.rmdir.calledWith("#{@location}/#{@name1Filtered}").should.equal true
        done()

    it "Should propagate the error", (done) ->
      @FSPersistorManager.deleteDirectory @location, @name1, (err) =>
        err.should.equal @error
        done()

  describe "checkIfFileExists", ->
    beforeEach ->
      @Fs.exists.callsArgWith(1, true)

    it "Should call exists with correct options", (done) ->
      @FSPersistorManager.checkIfFileExists @location, @name1, (exists) =>
        @Fs.exists.calledWith("#{@location}/#{@name1Filtered}").should.equal true
        done()

    # fs.exists simply returns false on any error, so...
    it "should not return an error", (done) ->
      @FSPersistorManager.checkIfFileExists @location, @name1, (err, exists) =>
        expect(err).to.be.null
        done()

    it "Should return true for existing files", (done) ->
      @Fs.exists.callsArgWith(1, true)
      @FSPersistorManager.checkIfFileExists @location, @name1, (err, exists) =>
        exists.should.be.true
        done()

    it "Should return false for non-existing files", (done) ->
      @Fs.exists.callsArgWith(1, false)
      @FSPersistorManager.checkIfFileExists @location, @name1, (err, exists) =>
        exists.should.be.false
        done()
test/unit/coffee/PersistorManagerTests.coffee
@@ -22,7 +22,8 @@ describe "PersistorManagerTests", ->
  describe "test s3 mixin", ->
    beforeEach ->
      @settings =
-       filestoreBackend: "s3"
+       filestore:
+         backend: "s3"
      @requires =
        "./S3PersistorManager": @S3PersistorManager
        "settings-sharelatex": @settings

@@ -81,7 +82,8 @@ describe "PersistorManagerTests", ->
  describe "test invalid mixins", ->
    it "should not load an invalid wrapper", (done) ->
      @settings =
-       filestoreBackend:"magic"
+       filestore:
+         backend:"magic"
      @requires =
        "./S3PersistorManager": @S3PersistorManager
        "settings-sharelatex": @settings
test/unit/coffee/S3PersistorManagerTests.coffee
@@ -10,10 +10,12 @@ describe "S3PersistorManagerTests", ->
  beforeEach ->
    @settings =
+     filestore:
+       backend: "s3"
        s3:
          secret: "secret"
          key: "this_key"
-       buckets:
+       stores:
          user_files:"sl_user_files"
    @stubbedKnoxClient =
      putFile:sinon.stub()

@@ -138,7 +140,7 @@ describe "S3PersistorManagerTests", ->
    @S3PersistorManager.deleteFile @bucketName, @key, (err)=>
      opts = @request.args[0][0]
-     assert.deepEqual(opts.aws, {key:@settings.s3.key, secret:@settings.s3.secret, bucket:@bucketName})
+     assert.deepEqual(opts.aws, {key:@settings.filestore.s3.key, secret:@settings.filestore.s3.secret, bucket:@bucketName})
      opts.method.should.equal "delete"
      opts.timeout.should.equal (30*1000)
      opts.uri.should.equal "https://#{@bucketName}.s3.amazonaws.com/#{@key}"

@@ -162,7 +164,7 @@ describe "S3PersistorManagerTests", ->
    @S3PersistorManager.checkIfFileExists @bucketName, @key, (err)=>
      opts = @request.args[0][0]
-     assert.deepEqual(opts.aws, {key:@settings.s3.key, secret:@settings.s3.secret, bucket:@bucketName})
+     assert.deepEqual(opts.aws, {key:@settings.filestore.s3.key, secret:@settings.filestore.s3.secret, bucket:@bucketName})
      opts.method.should.equal "head"
      opts.timeout.should.equal (30*1000)
      opts.uri.should.equal "https://#{@bucketName}.s3.amazonaws.com/#{@key}"