Initial open source commit

James Allen 2014-02-14 16:39:05 +00:00
commit 3199aad601
21 changed files with 1421 additions and 0 deletions

services/filestore/.gitignore vendored Normal file

@@ -0,0 +1,61 @@
compileFolder
Compiled source #
###################
*.com
*.class
*.dll
*.exe
*.o
*.so
# Packages #
############
# it's better to unpack these files and commit the raw source
# git has its own built in compression methods
*.7z
*.dmg
*.gz
*.iso
*.jar
*.rar
*.tar
*.zip
# Logs and databases #
######################
*.log
*.sql
*.sqlite
# OS generated files #
######################
.DS_Store?
ehthumbs.db
Icon?
Thumbs.db
/node_modules/*
test/IntergrationTests/js/*
data/*/*
app.js
app/js/*
test/IntergrationTests/js/*
test/UnitTests/js/*
cookies.txt
uploads/*
public/js/editor.js
public/js/home.js
public/js/forms.js
public/js/gui.js
public/js/admin.js
public/stylesheets/mainStyle.css
public/minjs/
test/unit/js/
test/acceptence/js
**.swp
/log.json
hash_folder

services/filestore/Gruntfile.coffee Normal file

@@ -0,0 +1,64 @@
module.exports = (grunt) ->
# Project configuration.
grunt.initConfig
coffee:
server:
expand: true,
flatten: false,
cwd: 'app/coffee',
src: ['**/*.coffee'],
dest: 'app/js/',
ext: '.js'
app_server:
expand: true,
flatten: false,
src: ['app.coffee'],
dest: './',
ext: '.js'
server_tests:
expand: true,
flatten: false,
cwd: 'test/unit/coffee',
src: ['*.coffee', '**/*.coffee'],
dest: 'test/unit/js/',
ext: '.js'
watch:
server_coffee:
files: ['app/*.coffee','app/**/*.coffee', 'test/unit/coffee/**/*.coffee', 'test/unit/coffee/*.coffee', "app.coffee"]
tasks: ["clean", 'coffee', 'mochaTest']
clean: ["app/js", "test/unit/js", "app.js"]
nodemon:
dev:
options:
file: 'app.js'
concurrent:
dev:
tasks: ['nodemon', 'watch']
options:
logConcurrentOutput: true
mochaTest:
test:
options:
reporter: process.env.MOCHA_RUNNER || "spec"
src: ['test/*.js', 'test/**/*.js']
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-contrib-watch'
grunt.loadNpmTasks 'grunt-nodemon'
grunt.loadNpmTasks 'grunt-contrib-clean'
grunt.loadNpmTasks 'grunt-concurrent'
grunt.loadNpmTasks 'grunt-mocha-test'
grunt.registerTask "ci", ["coffee", "mochaTest"]
grunt.registerTask 'default', ['coffee', 'concurrent']
grunt.registerTask "install", "coffee"

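With this configuration, "grunt" compiles the CoffeeScript and then runs nodemon and watch concurrently; "grunt ci" compiles and runs the mocha unit tests once; "grunt install" just compiles.
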
services/filestore/app.coffee Normal file

@@ -0,0 +1,102 @@
express = require('express')
logger = require('logger-sharelatex')
logger.initialize("filestore")
metrics = require("./app/js/metrics")
settings = require("settings-sharelatex")
request = require("request")
fileController = require("./app/js/FileController")
keyBuilder = require("./app/js/KeyBuilder")
domain = require("domain")
appIsOk = true
app = express()
streamBuffers = require("stream-buffers")
app.configure ->
app.use express.bodyParser()
app.configure 'development', ->
console.log "Development Enviroment"
app.use express.errorHandler({ dumpExceptions: true, showStack: true })
app.configure 'production', ->
console.log "Production Enviroment"
app.use express.logger()
app.use express.errorHandler()
metrics.inc "startup"
app.use (req, res, next)->
metrics.inc "http-request"
next()
app.use (req, res, next) ->
requestDomain = domain.create()
requestDomain.add req
requestDomain.add res
requestDomain.on "error", (err)->
res.send 500
logger = require('logger-sharelatex')
req =
body:req.body
headers:req.headers
url:req.url
key: req.key
statusCode: req.statusCode
err.domainEmitter.res = "too big to log"
logger.err err:err, req:req, res:res, "uncaught exception thrown on request"
appIsOk = false
exit = ->
console.log "exit"
process.exit(1)
setTimeout exit, 20000
requestDomain.run next
app.get "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile
app.post "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.insertFile
app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.copyFile
app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile
app.get "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFile
app.post "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.insertFile
app.post "/shutdown", (req, res)->
appIsOk = false
res.send()
app.get '/status', (req, res)->
if appIsOk
res.send('filestore sharelatex up')
else
logger.log "app is not ok - shutting down"
res.send("server is being shut down", 500)
app.get "/health_check", (req, res)->
req.params.project_id = settings.health_check.project_id
req.params.file_id = settings.health_check.file_id
myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer(initialSize: 100)
keyBuilder.userFileKey req, res, ->
fileController.getFile req, myWritableStreamBuffer
myWritableStreamBuffer.on "close", ->
if myWritableStreamBuffer.size() > 0
res.send(200)
else
res.send(503)
app.get '*', (req, res)->
res.send 404
serverDomain = domain.create()
serverDomain.run ->
server = require('http').createServer(app)
port = settings.internal.filestore.port or 3009
host = settings.internal.filestore.host or "localhost"
server.listen port, host, ->
logger.log("filestore store listening on #{host}:#{port}")
serverDomain.on "error", (err)->
logger.log err:err, "top level uncaught exception"

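The routes above are the service's whole HTTP interface. As an illustration only, a minimal client sketch in CoffeeScript (the ids and file names are hypothetical; "request" is already a dependency of this service):

fs = require "fs"
request = require "request"

project_id = "hypothetical_project_id"
file_id = "hypothetical_file_id"
base = "http://localhost:3009/project/#{project_id}/file/#{file_id}"

# Upload: the raw request body is streamed straight through FileHandler into S3.
fs.createReadStream("figure.eps").pipe request.post(base)

# Download a converted copy; format and style become the opts read by FileHandler.getFile.
request.get("#{base}?format=png&style=thumbnail").pipe fs.createWriteStream("thumb.png")
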
services/filestore/app/coffee/FileController.coffee Normal file

@@ -0,0 +1,59 @@
s3Wrapper = require("./s3Wrapper")
settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
FileHandler = require("./FileHandler")
LocalFileWriter = require("./LocalFileWriter")
metrics = require("./metrics")
oneDayInSeconds = 60 * 60 * 24
module.exports =
getFile: (req, res)->
metrics.inc "getFile"
{key, bucket} = req
{format, style} = req.query
logger.log key:key, bucket:bucket, format:format, style:style, "receiving request to get file"
FileHandler.getFile bucket, key, {format:format,style:style}, (err, fileStream)->
if err?
logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file"
res.send 500
else if req.params.cacheWarm
logger.log key:key, bucket:bucket, format:format, style:style, "request is only for cache warm so not sending stream"
res.send 200
else
logger.log key:key, bucket:bucket, format:format, style:style, "sending file to response"
fileStream.pipe res
insertFile: (req, res)->
metrics.inc "insertFile"
{key, bucket} = req
logger.log key:key, bucket:bucket, "receiving request to insert file"
FileHandler.insertFile bucket, key, req, (err)->
res.send 200
copyFile: (req, res)->
metrics.inc "copyFile"
{key, bucket} = req
oldProject_id = req.body.source.project_id
oldFile_id = req.body.source.file_id
logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "receiving request to copy file"
s3Wrapper.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)->
if err?
logger.err err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file in s3Wrapper"
res.send 500
else
res.send 200
deleteFile: (req, res)->
metrics.inc "deleteFile"
{key, bucket} = req
logger.log key:key, bucket:bucket, "receiving request to delete file"
FileHandler.deleteFile bucket, key, (err)->
if err?
logger.err err:err, key:key, bucket:bucket, "something went wrong deleting file in s3Wrapper"
res.send 500
else
res.send 204

services/filestore/app/coffee/FileConverter.coffee Normal file

@@ -0,0 +1,49 @@
easyimage = require("easyimage")
_ = require("underscore")
metrics = require("./metrics")
logger = require("logger-sharelatex")
approvedFormats = ["png"]
module.exports =
convert: (sourcePath, requestedFormat, callback)->
logger.log sourcePath:sourcePath, requestedFormat:requestedFormat, "converting file format"
timer = new metrics.Timer("imageConvert")
destPath = "#{sourcePath}.#{requestedFormat}"
sourcePath = "#{sourcePath}[0]"
if !_.include approvedFormats, requestedFormat
err = new Error("invalid format requested")
return callback err
args =
src: sourcePath
dst: destPath
easyimage.convert args, (err)->
timer.done()
callback(err, destPath)
thumbnail: (sourcePath, callback)->
logger.log sourcePath:sourcePath, "thumbnail convert file"
destPath = "#{sourcePath}.png"
sourcePath = "#{sourcePath}[0]"
args =
src: sourcePath
dst: destPath
width: 424
height: 300
args = "convert -flatten -background white -resize 300x -density 300 #{sourcePath} #{destPath}"
easyimage.exec args, (err)->
callback(err, destPath)
preview: (sourcePath, callback)->
logger.log sourcePath:sourcePath, "preview convert file"
destPath = "#{sourcePath}.png"
sourcePath = "#{sourcePath}[0]"
args =
src: sourcePath
dst: destPath
width: 600
height: 849
args = "convert -flatten -background white -resize 600x -density 300 #{sourcePath} #{destPath}"
easyimage.exec args, (err)->
callback(err, destPath)

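Note that convert goes through easyimage's structured args, while thumbnail and preview overwrite args with a hand-built ImageMagick command string for easyimage.exec; the "[0]" suffix on sourcePath selects the first page of a multi-page source. For a hypothetical sourcePath of "/tmp/f.eps", thumbnail runs roughly:

# convert -flatten -background white -resize 300x -density 300 /tmp/f.eps[0] /tmp/f.eps.png
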
services/filestore/app/coffee/FileHandler.coffee Normal file

@@ -0,0 +1,80 @@
settings = require("settings-sharelatex")
s3Wrapper = require("./s3Wrapper")
LocalFileWriter = require("./LocalFileWriter")
logger = require("logger-sharelatex")
FileConverter = require("./FileConverter")
KeyBuilder = require("./KeyBuilder")
async = require("async")
ImageOptimiser = require("./ImageOptimiser")
module.exports =
insertFile: (bucket, key, stream, callback)->
convetedKey = KeyBuilder.getConvertedFolderKey(key)
s3Wrapper.deleteDirectory bucket, convetedKey, ->
s3Wrapper.sendStreamToS3 bucket, key, stream, ->
callback()
deleteFile: (bucket, key, callback)->
convetedKey = KeyBuilder.getConvertedFolderKey(key)
async.parallel [
(done)-> s3Wrapper.deleteFile bucket, key, done
(done)-> s3Wrapper.deleteFile bucket, convetedKey, done
], callback
getFile: (bucket, key, opts = {}, callback)->
logger.log bucket:bucket, key:key, opts:opts, "getting file"
if !opts.format? and !opts.style?
@_getStandardFile bucket, key, opts, callback
else
@_getConvertedFile bucket, key, opts, callback
_getStandardFile: (bucket, key, opts, callback)->
s3Wrapper.getFileStream bucket, key, (err, fileStream)->
if err?
logger.err err:err, bucket:bucket, key:key, opts:opts, "error getting fileStream"
callback err, fileStream
_getConvertedFile: (bucket, key, opts, callback)->
convetedKey = KeyBuilder.addCachingToKey(key, opts)
s3Wrapper.checkIfFileExists bucket, convetedKey, (err, exists)=>
if exists
s3Wrapper.getFileStream bucket, convetedKey, callback
else
@_getConvertedFileAndCache bucket, key, convetedKey, opts, callback
_getConvertedFileAndCache: (bucket, key, convetedKey, opts, callback)->
@_convertFile bucket, key, opts, (err, fsPath)->
if err?
logger.err err:err, fsPath:fsPath, bucket:bucket, key:key, opts:opts, "something went wrong with converting file"
return callback(err)
ImageOptimiser.compressPng fsPath, (err)->
if err?
logger.err err:err, fsPath:fsPath, bucket:bucket, key:key, opts:opts, "something went wrong optimising png file"
return callback(err)
s3Wrapper.sendFileToS3 bucket, convetedKey, fsPath, (err)->
if err?
logger.err err:err, bucket:bucket, key:key, convetedKey:convetedKey, opts:opts, "something went wrong sending file to s3"
return callback(err)
s3Wrapper.getFileStream bucket, convetedKey, callback
_convertFile: (bucket, origonalKey, opts, callback)->
@_writeS3FileToDisk bucket, origonalKey, (err, origonalFsPath)->
if opts.format?
FileConverter.convert origonalFsPath, opts.format, callback
else if opts.style == "thumbnail"
FileConverter.thumbnail origonalFsPath, callback
else if opts.style == "preview"
FileConverter.preview origonalFsPath, callback
else
throw new Error("should have specified opts to convert file with #{JSON.stringify(opts)}")
_writeS3FileToDisk: (bucket, key, callback)->
s3Wrapper.getFileStream bucket, key, (err, fileStream)->
LocalFileWriter.writeStream fileStream, key, callback

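In short: plain requests stream the original straight out of S3, while format/style requests are served from a per-file "-converted-cache/" folder in the same bucket, converting, optimising and re-uploading on a cache miss. A minimal call sketch (hypothetical bucket and key):

FileHandler = require "./app/js/FileHandler"
FileHandler.getFile "user_files", "project_id/file_id", {style: "thumbnail"}, (err, stream)->
	stream.pipe process.stdout unless err?
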
services/filestore/app/coffee/ImageOptimiser.coffee Normal file

@@ -0,0 +1,29 @@
PngCrush = require('pngcrush')
fs = require("fs")
logger = require("logger-sharelatex")
module.exports =
compressPng: (localPath, callback)->
optimisedPath = "#{localPath}-optimised"
startTime = new Date()
logger.log localPath:localPath, optimisedPath:optimisedPath, "optimising png path"
readStream = fs.createReadStream(localPath)
writeStream = fs.createWriteStream(optimisedPath)
readStream.on "error", (err)->
logger.err err:err, localPath:localPath, "something went wrong getting read stream for compressPng"
callback(err)
writeStream.on "error", (err)->
logger.err err:err, localPath:localPath, "something went wrong getting write stream for compressPng"
callback(err)
myCrusher = new PngCrush()
myCrusher.on "error", (err)->
logger.err err:err, localPath:localPath, "error compressing file"
callback err
readStream.pipe(myCrusher).pipe(writeStream)
writeStream.on "finish", ->
timeTaken = new Date() - startTime
logger.log localPath:localPath, timeTaken:timeTaken, "finished converting file"
fs.rename optimisedPath, localPath, callback

services/filestore/app/coffee/KeyBuilder.coffee Normal file

@@ -0,0 +1,34 @@
settings = require("settings-sharelatex")
module.exports =
getConvertedFolderKey: (key)->
key = "#{key}-converted-cache/"
addCachingToKey: (key, opts)->
key = @getConvertedFolderKey(key)
if opts.format? and !opts.style?
key = "#{key}format-#{opts.format}"
if opts.style? and !opts.format?
key = "#{key}style-#{opts.style}"
if opts.style? and opts.format?
key = "#{key}format-#{opts.format}-style-#{opts.style}"
return key
userFileKey: (req, res, next)->
{project_id, file_id} = req.params
req.key = "#{project_id}/#{file_id}"
req.bucket = settings.s3.buckets.user_files
next()
templateFileKey: (req, res, next)->
{template_id, format, version} = req.params
req.key = "#{template_id}/#{version}/#{format}"
req.bucket = settings.s3.buckets.template_files
req.version = version
opts = req.query
next()

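A worked example of the key scheme (hypothetical ids), matching the unit tests further down:

KeyBuilder = require "./app/js/KeyBuilder"
key = "project_id/file_id"
KeyBuilder.addCachingToKey key, format: "png"
# => "project_id/file_id-converted-cache/format-png"
KeyBuilder.addCachingToKey key, style: "thumbnail"
# => "project_id/file_id-converted-cache/style-thumbnail"
KeyBuilder.addCachingToKey key, format: "png", style: "thumbnail"
# => "project_id/file_id-converted-cache/format-png-style-thumbnail"
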
services/filestore/app/coffee/LocalFileWriter.coffee Normal file

@@ -0,0 +1,36 @@
fs = require("fs")
uuid = require('node-uuid')
path = require("path")
_ = require("underscore")
logger = require("logger-sharelatex")
metrics = require("./metrics")
module.exports =
writeStream: (stream, key, callback)->
timer = new metrics.Timer("writingFile")
callback = _.once callback
fsPath = @_getPath(key)
logger.log fsPath:fsPath, "writing file locally"
writeStream = fs.createWriteStream(fsPath)
stream.pipe writeStream
writeStream.on "finish", ->
timer.done()
logger.log fsPath:fsPath, "finished writing file locally"
callback(null, fsPath)
writeStream.on "error", (err)->
logger.err err:err, fsPath:fsPath, "problem writing file locally, with write stream"
callback err
stream.on "error", (err)->
logger.err err:err, fsPath:fsPath, "problem writing file locally, with read stream"
callback err
deleteFile: (fsPath, callback)->
fs.unlink fsPath, callback
_getPath : (key)->
if !key?
key = uuid.v1()
key = key.replace(/\//g,"-")
path.join(__dirname, "../../uploads/#{key}")

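Passing a null key makes _getPath fall back to a v1 uuid, which is how sendStreamToS3 buffers anonymous uploads. A usage sketch (someReadStream is a stand-in for any readable stream, not something defined in this commit):

LocalFileWriter = require "./app/js/LocalFileWriter"
LocalFileWriter.writeStream someReadStream, null, (err, fsPath)->
	return console.error err if err?
	# fsPath points at ./uploads/<uuid>; remove it once finished with it.
	LocalFileWriter.deleteFile fsPath, ->
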
services/filestore/app/coffee/metrics.coffee Normal file

@@ -0,0 +1,24 @@
StatsD = require('lynx')
settings = require('settings-sharelatex')
statsd = new StatsD('localhost', 8125, {on_error:->})
buildKey = (key)-> "filestore.#{process.env.NODE_ENV}.#{key}"
module.exports =
set : (key, value, sampleRate = 1)->
statsd.set buildKey(key), value, sampleRate
inc : (key, sampleRate = 1)->
statsd.increment buildKey(key), sampleRate
Timer : class
constructor :(key, sampleRate = 1)->
this.start = new Date()
this.key = buildKey(key)
this.sampleRate = sampleRate
done:->
timeSpan = new Date - this.start
statsd.timing(this.key, timeSpan, this.sampleRate)
gauge : (key, value, sampleRate = 1)->
statsd.gauge key, value, sampleRate

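Every stat is namespaced by buildKey as filestore.<NODE_ENV>.<key>. A usage sketch:

metrics = require "./app/js/metrics"
metrics.inc "http-request"          # counter: filestore.<env>.http-request
timer = new metrics.Timer "getFile" # starts timing at construction
# ... the work being measured ...
timer.done()                        # reports the elapsed milliseconds to statsd
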
services/filestore/app/coffee/s3Wrapper.coffee Normal file

@@ -0,0 +1,102 @@
settings = require("settings-sharelatex")
request = require("request")
logger = require("logger-sharelatex")
fs = require("fs")
knox = require("knox")
path = require("path")
LocalFileWriter = require("./LocalFileWriter")
_ = require("underscore")
thirtySeconds = 30 * 1000
buildDefaultOptions = (bucketName, method, key)->
return {
aws:
key: settings.s3.key
secret: settings.s3.secret
bucket: bucketName
method: method
timeout: thirtySeconds
uri:"https://#{bucketName}.s3.amazonaws.com/#{key}"
}
module.exports =
sendFileToS3: (bucketName, key, fsPath, callback)->
s3Client = knox.createClient
key: settings.s3.key
secret: settings.s3.secret
bucket: bucketName
putEventEmiter = s3Client.putFile fsPath, key, (err, res)->
if err?
logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath,"something went wrong uploading file to s3"
return callback(err)
if res.statusCode != 200
logger.err bucketName:bucketName, key:key, fsPath:fsPath, "non 200 response from s3 putting file"
return callback("non 200 response from s3 on put file")
LocalFileWriter.deleteFile fsPath, (err)->
logger.log res:res, bucketName:bucketName, key:key, fsPath:fsPath,"file uploaded to s3"
callback(err)
putEventEmiter.on "error", (err)->
logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error emitted on put of file"
callback err
sendStreamToS3: (bucketName, key, readStream, callback)->
logger.log bucketName:bucketName, key:key, "sending file to s3"
readStream.on "error", (err)->
logger.err err:err, bucketName:bucketName, key:key, "error on stream to send to s3"
LocalFileWriter.writeStream readStream, null, (err, fsPath)=>
if err?
logger.err bucketName:bucketName, key:key, fsPath:fsPath, err:err, "something went wrong writing stream to disk"
return callback(err)
@sendFileToS3 bucketName, key, fsPath, callback
getFileStream: (bucketName, key, callback)->
logger.log bucketName:bucketName, key:key, "getting file from s3"
options = buildDefaultOptions(bucketName, "get", key)
readStream = request(options)
readStream.on "error", (err)->
logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3"
callback null, readStream
copyFile: (bucketName, sourceKey, destKey, callback)->
logger.log bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "copying file in s3"
s3Client = knox.createClient
key: settings.s3.key
secret: settings.s3.secret
bucket: bucketName
s3Client.copyFile sourceKey, destKey, (err)->
if err?
logger.err bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in aws"
callback(err)
deleteFile: (bucketName, key, callback)->
logger.log bucketName:bucketName, key:key, "delete file in s3"
options = buildDefaultOptions(bucketName, "delete", key)
request options, (err, res)->
if err?
logger.err res:res, bucketName:bucketName, key:key, "something went wrong deleting file in aws"
callback(err)
deleteDirectory: (bucketName, key, callback)->
s3Client = knox.createClient
key: settings.s3.key
secret: settings.s3.secret
bucket: bucketName
s3Client.list prefix:key, (err, data)->
keys = _.map data.Contents, (entry)->
return entry.Key
s3Client.deleteMultiple keys, callback
checkIfFileExists:(bucketName, key, callback)->
logger.log bucketName:bucketName, key:key, "checking if file exists in s3"
options = buildDefaultOptions(bucketName, "head", key)
request options, (err, res)->
if err?
logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong checking if file exists in aws"
exists = res.statusCode == 200
logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exists in s3"
callback(err, exists)

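Two access paths coexist here: knox handles uploads, copies and directory deletes, while single-key get/head/delete go through plain signed HTTP calls built by buildDefaultOptions. For a hypothetical bucket and key, that helper yields roughly:

# bucketName = "my-bucket", key = "project_id/file_id", method = "head" =>
#   aws: { key: settings.s3.key, secret: settings.s3.secret, bucket: "my-bucket" }
#   method: "head"
#   timeout: 30000
#   uri: "https://my-bucket.s3.amazonaws.com/project_id/file_id"
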
services/filestore/config/settings.defaults.coffee Normal file

@@ -0,0 +1,22 @@
module.exports =
internal:
filestore:
port: 3009
host: "localhost"
# ShareLaTeX stores binary files like images in S3.
# Fill in your Amazon S3 credentials below.
s3:
key: ''
secret: ''
buckets:
user_files: ""
template_files: ""
# Filestore health check
# ----------------------
# Project and file details to check in filestore when calling /health_check
# health_check:
# project_id: ""
# file_id: ""

services/filestore/package.json Normal file

@@ -0,0 +1,32 @@
{
"name": "filestore-sharelatex",
"version": "0.0.1",
"dependencies": {
"settings": "git+ssh://git@bitbucket.org:sharelatex/settings-sharelatex.git#master",
"logger": "git+ssh://git@bitbucket.org:sharelatex/logger-sharelatex.git#bunyan",
"request": "2.14.0",
"lynx": "0.0.11",
"grunt-mocha-test": "~0.8.2",
"knox": "~0.8.8",
"node-uuid": "~1.4.1",
"underscore": "~1.5.2",
"easyimage": "~0.1.6",
"express": "~3.4.8",
"longjohn": "~0.2.2",
"async": "~0.2.10",
"pngcrush": "0.0.3",
"stream-buffers": "~0.2.5"
},
"devDependencies": {
"sinon": "",
"chai": "",
"sandboxed-module": "",
"grunt": "0.4.1",
"grunt-contrib-requirejs": "0.4.1",
"grunt-contrib-coffee": "0.7.0",
"grunt-contrib-watch": "0.5.3",
"grunt-nodemon": "0.1.2",
"grunt-contrib-clean": "0.5.0",
"grunt-concurrent": "0.4.2"
}
}

services/filestore/test/unit/coffee/FileControllerTests.coffee Normal file

@@ -0,0 +1,126 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../app/js/FileController.js"
SandboxedModule = require('sandboxed-module')
describe "FileController", ->
beforeEach ->
@s3Wrapper =
sendStreamToS3: sinon.stub()
getAndPipe: sinon.stub()
copyFile: sinon.stub()
deleteFile:sinon.stub()
@settings =
s3:
buckets:
user_files:"user_files"
@FileHandler =
getFile: sinon.stub()
deleteFile: sinon.stub()
insertFile: sinon.stub()
@LocalFileWriter = {}
@controller = SandboxedModule.require modulePath, requires:
"./LocalFileWriter":@LocalFileWriter
"./FileHandler": @FileHandler
"./s3Wrapper":@s3Wrapper
"settings-sharelatex": @settings
"logger-sharelatex":
log:->
err:->
@project_id = "project_id"
@file_id = "file_id"
@bucket = "user_files"
@key = "#{@project_id}/#{@file_id}"
@req =
key:@key
bucket:@bucket
query:{}
params:
project_id:@project_id
file_id:@file_id
@res =
setHeader: ->
@fileStream = {}
describe "getFile", ->
it "should pipe the stream", (done)->
@FileHandler.getFile.callsArgWith(3, null, @fileStream)
@fileStream.pipe = (res)=>
res.should.equal @res
done()
@controller.getFile @req, @res
it "should send a 200 if the cacheWarm param is true", (done)->
@req.params.cacheWarm = true
@FileHandler.getFile.callsArgWith(3, null, @fileStream)
@res.send = (statusCode)=>
statusCode.should.equal 200
done()
@controller.getFile @req, @res
it "should send a 500 if there is a problem", (done)->
@FileHandler.getFile.callsArgWith(3, "error")
@res.send = (code)=>
code.should.equal 500
done()
@controller.getFile @req, @res
describe "insertFile", ->
it "should send bucket name key and res to s3Wrapper", (done)->
@FileHandler.insertFile.callsArgWith(3)
@res.send = =>
@FileHandler.insertFile.calledWith(@bucket, @key, @req).should.equal true
done()
@controller.insertFile @req, @res
describe "copyFile", ->
beforeEach ->
@oldFile_id = "old_file_id"
@oldProject_id = "old_project_id"
@req.body =
source:
project_id: @oldProject_id
file_id: @oldFile_id
it "should send bucket name and both keys to s3Wrapper", (done)->
@s3Wrapper.copyFile.callsArgWith(3)
@res.send = (code)=>
code.should.equal 200
@s3Wrapper.copyFile.calledWith(@bucket, "#{@oldProject_id}/#{@oldFile_id}", @key).should.equal true
done()
@controller.copyFile @req, @res
it "should send a 500 if there was an error", (done)->
@s3Wrapper.copyFile.callsArgWith(3, "error")
@res.send = (code)=>
code.should.equal 500
done()
@controller.copyFile @req, @res
describe "delete file", ->
it "should tell the file handler", (done)->
@FileHandler.deleteFile.callsArgWith(2)
@res.send = (code)=>
code.should.equal 204
@FileHandler.deleteFile.calledWith(@bucket, @key).should.equal true
done()
@controller.deleteFile @req, @res
it "should send a 500 if there was an error", (done)->
@FileHandler.deleteFile.callsArgWith(2, "error")
@res.send = (code)->
code.should.equal 500
done()
@controller.deleteFile @req, @res

services/filestore/test/unit/coffee/FileConverterTests.coffee Normal file

@@ -0,0 +1,73 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../app/js/FileConverter.js"
SandboxedModule = require('sandboxed-module')
describe "FileConverter", ->
beforeEach ->
@easyimage =
convert:sinon.stub()
exec: sinon.stub()
@converter = SandboxedModule.require modulePath, requires:
"easyimage":@easyimage
"logger-sharelatex":
log:->
err:->
@sourcePath = "/this/path/here.eps"
@format = "png"
@error = "Error"
describe "convert", ->
it "should convert the source to the requested format", (done)->
@easyimage.convert.callsArgWith(1)
@converter.convert @sourcePath, @format, (err)=>
args = @easyimage.convert.args[0][0]
args.src.should.equal @sourcePath+"[0]"
args.dst.should.equal "#{@sourcePath}.#{@format}"
done()
it "should return the dest path", (done)->
@easyimage.convert.callsArgWith(1)
@converter.convert @sourcePath, @format, (err, destPath)=>
destPath.should.equal "#{@sourcePath}.#{@format}"
done()
it "should return the error from convert", (done)->
@easyimage.convert.callsArgWith(1, @error)
@converter.convert @sourcePath, @format, (err)=>
err.should.equal @error
done()
it "should not accapt an non aproved format", (done)->
@easyimage.convert.callsArgWith(1)
@converter.convert @sourcePath, "ahhhhh", (err)=>
expect(err).to.exist
done()
describe "thumbnail", ->
it "should call easy image resize with args", (done)->
@easyimage.exec.callsArgWith(1)
@converter.thumbnail @sourcePath, (err)=>
args = @easyimage.exec.args[0][0]
args.indexOf(@sourcePath).should.not.equal -1
done()
it "should compress the png", ()->
describe "preview", ->
it "should call easy image resize with args", (done)->
@easyimage.exec.callsArgWith(1)
@converter.preview @sourcePath, (err)=>
args = @easyimage.exec.args[0][0]
args.indexOf(@sourcePath).should.not.equal -1
done()

services/filestore/test/unit/coffee/FileHandlerTests.coffee Normal file

@@ -0,0 +1,177 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../app/js/FileHandler.js"
SandboxedModule = require('sandboxed-module')
describe "FileHandler", ->
beforeEach ->
@settings =
s3:
buckets:
user_files:"user_files"
@s3Wrapper =
getFileStream: sinon.stub()
checkIfFileExists: sinon.stub()
deleteFile: sinon.stub()
deleteDirectory: sinon.stub()
sendStreamToS3: sinon.stub()
insertFile: sinon.stub()
@LocalFileWriter =
writeStream: sinon.stub()
@FileConverter =
convert: sinon.stub()
thumbnail: sinon.stub()
preview: sinon.stub()
@keyBuilder =
addCachingToKey: sinon.stub()
getConvertedFolderKey: sinon.stub()
@ImageOptimiser =
compressPng: sinon.stub()
@handler = SandboxedModule.require modulePath, requires:
"settings-sharelatex": @settings
"./s3Wrapper":@s3Wrapper
"./LocalFileWriter":@LocalFileWriter
"./FileConverter":@FileConverter
"./KeyBuilder": @keyBuilder
"./ImageOptimiser":@ImageOptimiser
"logger-sharelatex":
log:->
err:->
@bucket = "my_bucket"
@key = "key/here"
@stubbedPath = "/var/somewhere/path"
@format = "png"
@formattedStubbedPath = "#{@stubbedPath}.#{@format}"
describe "insertFile", ->
beforeEach ->
@stream = {}
@stubbedConvetedKey = "#{@key}-converted-cache/"
@s3Wrapper.deleteDirectory.callsArgWith(2)
@s3Wrapper.sendStreamToS3.callsArgWith(3)
it "should send file to s3", (done)->
@handler.insertFile @bucket, @key, @stream, =>
@s3Wrapper.sendStreamToS3.calledWith(@bucket, @key, @stream).should.equal true
done()
it "should delete the convetedKey folder", (done)->
@keyBuilder.getConvertedFolderKey.returns(@stubbedConvetedKey)
@handler.insertFile @bucket, @key, @stream, =>
@s3Wrapper.deleteDirectory.calledWith(@bucket, @stubbedConvetedKey).should.equal true
done()
describe "deleteFile", ->
beforeEach ->
@stubbedConvetedKey = "#{@key}-converted-cache/"
@keyBuilder.getConvertedFolderKey.returns(@stubbedConvetedKey)
@s3Wrapper.deleteFile.callsArgWith(2)
it "should tell the s3 wrapper to delete the file", (done)->
@handler.deleteFile @bucket, @key, =>
@s3Wrapper.deleteFile.calledWith(@bucket, @key).should.equal true
done()
it "should tell the s3 wrapper to delete the cached foler", (done)->
@handler.deleteFile @bucket, @key, =>
@s3Wrapper.deleteFile.calledWith(@bucket, @stubbedConvetedKey).should.equal true
done()
describe "getFile", ->
beforeEach ->
@handler._getStandardFile = sinon.stub().callsArgWith(3)
@handler._getConvertedFile = sinon.stub().callsArgWith(3)
it "should call _getStandardFile if no format or style are defined", (done)->
@handler.getFile @bucket, @key, null, =>
@handler._getStandardFile.called.should.equal true
@handler._getConvertedFile.called.should.equal false
done()
it "should call _getConvertedFile if a format is defined", (done)->
@handler.getFile @bucket, @key, format:"png", =>
@handler._getStandardFile.called.should.equal false
@handler._getConvertedFile.called.should.equal true
done()
describe "_getStandardFile", ->
beforeEach ->
@fileStream = {on:->}
@s3Wrapper.getFileStream.callsArgWith(2, "err", @fileStream)
it "should get the stream from s3 ", (done)->
@handler.getFile @bucket, @key, null, =>
@s3Wrapper.getFileStream.calledWith(@bucket, @key).should.equal true
done()
it "should return the stream and error", (done)->
@handler.getFile @bucket, @key, null, (err, stream)=>
err.should.equal "err"
stream.should.equal @fileStream
done()
describe "_getConvertedFile", ->
it "should getFileStream if it does exists", (done)->
@s3Wrapper.checkIfFileExists.callsArgWith(2, null, true)
@s3Wrapper.getFileStream.callsArgWith(2)
@handler._getConvertedFile @bucket, @key, {}, =>
@s3Wrapper.getFileStream.calledWith(@bucket).should.equal true
done()
it "should call _getConvertedFileAndCache if it does exists", (done)->
@s3Wrapper.checkIfFileExists.callsArgWith(2, null, false)
@handler._getConvertedFileAndCache = sinon.stub().callsArgWith(4)
@handler._getConvertedFile @bucket, @key, {}, =>
@handler._getConvertedFileAndCache.calledWith(@bucket, @key).should.equal true
done()
describe "_getConvertedFileAndCache", ->
it "should _convertFile ", (done)->
@s3Wrapper.sendFileToS3 = sinon.stub().callsArgWith(3)
@s3Wrapper.getFileStream = sinon.stub().callsArgWith(2)
@convetedKey = @key+"converted"
@handler._convertFile = sinon.stub().callsArgWith(3, null, @stubbedPath)
@ImageOptimiser.compressPng = sinon.stub().callsArgWith(1)
@handler._getConvertedFileAndCache @bucket, @key, @convetedKey, {}, =>
@handler._convertFile.called.should.equal true
@s3Wrapper.sendFileToS3.calledWith(@bucket, @convetedKey, @stubbedPath).should.equal true
@s3Wrapper.getFileStream.calledWith(@bucket, @convetedKey).should.equal true
@ImageOptimiser.compressPng.calledWith(@stubbedPath).should.equal true
done()
describe "_convertFile", ->
beforeEach ->
@FileConverter.convert.callsArgWith(2, null, @formattedStubbedPath)
@FileConverter.thumbnail.callsArgWith(1, null, @formattedStubbedPath)
@FileConverter.preview.callsArgWith(1, null, @formattedStubbedPath)
@handler._writeS3FileToDisk = sinon.stub().callsArgWith(2, null, @stubbedPath)
it "should call thumbnail on the writer path if style was thumbnail was specified", (done)->
@handler._convertFile @bucket, @key, style:"thumbnail", (err, path)=>
path.should.equal @formattedStubbedPath
@FileConverter.thumbnail.calledWith(@stubbedPath).should.equal true
done()
it "should call preview on the writer path if style was preview was specified", (done)->
@handler._convertFile @bucket, @key, style:"preview", (err, path)=>
path.should.equal @formattedStubbedPath
@FileConverter.preview.calledWith(@stubbedPath).should.equal true
done()
it "should call convert on the writer path if a format was specified", (done)->
@handler._convertFile @bucket, @key, format:@format, (err, path)=>
path.should.equal @formattedStubbedPath
@FileConverter.convert.calledWith(@stubbedPath, @format).should.equal true
done()

services/filestore/test/unit/coffee/ImageOptimiserTests.coffee Normal file

@@ -0,0 +1,60 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../app/js/ImageOptimiser.js"
SandboxedModule = require('sandboxed-module')
describe "ImageOptimiser", ->
beforeEach ->
@fs =
createReadStream:sinon.stub()
createWriteStream:sinon.stub()
rename:sinon.stub()
@pngcrush = class PngCrush
pipe:->
on: ->
@optimiser = SandboxedModule.require modulePath, requires:
"fs":@fs
"pngcrush":@pngcrush
"logger-sharelatex":
log:->
err:->
@sourcePath = "/this/path/here.eps"
@writeStream =
pipe:->
on: (type, cb)->
if type == "finish"
cb()
@sourceStream =
pipe:->
return pipe:->
on:->
@error = "Error"
describe "compressPng", ->
beforeEach ->
@fs.createReadStream.returns(@sourceStream)
@fs.createWriteStream.returns(@writeStream)
@fs.rename.callsArgWith(2)
it "should get the file stream", (done)->
@optimiser.compressPng @sourcePath, (err)=>
@fs.createReadStream.calledWith(@sourcePath).should.equal true
done()
it "should create a compressed file stream", (done)->
@optimiser.compressPng @sourcePath, (err)=>
@fs.createWriteStream.calledWith("#{@sourcePath}-optimised").should.equal true
done()
it "should rename the file after completion", (done)->
@optimiser.compressPng @sourcePath, (err)=>
@fs.rename.calledWith("#{@sourcePath}-optimised", @sourcePath).should.equal true
done()

services/filestore/test/unit/coffee/KeyBuilderTests.coffee Normal file

@@ -0,0 +1,39 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../app/js/KeyBuilder.js"
SandboxedModule = require('sandboxed-module')
describe "LocalFileWriter", ->
beforeEach ->
@keyBuilder = SandboxedModule.require modulePath, requires:
"logger-sharelatex":
log:->
err:->
@key = "123/456"
describe "cachedKey", ->
it "should add the fomat on", ->
opts =
format: "png"
newKey = @keyBuilder.addCachingToKey @key, opts
newKey.should.equal "#{@key}-converted-cache/format-png"
it "should add the style on", ->
opts =
style: "thumbnail"
newKey = @keyBuilder.addCachingToKey @key, opts
newKey.should.equal "#{@key}-converted-cache/style-thumbnail"
it "should add format on first", ->
opts =
style: "thumbnail"
format: "png"
newKey = @keyBuilder.addCachingToKey @key, opts
newKey.should.equal "#{@key}-converted-cache/format-png-style-thumbnail"

services/filestore/test/unit/coffee/LocalFileWriterTests.coffee Normal file

@@ -0,0 +1,59 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../app/js/LocalFileWriter.js"
SandboxedModule = require('sandboxed-module')
describe "LocalFileWriter", ->
beforeEach ->
@writeStream =
on: (type, cb)->
if type == "finish"
cb()
@fs =
createWriteStream : sinon.stub().returns(@writeStream)
unlink: sinon.stub()
@writer = SandboxedModule.require modulePath, requires:
"fs": @fs
"logger-sharelatex":
log:->
err:->
@stubbedFsPath = "something/uploads/eio2k1j3"
describe "writeStrem", ->
beforeEach ->
@writer._getPath = sinon.stub().returns(@stubbedFsPath)
it "write the stream to ./uploads", (done)->
stream =
pipe: (dest)=>
dest.should.equal @writeStream
done()
on: ->
@writer.writeStream stream, null, ()=>
it "should send the path in the callback", (done)->
stream =
pipe: (dest)=>
on: (type, cb)->
if type == "end"
cb()
@writer.writeStream stream, null, (err, fsPath)=>
fsPath.should.equal @stubbedFsPath
done()
describe "delete file", ->
it "should unlink the file", (done)->
error = "my error"
@fs.unlink.callsArgWith(1, error)
@writer.deleteFile @stubbedFsPath, (err)=>
@fs.unlink.calledWith(@stubbedFsPath).should.equal true
err.should.equal error
done()

services/filestore/test/unit/coffee/s3WrapperTests.coffee Normal file

@@ -0,0 +1,193 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../app/js/s3Wrapper.js"
SandboxedModule = require('sandboxed-module')
describe "s3WrapperTests", ->
beforeEach ->
@settings =
s3:
secret: "secret"
key: "this_key"
buckets:
user_files:"sl_user_files"
@stubbedKnoxClient =
putFile:sinon.stub()
copyFile:sinon.stub()
list: sinon.stub()
deleteMultiple: sinon.stub()
@knox =
createClient: sinon.stub().returns(@stubbedKnoxClient)
@LocalFileWriter =
writeStream: sinon.stub()
deleteFile: sinon.stub()
@requires =
"knox": @knox
"settings-sharelatex": @settings
"./LocalFileWriter":@LocalFileWriter
"logger-sharelatex":
log:->
err:->
@key = "my/key"
@bucketName = "my-bucket"
@error = "my errror"
describe "Pipe to dest", ->
it "should use correct options", (done)->
stubbedReadStream = {on:->}
dest = {my:"object"}
@request = (opts)=>
return stubbedReadStream
@requires["request"] = @request
@s3Wrapper = SandboxedModule.require modulePath, requires: @requires
@s3Wrapper.getFileStream @bucketName, @key, (err, readStream)->
readStream.should.equal stubbedReadStream
done()
describe "sendFileToS3", ->
beforeEach ->
@s3Wrapper = SandboxedModule.require modulePath, requires: @requires
@stubbedKnoxClient.putFile.returns on:->
it "should put file with knox", (done)->
@LocalFileWriter.deleteFile.callsArgWith(1)
@stubbedKnoxClient.putFile.callsArgWith(2, @error)
@s3Wrapper.sendFileToS3 @bucketName, @key, @fsPath, (err)=>
@stubbedKnoxClient.putFile.calledWith(@fsPath, @key).should.equal true
err.should.equal @error
done()
it "should delete the file and pass the error with it", (done)->
@LocalFileWriter.deleteFile.callsArgWith(1)
@stubbedKnoxClient.putFile.callsArgWith(2, @error)
@s3Wrapper.sendFileToS3 @bucketName, @key, @fsPath, (err)=>
@stubbedKnoxClient.putFile.calledWith(@fsPath, @key).should.equal true
err.should.equal @error
done()
describe "sendStreamToS3", ->
beforeEach ->
@fsPath = "to/some/where"
@origin =
on:->
@s3Wrapper = SandboxedModule.require modulePath, requires: @requires
@s3Wrapper.sendFileToS3 = sinon.stub().callsArgWith(3)
it "should send stream to LocalFileWriter", (done)->
@LocalFileWriter.deleteFile.callsArgWith(1)
@LocalFileWriter.writeStream.callsArgWith(2, null, @fsPath)
@s3Wrapper.sendStreamToS3 @bucketName, @key, @origin, =>
@LocalFileWriter.writeStream.calledWith(@origin).should.equal true
done()
it "should return the error from LocalFileWriter", (done)->
@LocalFileWriter.deleteFile.callsArgWith(1)
@LocalFileWriter.writeStream.callsArgWith(2, @error)
@s3Wrapper.sendStreamToS3 @bucketName, @key, @origin, (err)=>
err.should.equal @error
done()
it "should send the file to s3", (done)->
@LocalFileWriter.deleteFile.callsArgWith(1)
@LocalFileWriter.writeStream.callsArgWith(2)
@s3Wrapper.sendStreamToS3 @bucketName, @key, @origin, (err)=>
@s3Wrapper.sendFileToS3.called.should.equal true
done()
describe "copyFile", ->
beforeEach ->
@sourceKey = "my/key"
@destKey = "my/dest/key"
@s3Wrapper = SandboxedModule.require modulePath, requires: @requires
it "should use knox to copy file", (done)->
@stubbedKnoxClient.copyFile.callsArgWith(2, @error)
@s3Wrapper.copyFile @bucketName, @sourceKey, @destKey, (err)=>
err.should.equal @error
@stubbedKnoxClient.copyFile.calledWith(@sourceKey, @destKey).should.equal true
done()
describe "deleteDirectory", ->
beforeEach ->
@s3Wrapper = SandboxedModule.require modulePath, requires: @requires
it "should list the contents passing them onto multi delete", (done)->
data =
Contents: [{Key:"1234"}, {Key: "456"}]
@stubbedKnoxClient.list.callsArgWith(1, null, data)
@stubbedKnoxClient.deleteMultiple.callsArgWith(1)
@s3Wrapper.deleteDirectory @bucketName, @key, (err)=>
@stubbedKnoxClient.deleteMultiple.calledWith(["1234","456"]).should.equal true
done()
describe "deleteFile", ->
it "should use correct options", (done)->
@request = sinon.stub().callsArgWith(1)
@requires["request"] = @request
@s3Wrapper = SandboxedModule.require modulePath, requires: @requires
@s3Wrapper.deleteFile @bucketName, @key, (err)=>
opts = @request.args[0][0]
assert.deepEqual(opts.aws, {key:@settings.s3.key, secret:@settings.s3.secret, bucket:@bucketName})
opts.method.should.equal "delete"
opts.timeout.should.equal (30*1000)
opts.uri.should.equal "https://#{@bucketName}.s3.amazonaws.com/#{@key}"
done()
it "should return the error", (done)->
@request = sinon.stub().callsArgWith(1, @error)
@requires["request"] = @request
@s3Wrapper = SandboxedModule.require modulePath, requires: @requires
@s3Wrapper.deleteFile @bucketName, @key, (err)=>
err.should.equal @error
done()
describe "checkIfFileExists", ->
it "should use correct options", (done)->
@request = sinon.stub().callsArgWith(1, null, statusCode:200)
@requires["request"] = @request
@s3Wrapper = SandboxedModule.require modulePath, requires: @requires
@s3Wrapper.checkIfFileExists @bucketName, @key, (err)=>
opts = @request.args[0][0]
assert.deepEqual(opts.aws, {key:@settings.s3.key, secret:@settings.s3.secret, bucket:@bucketName})
opts.method.should.equal "head"
opts.timeout.should.equal (30*1000)
opts.uri.should.equal "https://#{@bucketName}.s3.amazonaws.com/#{@key}"
done()
it "should return true for a 200", (done)->
@request = sinon.stub().callsArgWith(1, null, statusCode:200)
@requires["request"] = @request
@s3Wrapper = SandboxedModule.require modulePath, requires: @requires
@s3Wrapper.checkIfFileExists @bucketName, @key, (err, exists)=>
exists.should.equal true
done()
it "should return false for a non 200", (done)->
@request = sinon.stub().callsArgWith(1, null, statusCode:404)
@requires["request"] = @request
@s3Wrapper = SandboxedModule.require modulePath, requires: @requires
@s3Wrapper.checkIfFileExists @bucketName, @key, (err, exists)=>
exists.should.equal false
done()
it "should return the error", (done)->
@request = sinon.stub().callsArgWith(1, @error, {})
@requires["request"] = @request
@s3Wrapper = SandboxedModule.require modulePath, requires: @requires
@s3Wrapper.checkIfFileExists @bucketName, @key, (err)=>
err.should.equal @error
done()

services/filestore/uploads/.gitignore vendored Normal file