From 3199aad601b22e520c57f7f9c642ad0be39063eb Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 14 Feb 2014 16:39:05 +0000 Subject: [PATCH 001/555] Initial open source commit --- services/filestore/.gitignore | 61 ++++++ services/filestore/GruntFile.coffee | 64 ++++++ services/filestore/app.coffee | 102 +++++++++ .../app/coffee/FileController.coffee | 59 ++++++ .../filestore/app/coffee/FileConverter.coffee | 49 +++++ .../filestore/app/coffee/FileHandler.coffee | 80 ++++++++ .../app/coffee/ImageOptimiser.coffee | 29 +++ .../filestore/app/coffee/KeyBuilder.coffee | 34 +++ .../app/coffee/LocalFileWriter.coffee | 36 ++++ services/filestore/app/coffee/metrics.coffee | 24 +++ .../filestore/app/coffee/s3Wrapper.coffee | 102 +++++++++ .../config/settings.development.coffee | 22 ++ services/filestore/package.json | 32 +++ .../unit/coffee/FileControllerTests.coffee | 126 ++++++++++++ .../unit/coffee/FileConverterTests.coffee | 73 +++++++ .../test/unit/coffee/FileHandlerTests.coffee | 177 ++++++++++++++++ .../unit/coffee/ImageOptimiserTests.coffee | 60 ++++++ .../test/unit/coffee/KeybuilderTests.coffee | 39 ++++ .../unit/coffee/LocalFileWriterTests.coffee | 59 ++++++ .../test/unit/coffee/s3WrapperTests.coffee | 193 ++++++++++++++++++ services/filestore/uploads/.gitignore | 0 21 files changed, 1421 insertions(+) create mode 100644 services/filestore/.gitignore create mode 100644 services/filestore/GruntFile.coffee create mode 100644 services/filestore/app.coffee create mode 100644 services/filestore/app/coffee/FileController.coffee create mode 100644 services/filestore/app/coffee/FileConverter.coffee create mode 100644 services/filestore/app/coffee/FileHandler.coffee create mode 100644 services/filestore/app/coffee/ImageOptimiser.coffee create mode 100644 services/filestore/app/coffee/KeyBuilder.coffee create mode 100644 services/filestore/app/coffee/LocalFileWriter.coffee create mode 100644 services/filestore/app/coffee/metrics.coffee create mode 100644 services/filestore/app/coffee/s3Wrapper.coffee create mode 100644 services/filestore/config/settings.development.coffee create mode 100644 services/filestore/package.json create mode 100644 services/filestore/test/unit/coffee/FileControllerTests.coffee create mode 100644 services/filestore/test/unit/coffee/FileConverterTests.coffee create mode 100644 services/filestore/test/unit/coffee/FileHandlerTests.coffee create mode 100644 services/filestore/test/unit/coffee/ImageOptimiserTests.coffee create mode 100644 services/filestore/test/unit/coffee/KeybuilderTests.coffee create mode 100644 services/filestore/test/unit/coffee/LocalFileWriterTests.coffee create mode 100644 services/filestore/test/unit/coffee/s3WrapperTests.coffee create mode 100644 services/filestore/uploads/.gitignore diff --git a/services/filestore/.gitignore b/services/filestore/.gitignore new file mode 100644 index 0000000000..6d486a3b2c --- /dev/null +++ b/services/filestore/.gitignore @@ -0,0 +1,61 @@ +compileFolder + +Compiled source # +################### +*.com +*.class +*.dll +*.exe +*.o +*.so + +# Packages # +############ +# it's better to unpack these files and commit the raw source +# git has its own built in compression methods +*.7z +*.dmg +*.gz +*.iso +*.jar +*.rar +*.tar +*.zip + +# Logs and databases # +###################### +*.log +*.sql +*.sqlite + +# OS generated files # +###################### +.DS_Store? +ehthumbs.db +Icon? 
+Thumbs.db + +/node_modules/* +test/IntergrationTests/js/* +data/*/* + +app.js +app/js/* +test/IntergrationTests/js/* +test/UnitTests/js/* +cookies.txt +uploads/* +public/js/editor.js +public/js/home.js +public/js/forms.js +public/js/gui.js +public/js/admin.js +public/stylesheets/mainStyle.css +public/minjs/ +test/unit/js/ +test/acceptence/js + +**.swp + +/log.json +hash_folder diff --git a/services/filestore/GruntFile.coffee b/services/filestore/GruntFile.coffee new file mode 100644 index 0000000000..27d4adc86c --- /dev/null +++ b/services/filestore/GruntFile.coffee @@ -0,0 +1,64 @@ +module.exports = (grunt) -> + + # Project configuration. + grunt.initConfig + coffee: + server: + expand: true, + flatten: false, + cwd: 'app/coffee', + src: ['**/*.coffee'], + dest: 'app/js/', + ext: '.js' + + app_server: + expand: true, + flatten: false, + src: ['app.coffee'], + dest: './', + ext: '.js' + + server_tests: + expand: true, + flatten: false, + cwd: 'test/unit/coffee', + src: ['*.coffee', '**/*.coffee'], + dest: 'test/unit/js/', + ext: '.js' + + watch: + server_coffee: + files: ['app/*.coffee','app/**/*.coffee', 'test/unit/coffee/**/*.coffee', 'test/unit/coffee/*.coffee', "app.coffee"] + tasks: ["clean", 'coffee', 'mochaTest'] + + clean: ["app/js", "test/unit/js", "app.js"] + + nodemon: + dev: + options: + file: 'app.js' + + concurrent: + dev: + tasks: ['nodemon', 'watch'] + options: + logConcurrentOutput: true + + mochaTest: + test: + options: + reporter: process.env.MOCHA_RUNNER || "spec" + src: ['test/*.js', 'test/**/*.js'] + + grunt.loadNpmTasks 'grunt-contrib-coffee' + grunt.loadNpmTasks 'grunt-contrib-watch' + grunt.loadNpmTasks 'grunt-nodemon' + grunt.loadNpmTasks 'grunt-contrib-clean' + grunt.loadNpmTasks 'grunt-concurrent' + grunt.loadNpmTasks 'grunt-mocha-test' + + grunt.registerTask "ci", ["coffee", "mochaTest"] + grunt.registerTask 'default', ['coffee', 'concurrent'] + + grunt.registerTask "install", "coffee" + diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee new file mode 100644 index 0000000000..70f6bddbbe --- /dev/null +++ b/services/filestore/app.coffee @@ -0,0 +1,102 @@ +express = require('express') +logger = require('logger-sharelatex') +logger.initialize("filestore") +metrics = require("./app/js/metrics") +settings = require("settings-sharelatex") +request = require("request") +fileController = require("./app/js/FileController") +keyBuilder = require("./app/js/KeyBuilder") +domain = require("domain") +appIsOk = true +app = express() +streamBuffers = require("stream-buffers") + + +app.configure -> + app.use express.bodyParser() + +app.configure 'development', -> + console.log "Development Enviroment" + app.use express.errorHandler({ dumpExceptions: true, showStack: true }) + +app.configure 'production', -> + console.log "Production Enviroment" + app.use express.logger() + app.use express.errorHandler() + +metrics.inc "startup" + +app.use (req, res, next)-> + metrics.inc "http-request" + next() + +app.use (req, res, next) -> + requestDomain = domain.create() + requestDomain.add req + requestDomain.add res + requestDomain.on "error", (err)-> + res.send 500 + logger = require('logger-sharelatex') + req = + body:req.body + headers:req.headers + url:req.url + key: req.key + statusCode: req.statusCode + err.domainEmitter.res = "to big to log" + logger.err err:err, req:req, res:res, "uncaught exception thrown on request" + appIsOk = false + exit = -> + console.log "exit" + process.exit(1) + setTimeout exit, 20000 + requestDomain.run next + +app.get 
"/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile +app.post "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.insertFile + +app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.copyFile +app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile + +app.get "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFile +app.post "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.insertFile + +app.post "/shutdown", (req, res)-> + appIsOk = false + res.send() + +app.get '/status', (req, res)-> + if appIsOk + res.send('filestore sharelatex up') + else + logger.log "app is not ok - shutting down" + res.send("server is being shut down", 500) + +app.get "/health_check", (req, res)-> + req.params.project_id = settings.health_check.project_id + req.params.file_id = settings.health_check.file_id + myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer(initialSize: 100) + keyBuilder.userFileKey req, res, -> + fileController.getFile req, myWritableStreamBuffer + myWritableStreamBuffer.on "close", -> + if myWritableStreamBuffer.size() > 0 + res.send(200) + else + res.send(503) + + + +app.get '*', (req, res)-> + res.send 404 + +serverDomain = domain.create() +serverDomain.run -> + server = require('http').createServer(app) + port = settings.internal.filestore.port or 3009 + host = settings.internal.filestore.host or "localhost" + server.listen port, host, -> + logger.log("filestore store listening on #{host}:#{port}") + +serverDomain.on "error", (err)-> + logger.log err:err, "top level uncaught exception" + diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee new file mode 100644 index 0000000000..83c02334e3 --- /dev/null +++ b/services/filestore/app/coffee/FileController.coffee @@ -0,0 +1,59 @@ +s3Wrapper = require("./s3Wrapper") +settings = require("settings-sharelatex") +logger = require("logger-sharelatex") +FileHandler = require("./FileHandler") +LocalFileWriter = require("./LocalFileWriter") +metrics = require("./metrics") +oneDayInSeconds = 60 * 60 * 24 + +module.exports = + + getFile: (req, res)-> + metrics.inc "getFile" + {key, bucket} = req + {format, style} = req.query + logger.log key:key, bucket:bucket, format:format, style:style, "reciving request to get file" + FileHandler.getFile bucket, key, {format:format,style:style}, (err, fileStream)-> + if err? + logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" + res.send 500 + else if req.params.cacheWarm + logger.log key:key, bucket:bucket, format:format, style:style, "request is only for cache warm so not sending stream" + res.send 200 + else + logger.log key:key, bucket:bucket, format:format, style:style, "sending file to response" + fileStream.pipe res + + insertFile: (req, res)-> + metrics.inc "insertFile" + {key, bucket} = req + logger.log key:key, bucket:bucket, "reciving request to insert file" + FileHandler.insertFile bucket, key, req, (err)-> + res.send 200 + + copyFile: (req, res)-> + metrics.inc "copyFile" + {key, bucket} = req + oldProject_id = req.body.source.project_id + oldFile_id = req.body.source.file_id + logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "reciving request to copy file" + s3Wrapper.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)-> + if err? 
+ logger.log err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file in s3Wrapper" + res.send 500 + else + res.send 200 + + deleteFile: (req, res)-> + metrics.inc "deleteFile" + {key, bucket} = req + logger.log key:key, bucket:bucket, "reciving request to delete file" + FileHandler.deleteFile bucket, key, (err)-> + if err? + logger.log err:err, key:key, bucket:bucket, "something went wrong deleting file in s3Wrapper" + res.send 500 + else + res.send 204 + + + diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee new file mode 100644 index 0000000000..11f405361c --- /dev/null +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -0,0 +1,49 @@ +easyimage = require("easyimage") +_ = require("underscore") +metrics = require("./metrics") +logger = require("logger-sharelatex") + +approvedFormats = ["png"] + +module.exports = + + convert: (sourcePath, requestedFormat, callback)-> + logger.log sourcePath:sourcePath, requestedFormat:requestedFormat, "converting file format" + timer = new metrics.Timer("imageConvert") + destPath = "#{sourcePath}.#{requestedFormat}" + sourcePath = "#{sourcePath}[0]" + if !_.include approvedFormats, requestedFormat + err = new Error("invalid format requested") + return callback err + args = + src: sourcePath + dst: destPath + easyimage.convert args, (err)-> + timer.done() + callback(err, destPath) + + thumbnail: (sourcePath, callback)-> + logger.log sourcePath:sourcePath, "thumbnail convert file" + destPath = "#{sourcePath}.png" + sourcePath = "#{sourcePath}[0]" + args = + src: sourcePath + dst: destPath + width: 424 + height: 300 + args = "convert -flatten -background white -resize 300x -density 300 #{sourcePath} #{destPath}" + easyimage.exec args, (err)-> + callback(err, destPath) + + preview: (sourcePath, callback)-> + logger.log sourcePath:sourcePath, "preview convert file" + destPath = "#{sourcePath}.png" + sourcePath = "#{sourcePath}[0]" + args = + src: sourcePath + dst: destPath + width: 600 + height: 849 + args = "convert -flatten -background white -resize 600x -density 300 #{sourcePath} #{destPath}" + easyimage.exec args, (err)-> + callback(err, destPath) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee new file mode 100644 index 0000000000..6d647fdac5 --- /dev/null +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -0,0 +1,80 @@ +settings = require("settings-sharelatex") +s3Wrapper = require("./s3Wrapper") +LocalFileWriter = require("./LocalFileWriter") +logger = require("logger-sharelatex") +FileConverter = require("./FileConverter") +KeyBuilder = require("./KeyBuilder") +async = require("async") +ImageOptimiser = require("./ImageOptimiser") + + +module.exports = + + insertFile: (bucket, key, stream, callback)-> + convetedKey = KeyBuilder.getConvertedFolderKey(key) + s3Wrapper.deleteDirectory bucket, convetedKey, -> + s3Wrapper.sendStreamToS3 bucket, key, stream, -> + callback() + + deleteFile: (bucket, key, callback)-> + convetedKey = KeyBuilder.getConvertedFolderKey(bucket, key) + async.parallel [ + (done)-> s3Wrapper.deleteFile bucket, key, done + (done)-> s3Wrapper.deleteFile bucket, convetedKey, done + ], callback + + getFile: (bucket, key, opts = {}, callback)-> + logger.log bucket:bucket, key:key, opts:opts, "getting file" + if !opts.format? and !opts.style? 
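			# No format or style was requested, so stream the stored object straight back from S3;
			# otherwise fall through to _getConvertedFile, which serves a cached converted copy
			# from S3 or generates and caches one on demand.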
+ @_getStandardFile bucket, key, opts, callback + else + @_getConvertedFile bucket, key, opts, callback + + _getStandardFile: (bucket, key, opts, callback)-> + s3Wrapper.getFileStream bucket, key, (err, fileStream)-> + if err? + logger.err bucket:bucket, key:key, opts:opts, "error getting fileStream" + callback err, fileStream + + _getConvertedFile: (bucket, key, opts, callback)-> + convetedKey = KeyBuilder.addCachingToKey(key, opts) + s3Wrapper.checkIfFileExists bucket, convetedKey, (err, exists)=> + if exists + s3Wrapper.getFileStream bucket, convetedKey, callback + else + @_getConvertedFileAndCache bucket, key, convetedKey, opts, callback + + _getConvertedFileAndCache: (bucket, key, convetedKey, opts, callback)-> + @_convertFile bucket, key, opts, (err, fsPath)-> + if err? + logger.err err:err, fsPath:fsPath, bucket:bucket, key:key, opts:opts, "something went wrong with converting file" + return callback(err) + ImageOptimiser.compressPng fsPath, (err)-> + if err? + logger.err err:err, fsPath:fsPath, bucket:bucket, key:key, opts:opts, "something went wrong optimising png file" + return callback(err) + s3Wrapper.sendFileToS3 bucket, convetedKey, fsPath, (err)-> + if err? + logger.err err:err, bucket:bucket, key:key, convetedKey:convetedKey, opts:opts, "something went wrong seing file to s3" + return callback(err) + s3Wrapper.getFileStream bucket, convetedKey, callback + + _convertFile: (bucket, origonalKey, opts, callback)-> + @_writeS3FileToDisk bucket, origonalKey, (err, origonalFsPath)-> + if opts.format? + FileConverter.convert origonalFsPath, opts.format, callback + else if opts.style == "thumbnail" + FileConverter.thumbnail origonalFsPath, callback + else if opts.style == "preview" + FileConverter.preview origonalFsPath, callback + else + throw new Error("should have specified opts to convert file with #{JSON.stringify(opts)}") + + + _writeS3FileToDisk: (bucket, key, callback)-> + s3Wrapper.getFileStream bucket, key, (err, fileStream)-> + LocalFileWriter.writeStream fileStream, key, callback + + + + diff --git a/services/filestore/app/coffee/ImageOptimiser.coffee b/services/filestore/app/coffee/ImageOptimiser.coffee new file mode 100644 index 0000000000..8d5a9b8714 --- /dev/null +++ b/services/filestore/app/coffee/ImageOptimiser.coffee @@ -0,0 +1,29 @@ +PngCrush = require('pngcrush') +fs = require("fs") +logger = require("logger-sharelatex") + + +module.exports = + + compressPng: (localPath, callback)-> + optimisedPath = "#{localPath}-optimised" + startTime = new Date() + logger.log localPath:localPath, optimisedPath:optimisedPath, "optimising png path" + readStream = fs.createReadStream(localPath) + writeStream = fs.createWriteStream(optimisedPath) + readStream.on "error", (err)-> + logger.err err:err, localPath:localPath, "something went wrong getting read stream for compressPng" + callback(err) + writeStream.on "error", (err)-> + logger.err err:err, localPath:localPath, "something went wrong getting write stream for compressPng" + callback(err) + myCrusher = new PngCrush() + myCrusher.on "error", (err)-> + logger.err err:err, localPath:localPath, "error compressing file" + callback err + readStream.pipe(myCrusher).pipe(writeStream) + writeStream.on "finish", -> + timeTaken = new Date() - startTime + logger.log localPath:localPath, timeTaken:timeTaken, "finished converting file" + fs.rename optimisedPath, localPath, callback + diff --git a/services/filestore/app/coffee/KeyBuilder.coffee b/services/filestore/app/coffee/KeyBuilder.coffee new file mode 100644 index 
0000000000..113c0eac57 --- /dev/null +++ b/services/filestore/app/coffee/KeyBuilder.coffee @@ -0,0 +1,34 @@ +settings = require("settings-sharelatex") + +module.exports = + + + getConvertedFolderKey: (key)-> + key = "#{key}-converted-cache/" + + addCachingToKey: (key, opts)-> + key = @getConvertedFolderKey(key) + if opts.format? and !opts.style? + key = "#{key}format-#{opts.format}" + if opts.style? and !opts.format? + key = "#{key}style-#{opts.style}" + if opts.style? and opts.format? + key = "#{key}format-#{opts.format}-style-#{opts.style}" + return key + + + userFileKey: (req, res, next)-> + {project_id, file_id} = req.params + req.key = "#{project_id}/#{file_id}" + req.bucket = settings.s3.buckets.user_files + next() + + templateFileKey: (req, res, next)-> + {template_id, format, version} = req.params + req.key = "#{template_id}/#{version}/#{format}" + req.bucket = settings.s3.buckets.template_files + req.version = version + opts = req.query + next() + + \ No newline at end of file diff --git a/services/filestore/app/coffee/LocalFileWriter.coffee b/services/filestore/app/coffee/LocalFileWriter.coffee new file mode 100644 index 0000000000..3cf1cf27da --- /dev/null +++ b/services/filestore/app/coffee/LocalFileWriter.coffee @@ -0,0 +1,36 @@ +fs = require("fs") +uuid = require('node-uuid') +path = require("path") +_ = require("underscore") +logger = require("logger-sharelatex") +metrics = require("./metrics") + +module.exports = + + writeStream: (stream, key, callback)-> + + timer = new metrics.Timer("writingFile") + callback = _.once callback + fsPath = @_getPath(key) + logger.log fsPath:fsPath, "writing file locally" + writeStream = fs.createWriteStream(fsPath) + stream.pipe writeStream + writeStream.on "finish", -> + timer.done() + logger.log fsPath:fsPath, "finished writing file locally" + callback(null, fsPath) + writeStream.on "error", (err)-> + logger.err err:err, fsPath:fsPath, "problem writing file locally, with write stream" + callback err + stream.on "error", (err)-> + logger.log err:err, fsPath:fsPath, "problem writing file locally, with read stream" + callback err + + deleteFile: (fsPath, callback)-> + fs.unlink fsPath, callback + + _getPath : (key)-> + if !key? 
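			# A null key is allowed here (s3Wrapper.sendStreamToS3 buffers incoming streams to
			# disk this way before uploading), so fall back to a random uuid-based filename.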
+ key = uuid.v1() + key = key.replace(/\//g,"-") + path.join(__dirname, "../../uploads/#{key}") diff --git a/services/filestore/app/coffee/metrics.coffee b/services/filestore/app/coffee/metrics.coffee new file mode 100644 index 0000000000..cd5c7ab215 --- /dev/null +++ b/services/filestore/app/coffee/metrics.coffee @@ -0,0 +1,24 @@ +StatsD = require('lynx') +settings = require('settings-sharelatex') +statsd = new StatsD('localhost', 8125, {on_error:->}) + +buildKey = (key)-> "filestore.#{process.env.NODE_ENV}.#{key}" + +module.exports = + set : (key, value, sampleRate = 1)-> + statsd.set buildKey(key), value, sampleRate + + inc : (key, sampleRate = 1)-> + statsd.increment buildKey(key), sampleRate + + Timer : class + constructor :(key, sampleRate = 1)-> + this.start = new Date() + this.key = buildKey(key) + done:-> + timeSpan = new Date - this.start + statsd.timing(this.key, timeSpan, this.sampleRate) + + gauge : (key, value, sampleRate = 1)-> + statsd.gauge key, value, sampleRate + diff --git a/services/filestore/app/coffee/s3Wrapper.coffee b/services/filestore/app/coffee/s3Wrapper.coffee new file mode 100644 index 0000000000..0a6f5a3cf5 --- /dev/null +++ b/services/filestore/app/coffee/s3Wrapper.coffee @@ -0,0 +1,102 @@ +settings = require("settings-sharelatex") +request = require("request") +logger = require("logger-sharelatex") +fs = require("fs") +knox = require("knox") +path = require("path") +LocalFileWriter = require("./LocalFileWriter") +_ = require("underscore") + + +thirtySeconds = 30 * 1000 + +buildDefaultOptions = (bucketName, method, key)-> + return { + aws: + key: settings.s3.key + secret: settings.s3.secret + bucket: bucketName + method: method + timeout: thirtySeconds + uri:"https://#{bucketName}.s3.amazonaws.com/#{key}" + } + +module.exports = + + sendFileToS3: (bucketName, key, fsPath, callback)-> + s3Client = knox.createClient + key: settings.s3.key + secret: settings.s3.secret + bucket: bucketName + putEventEmiter = s3Client.putFile fsPath, key, (err, res)-> + if err? + logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath,"something went wrong uploading file to s3" + return callback(err) + if res.statusCode != 200 + logger.err bucketName:bucketName, key:key, fsPath:fsPath, "non 200 response from s3 putting file" + return callback("non 200 response from s3 on put file") + LocalFileWriter.deleteFile fsPath, (err)-> + logger.log res:res, bucketName:bucketName, key:key, fsPath:fsPath,"file uploaded to s3" + callback(err) + putEventEmiter.on "error", (err)-> + logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error emmited on put of file" + callback err + + + sendStreamToS3: (bucketName, key, readStream, callback)-> + logger.log bucketName:bucketName, key:key, "sending file to s3" + readStream.on "error", (err)-> + logger.err bucketName:bucketName, key:key, "error on stream to send to s3" + LocalFileWriter.writeStream readStream, null, (err, fsPath)=> + if err? 
+ logger.err bucketName:bucketName, key:key, fsPath:fsPath, err:err, "something went wrong writing stream to disk" + return callback(err) + @sendFileToS3 bucketName, key, fsPath, callback + + getFileStream: (bucketName, key, callback)-> + logger.log bucketName:bucketName, key:key, "getting file from s3" + options = buildDefaultOptions(bucketName, "get", key) + readStream = request(options) + readStream.on "error", (err)-> + logger.err bucketName:bucketName, key:key, "error getting file stream from s3" + callback null, readStream + + copyFile: (bucketName, sourceKey, destKey, callback)-> + logger.log bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "copying file in s3" + s3Client = knox.createClient + key: settings.s3.key + secret: settings.s3.secret + bucket: bucketName + s3Client.copyFile sourceKey, destKey, (err)-> + if err? + logger.err bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in aws" + callback(err) + + deleteFile: (bucketName, key, callback)-> + logger.log bucketName:bucketName, key:key, "delete file in s3" + options = buildDefaultOptions(bucketName, "delete", key) + request options, (err, res)-> + if err? + logger.err res:res, bucketName:bucketName, key:key, "something went wrong deleting file in aws" + callback(err) + + deleteDirectory: (bucketName, key, callback)-> + s3Client = knox.createClient + key: settings.s3.key + secret: settings.s3.secret + bucket: bucketName + s3Client.list prefix:key, (err, data)-> + keys = _.map data.Contents, (entry)-> + return entry.Key + s3Client.deleteMultiple keys, callback + + checkIfFileExists:(bucketName, key, callback)-> + logger.log bucketName:bucketName, key:key, "checking if file exists in s3" + options = buildDefaultOptions(bucketName, "head", key) + request options, (err, res)-> + if err? + logger.err res:res, bucketName:bucketName, key:key, "something went wrong copying file in aws" + exists = res.statusCode == 200 + logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exsists in s3" + callback(err, exists) + diff --git a/services/filestore/config/settings.development.coffee b/services/filestore/config/settings.development.coffee new file mode 100644 index 0000000000..2fa32e622e --- /dev/null +++ b/services/filestore/config/settings.development.coffee @@ -0,0 +1,22 @@ +module.exports = + internal: + filestore: + port: 3009 + host: "localhost" + + # ShareLaTeX stores binary files like images in S3. + # Fill in your Amazon S3 credentials below. 
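	# For example, a configured block might look like the sketch below; the key, secret
	# and bucket names are illustrative placeholders only, shown to indicate the expected
	# shape of this block:
	#
	# s3:
	#   key: 'your-aws-access-key-id'
	#   secret: 'your-aws-secret-access-key'
	#   buckets:
	#     user_files: "my-sharelatex-user-files"
	#     template_files: "my-sharelatex-template-files"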
+ s3: + key: '' + secret: '' + buckets: + user_files: "" + template_files: "" + + + # Filestore health check + # ---------------------- + # Project and file details to check in filestore when calling /health_check + # health_check: + # project_id: "" + # file_id: "" diff --git a/services/filestore/package.json b/services/filestore/package.json new file mode 100644 index 0000000000..7658d613ee --- /dev/null +++ b/services/filestore/package.json @@ -0,0 +1,32 @@ +{ + "name": "filestore-sharelatex", + "version": "0.0.1", + "dependencies": { + "settings": "git+ssh://git@bitbucket.org:sharelatex/settings-sharelatex.git#master", + "logger": "git+ssh://git@bitbucket.org:sharelatex/logger-sharelatex.git#bunyan", + "request": "2.14.0", + "lynx": "0.0.11", + "grunt-mocha-test": "~0.8.2", + "knox": "~0.8.8", + "node-uuid": "~1.4.1", + "underscore": "~1.5.2", + "easyimage": "~0.1.6", + "express": "~3.4.8", + "longjohn": "~0.2.2", + "async": "~0.2.10", + "pngcrush": "0.0.3", + "stream-buffers": "~0.2.5" + }, + "devDependencies": { + "sinon": "", + "chai": "", + "sandboxed-module": "", + "grunt": "0.4.1", + "grunt-contrib-requirejs": "0.4.1", + "grunt-contrib-coffee": "0.7.0", + "grunt-contrib-watch": "0.5.3", + "grunt-nodemon": "0.1.2", + "grunt-contrib-clean": "0.5.0", + "grunt-concurrent": "0.4.2" + } +} diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee new file mode 100644 index 0000000000..29bc9d8205 --- /dev/null +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -0,0 +1,126 @@ +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/FileController.js" +SandboxedModule = require('sandboxed-module') + +describe "FileController", -> + + beforeEach -> + @s3Wrapper = + sendStreamToS3: sinon.stub() + getAndPipe: sinon.stub() + copyFile: sinon.stub() + deleteFile:sinon.stub() + + @settings = + s3: + buckets: + user_files:"user_files" + @FileHandler = + getFile: sinon.stub() + deleteFile: sinon.stub() + insertFile: sinon.stub() + @LocalFileWriter = {} + @controller = SandboxedModule.require modulePath, requires: + "./LocalFileWriter":@LocalFileWriter + "./FileHandler": @FileHandler + "./s3Wrapper":@s3Wrapper + "settings-sharelatex": @settings + "logger-sharelatex": + log:-> + err:-> + @project_id = "project_id" + @file_id = "file_id" + @bucket = "user_files" + @key = "#{@project_id}/#{@file_id}" + @req = + key:@key + bucket:@bucket + query:{} + params: + project_id:@project_id + file_id:@file_id + @res = + setHeader: -> + @fileStream = {} + + describe "getFile", -> + + it "should pipe the stream", (done)-> + @FileHandler.getFile.callsArgWith(3, null, @fileStream) + @fileStream.pipe = (res)=> + res.should.equal @res + done() + @controller.getFile @req, @res + + it "should send a 200 if the cacheWarm param is true", (done)-> + @req.params.cacheWarm = true + @FileHandler.getFile.callsArgWith(3, null, @fileStream) + @res.send = (statusCode)=> + statusCode.should.equal 200 + done() + @controller.getFile @req, @res + + it "should send a 500 if there is a problem", (done)-> + @FileHandler.getFile.callsArgWith(3, "error") + @res.send = (code)=> + code.should.equal 500 + done() + @controller.getFile @req, @res + + + describe "insertFile", -> + + it "should send bucket name key and res to s3Wrapper", (done)-> + @FileHandler.insertFile.callsArgWith(3) + @res.send = => + 
@FileHandler.insertFile.calledWith(@bucket, @key, @req).should.equal true + done() + @controller.insertFile @req, @res + + + describe "copyFile", -> + beforeEach -> + @oldFile_id = "old_file_id" + @oldProject_id = "old_project_id" + @req.body = + source: + project_id: @oldProject_id + file_id: @oldFile_id + + it "should send bucket name and both keys to s3Wrapper", (done)-> + @s3Wrapper.copyFile.callsArgWith(3) + @res.send = (code)=> + code.should.equal 200 + @s3Wrapper.copyFile.calledWith(@bucket, "#{@oldProject_id}/#{@oldFile_id}", @key).should.equal true + done() + @controller.copyFile @req, @res + + + it "should send a 500 if there was an error", (done)-> + @s3Wrapper.copyFile.callsArgWith(3, "error") + @res.send = (code)=> + code.should.equal 500 + done() + @controller.copyFile @req, @res + + + describe "delete file", -> + + it "should tell the file handler", (done)-> + @FileHandler.deleteFile.callsArgWith(2) + @res.send = (code)=> + code.should.equal 204 + @FileHandler.deleteFile.calledWith(@bucket, @key).should.equal true + done() + @controller.deleteFile @req, @res + + it "should send a 500 if there was an error", (done)-> + @FileHandler.deleteFile.callsArgWith(2, "error") + @res.send = (code)-> + code.should.equal 500 + done() + @controller.deleteFile @req, @res diff --git a/services/filestore/test/unit/coffee/FileConverterTests.coffee b/services/filestore/test/unit/coffee/FileConverterTests.coffee new file mode 100644 index 0000000000..bfdec86813 --- /dev/null +++ b/services/filestore/test/unit/coffee/FileConverterTests.coffee @@ -0,0 +1,73 @@ +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/FileConverter.js" +SandboxedModule = require('sandboxed-module') + +describe "FileConverter", -> + + beforeEach -> + + @easyimage = + convert:sinon.stub() + exec: sinon.stub() + @converter = SandboxedModule.require modulePath, requires: + "easyimage":@easyimage + "logger-sharelatex": + log:-> + err:-> + + @sourcePath = "/this/path/here.eps" + @format = "png" + @error = "Error" + + describe "convert", -> + + it "should convert the source to the requested format", (done)-> + @easyimage.convert.callsArgWith(1) + @converter.convert @sourcePath, @format, (err)=> + args = @easyimage.convert.args[0][0] + args.src.should.equal @sourcePath+"[0]" + args.dst.should.equal "#{@sourcePath}.#{@format}" + done() + + + it "should return the dest path", (done)-> + @easyimage.convert.callsArgWith(1) + @converter.convert @sourcePath, @format, (err, destPath)=> + destPath.should.equal "#{@sourcePath}.#{@format}" + done() + + it "should return the error from convert", (done)-> + @easyimage.convert.callsArgWith(1, @error) + @converter.convert @sourcePath, @format, (err)=> + err.should.equal @error + done() + + it "should not accapt an non aproved format", (done)-> + @easyimage.convert.callsArgWith(1) + @converter.convert @sourcePath, "ahhhhh", (err)=> + expect(err).to.exist + done() + + + describe "thumbnail", -> + it "should call easy image resize with args", (done)-> + @easyimage.exec.callsArgWith(1) + @converter.thumbnail @sourcePath, (err)=> + args = @easyimage.exec.args[0][0] + args.indexOf(@sourcePath).should.not.equal -1 + done() + + it "should compress the png", ()-> + + + describe "preview", -> + it "should call easy image resize with args", (done)-> + @easyimage.exec.callsArgWith(1) + @converter.preview @sourcePath, (err)=> + args = @easyimage.exec.args[0][0] + 
args.indexOf(@sourcePath).should.not.equal -1 + done() diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee new file mode 100644 index 0000000000..0b2ea034fe --- /dev/null +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -0,0 +1,177 @@ + +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/FileHandler.js" +SandboxedModule = require('sandboxed-module') + +describe "FileHandler", -> + + beforeEach -> + @settings = + s3: + buckets: + user_files:"user_files" + @s3Wrapper = + getFileStream: sinon.stub() + checkIfFileExists: sinon.stub() + deleteFile: sinon.stub() + deleteDirectory: sinon.stub() + sendStreamToS3: sinon.stub() + insertFile: sinon.stub() + @LocalFileWriter = + writeStream: sinon.stub() + @FileConverter = + convert: sinon.stub() + thumbnail: sinon.stub() + preview: sinon.stub() + @keyBuilder = + addCachingToKey: sinon.stub() + getConvertedFolderKey: sinon.stub() + @ImageOptimiser = + compressPng: sinon.stub() + @handler = SandboxedModule.require modulePath, requires: + "settings-sharelatex": @settings + "./s3Wrapper":@s3Wrapper + "./LocalFileWriter":@LocalFileWriter + "./FileConverter":@FileConverter + "./KeyBuilder": @keyBuilder + "./ImageOptimiser":@ImageOptimiser + "logger-sharelatex": + log:-> + err:-> + @bucket = "my_bucket" + @key = "key/here" + @stubbedPath = "/var/somewhere/path" + @format = "png" + @formattedStubbedPath = "#{@stubbedPath}.#{@format}" + + describe "insertFile", -> + beforeEach -> + @stream = {} + @s3Wrapper.deleteDirectory.callsArgWith(2) + @s3Wrapper.sendStreamToS3.callsArgWith(3) + + it "should send file to s3", (done)-> + @handler.insertFile @bucket, @key, @stream, => + @s3Wrapper.sendStreamToS3.calledWith(@bucket, @key, @stream).should.equal true + done() + + it "should delete the convetedKey folder", (done)-> + @keyBuilder.getConvertedFolderKey.returns(@stubbedConvetedKey) + @handler.insertFile @bucket, @key, @stream, => + @s3Wrapper.deleteDirectory.calledWith(@bucket, @stubbedConvetedKey).should.equal true + done() + + describe "deleteFile", -> + beforeEach -> + @keyBuilder.getConvertedFolderKey.returns(@stubbedConvetedKey) + @s3Wrapper.deleteFile.callsArgWith(2) + + it "should tell the s3 wrapper to delete the file", (done)-> + @handler.deleteFile @bucket, @key, => + @s3Wrapper.deleteFile.calledWith(@bucket, @key).should.equal true + done() + + it "should tell the s3 wrapper to delete the cached foler", (done)-> + @handler.deleteFile @bucket, @key, => + @s3Wrapper.deleteFile.calledWith(@bucket, @stubbedConvetedKey).should.equal true + done() + + describe "getFile", -> + beforeEach -> + @handler._getStandardFile = sinon.stub().callsArgWith(3) + @handler._getConvertedFile = sinon.stub().callsArgWith(3) + + it "should call _getStandardFile if no format or style are defined", (done)-> + + @handler.getFile @bucket, @key, null, => + @handler._getStandardFile.called.should.equal true + @handler._getConvertedFile.called.should.equal false + done() + + it "should call _getConvertedFile if a format is defined", (done)-> + @handler.getFile @bucket, @key, format:"png", => + @handler._getStandardFile.called.should.equal false + @handler._getConvertedFile.called.should.equal true + done() + + + describe "_getStandardFile", -> + + beforeEach -> + @fileStream = {on:->} + @s3Wrapper.getFileStream.callsArgWith(2, "err", @fileStream) + + it "should get the stream 
from s3 ", (done)-> + @handler.getFile @bucket, @key, null, => + @s3Wrapper.getFileStream.calledWith(@bucket, @key).should.equal true + done() + + it "should return the stream and error", (done)-> + @handler.getFile @bucket, @key, null, (err, stream)=> + err.should.equal "err" + stream.should.equal @fileStream + done() + + describe "_getConvertedFile", -> + + it "should getFileStream if it does exists", (done)-> + @s3Wrapper.checkIfFileExists.callsArgWith(2, null, true) + @s3Wrapper.getFileStream.callsArgWith(2) + @handler._getConvertedFile @bucket, @key, {}, => + @s3Wrapper.getFileStream.calledWith(@bucket).should.equal true + done() + + it "should call _getConvertedFileAndCache if it does exists", (done)-> + @s3Wrapper.checkIfFileExists.callsArgWith(2, null, false) + @handler._getConvertedFileAndCache = sinon.stub().callsArgWith(4) + @handler._getConvertedFile @bucket, @key, {}, => + @handler._getConvertedFileAndCache.calledWith(@bucket, @key).should.equal true + done() + + describe "_getConvertedFileAndCache", -> + + it "should _convertFile ", (done)-> + @s3Wrapper.sendFileToS3 = sinon.stub().callsArgWith(3) + @s3Wrapper.getFileStream = sinon.stub().callsArgWith(2) + @convetedKey = @key+"converted" + @handler._convertFile = sinon.stub().callsArgWith(3, null, @stubbedPath) + @ImageOptimiser.compressPng = sinon.stub().callsArgWith(1) + @handler._getConvertedFileAndCache @bucket, @key, @convetedKey, {}, => + @handler._convertFile.called.should.equal true + @s3Wrapper.sendFileToS3.calledWith(@bucket, @convetedKey, @stubbedPath).should.equal true + @s3Wrapper.getFileStream.calledWith(@bucket, @convetedKey).should.equal true + @ImageOptimiser.compressPng.calledWith(@stubbedPath).should.equal true + done() + + describe "_convertFile", -> + beforeEach -> + @FileConverter.convert.callsArgWith(2, null, @formattedStubbedPath) + @FileConverter.thumbnail.callsArgWith(1, null, @formattedStubbedPath) + @FileConverter.preview.callsArgWith(1, null, @formattedStubbedPath) + @handler._writeS3FileToDisk = sinon.stub().callsArgWith(2, null, @stubbedPath) + + it "should call thumbnail on the writer path if style was thumbnail was specified", (done)-> + @handler._convertFile @bucket, @key, style:"thumbnail", (err, path)=> + path.should.equal @formattedStubbedPath + @FileConverter.thumbnail.calledWith(@stubbedPath).should.equal true + done() + + it "should call preview on the writer path if style was preview was specified", (done)-> + @handler._convertFile @bucket, @key, style:"preview", (err, path)=> + path.should.equal @formattedStubbedPath + @FileConverter.preview.calledWith(@stubbedPath).should.equal true + done() + + it "should call convert on the writer path if a format was specified", (done)-> + @handler._convertFile @bucket, @key, format:@format, (err, path)=> + path.should.equal @formattedStubbedPath + @FileConverter.convert.calledWith(@stubbedPath, @format).should.equal true + done() + + + + \ No newline at end of file diff --git a/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee b/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee new file mode 100644 index 0000000000..4742d42840 --- /dev/null +++ b/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee @@ -0,0 +1,60 @@ +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/ImageOptimiser.js" +SandboxedModule = require('sandboxed-module') + +describe "ImageOptimiser", -> + + beforeEach -> + + @fs = + 
createReadStream:sinon.stub() + createWriteStream:sinon.stub() + rename:sinon.stub() + @pngcrush = class PngCrush + pipe:-> + on: -> + + @optimiser = SandboxedModule.require modulePath, requires: + "fs":@fs + "pngcrush":@pngcrush + "logger-sharelatex": + log:-> + err:-> + + @sourcePath = "/this/path/here.eps" + @writeStream = + pipe:-> + on: (type, cb)-> + if type == "finish" + cb() + @sourceStream = + pipe:-> + return pipe:-> + on:-> + @error = "Error" + + describe "compressPng", -> + + beforeEach -> + @fs.createReadStream.returns(@sourceStream) + @fs.createWriteStream.returns(@writeStream) + @fs.rename.callsArgWith(2) + + it "should get the file stream", (done)-> + @optimiser.compressPng @sourcePath, (err)=> + @fs.createReadStream.calledWith(@sourcePath).should.equal true + done() + + it "should create a compressed file stream", (done)-> + @optimiser.compressPng @sourcePath, (err)=> + @fs.createWriteStream.calledWith("#{@sourcePath}-optimised") + done() + + it "should rename the file after completion", (done)-> + @optimiser.compressPng @sourcePath, (err)=> + @fs.rename.calledWith("#{@sourcePath}-optimised", @sourcePath).should.equal true + done() \ No newline at end of file diff --git a/services/filestore/test/unit/coffee/KeybuilderTests.coffee b/services/filestore/test/unit/coffee/KeybuilderTests.coffee new file mode 100644 index 0000000000..3ab2dd037a --- /dev/null +++ b/services/filestore/test/unit/coffee/KeybuilderTests.coffee @@ -0,0 +1,39 @@ + +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/KeyBuilder.js" +SandboxedModule = require('sandboxed-module') + +describe "LocalFileWriter", -> + + beforeEach -> + + @keyBuilder = SandboxedModule.require modulePath, requires: + "logger-sharelatex": + log:-> + err:-> + @key = "123/456" + + describe "cachedKey", -> + + it "should add the fomat on", -> + opts = + format: "png" + newKey = @keyBuilder.addCachingToKey @key, opts + newKey.should.equal "#{@key}-converted-cache/format-png" + + it "should add the style on", -> + opts = + style: "thumbnail" + newKey = @keyBuilder.addCachingToKey @key, opts + newKey.should.equal "#{@key}-converted-cache/style-thumbnail" + + it "should add format on first", -> + opts = + style: "thumbnail" + format: "png" + newKey = @keyBuilder.addCachingToKey @key, opts + newKey.should.equal "#{@key}-converted-cache/format-png-style-thumbnail" diff --git a/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee b/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee new file mode 100644 index 0000000000..b8b443a040 --- /dev/null +++ b/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee @@ -0,0 +1,59 @@ + +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/LocalFileWriter.js" +SandboxedModule = require('sandboxed-module') + +describe "LocalFileWriter", -> + + beforeEach -> + + @writeStream = + on: (type, cb)-> + if type == "finish" + cb() + @fs = + createWriteStream : sinon.stub().returns(@writeStream) + unlink: sinon.stub() + @writer = SandboxedModule.require modulePath, requires: + "fs": @fs + "logger-sharelatex": + log:-> + err:-> + @stubbedFsPath = "something/uploads/eio2k1j3" + + describe "writeStrem", -> + beforeEach -> + @writer._getPath = sinon.stub().returns(@stubbedFsPath) + + it "write the stream to ./uploads", (done)-> + stream = + pipe: (dest)=> + 
dest.should.equal @writeStream + done() + on: -> + @writer.writeStream stream, null, ()=> + + it "should send the path in the callback", (done)-> + stream = + pipe: (dest)=> + on: (type, cb)-> + if type == "end" + cb() + @writer.writeStream stream, null, (err, fsPath)=> + fsPath.should.equal @stubbedFsPath + done() + + describe "delete file", -> + + it "should unlink the file", (done)-> + error = "my error" + @fs.unlink.callsArgWith(1, error) + @writer.deleteFile @stubbedFsPath, (err)=> + @fs.unlink.calledWith(@stubbedFsPath).should.equal true + err.should.equal error + done() + diff --git a/services/filestore/test/unit/coffee/s3WrapperTests.coffee b/services/filestore/test/unit/coffee/s3WrapperTests.coffee new file mode 100644 index 0000000000..3de9c92dba --- /dev/null +++ b/services/filestore/test/unit/coffee/s3WrapperTests.coffee @@ -0,0 +1,193 @@ +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/s3Wrapper.js" +SandboxedModule = require('sandboxed-module') + +describe "s3WrapperTests", -> + + beforeEach -> + @settings = + s3: + secret: "secret" + key: "this_key" + buckets: + user_files:"sl_user_files" + @stubbedKnoxClient = + putFile:sinon.stub() + copyFile:sinon.stub() + list: sinon.stub() + deleteMultiple: sinon.stub() + @knox = + createClient: sinon.stub().returns(@stubbedKnoxClient) + @LocalFileWriter = + writeStream: sinon.stub() + deleteFile: sinon.stub() + @requires = + "knox": @knox + "settings-sharelatex": @settings + "./LocalFileWriter":@LocalFileWriter + "logger-sharelatex": + log:-> + err:-> + @key = "my/key" + @bucketName = "my-bucket" + @error = "my errror" + + describe "Pipe to dest", -> + + it "should use correct options", (done)-> + + stubbedReadStream = {on:->} + dest = {my:"object"} + @request = (opts)=> + return stubbedReadStream + @requires["request"] = @request + @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + @s3Wrapper.getFileStream @bucketName, @key, (err, readStream)-> + readStream.should.equal stubbedReadStream + done() + + describe "sendFileToS3", -> + + beforeEach -> + @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + @stubbedKnoxClient.putFile.returns on:-> + + it "should put file with knox", (done)-> + @LocalFileWriter.deleteFile.callsArgWith(1) + @stubbedKnoxClient.putFile.callsArgWith(2, @error) + @s3Wrapper.sendFileToS3 @bucketName, @key, @fsPath, (err)=> + @stubbedKnoxClient.putFile.calledWith(@fsPath, @key).should.equal true + err.should.equal @error + done() + + it "should delete the file and pass the error with it", (done)-> + @LocalFileWriter.deleteFile.callsArgWith(1) + @stubbedKnoxClient.putFile.callsArgWith(2, @error) + @s3Wrapper.sendFileToS3 @bucketName, @key, @fsPath, (err)=> + @stubbedKnoxClient.putFile.calledWith(@fsPath, @key).should.equal true + err.should.equal @error + done() + + describe "sendStreamToS3", -> + beforeEach -> + @fsPath = "to/some/where" + @origin = + on:-> + @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + @s3Wrapper.sendFileToS3 = sinon.stub().callsArgWith(3) + + it "should send stream to LocalFileWriter", (done)-> + @LocalFileWriter.deleteFile.callsArgWith(1) + @LocalFileWriter.writeStream.callsArgWith(2, null, @fsPath) + @s3Wrapper.sendStreamToS3 @bucketName, @key, @origin, => + @LocalFileWriter.writeStream.calledWith(@origin).should.equal true + done() + + it "should return the error from LocalFileWriter", (done)-> + 
@LocalFileWriter.deleteFile.callsArgWith(1) + @LocalFileWriter.writeStream.callsArgWith(2, @error) + @s3Wrapper.sendStreamToS3 @bucketName, @key, @origin, (err)=> + err.should.equal @error + done() + + it "should send the file to s3", (done)-> + @LocalFileWriter.deleteFile.callsArgWith(1) + @LocalFileWriter.writeStream.callsArgWith(2) + @s3Wrapper.sendStreamToS3 @bucketName, @key, @origin, (err)=> + @s3Wrapper.sendFileToS3.called.should.equal true + done() + + describe "copyFile", -> + beforeEach -> + @sourceKey = "my/key" + @destKey = "my/dest/key" + @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + + it "should use knox to copy file", (done)-> + @stubbedKnoxClient.copyFile.callsArgWith(2, @error) + @s3Wrapper.copyFile @bucketName, @sourceKey, @destKey, (err)=> + err.should.equal @error + @stubbedKnoxClient.copyFile.calledWith(@sourceKey, @destKey).should.equal true + done() + + describe "deleteDirectory", -> + + beforeEach -> + @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + + it "should list the contents passing them onto multi delete", (done)-> + data = + Contents: [{Key:"1234"}, {Key: "456"}] + @stubbedKnoxClient.list.callsArgWith(1, null, data) + @stubbedKnoxClient.deleteMultiple.callsArgWith(1) + @s3Wrapper.deleteDirectory @bucketName, @key, (err)=> + @stubbedKnoxClient.deleteMultiple.calledWith(["1234","456"]).should.equal true + done() + + describe "deleteFile", -> + + it "should use correct options", (done)-> + @request = sinon.stub().callsArgWith(1) + @requires["request"] = @request + @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + + @s3Wrapper.deleteFile @bucketName, @key, (err)=> + opts = @request.args[0][0] + assert.deepEqual(opts.aws, {key:@settings.s3.key, secret:@settings.s3.secret, bucket:@bucketName}) + opts.method.should.equal "delete" + opts.timeout.should.equal (30*1000) + opts.uri.should.equal "https://#{@bucketName}.s3.amazonaws.com/#{@key}" + done() + + it "should return the error", (done)-> + @request = sinon.stub().callsArgWith(1, @error) + @requires["request"] = @request + @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + + @s3Wrapper.deleteFile @bucketName, @key, (err)=> + err.should.equal @error + done() + + describe "checkIfFileExists", -> + + it "should use correct options", (done)-> + @request = sinon.stub().callsArgWith(1, null, statusCode:200) + @requires["request"] = @request + @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + + @s3Wrapper.checkIfFileExists @bucketName, @key, (err)=> + opts = @request.args[0][0] + assert.deepEqual(opts.aws, {key:@settings.s3.key, secret:@settings.s3.secret, bucket:@bucketName}) + opts.method.should.equal "head" + opts.timeout.should.equal (30*1000) + opts.uri.should.equal "https://#{@bucketName}.s3.amazonaws.com/#{@key}" + done() + + it "should return true for a 200", (done)-> + @request = sinon.stub().callsArgWith(1, null, statusCode:200) + @requires["request"] = @request + @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + @s3Wrapper.checkIfFileExists @bucketName, @key, (err, exists)=> + exists.should.equal true + done() + + it "should return false for a non 200", (done)-> + @request = sinon.stub().callsArgWith(1, null, statusCode:404) + @requires["request"] = @request + @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + @s3Wrapper.checkIfFileExists @bucketName, @key, (err, exists)=> + exists.should.equal false + done() + + it "should return the error", (done)-> + @request 
= sinon.stub().callsArgWith(1, @error, {}) + @requires["request"] = @request + @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + + @s3Wrapper.checkIfFileExists @bucketName, @key, (err)=> + err.should.equal @error + done() \ No newline at end of file diff --git a/services/filestore/uploads/.gitignore b/services/filestore/uploads/.gitignore new file mode 100644 index 0000000000..e69de29bb2 From fb02e4042393bf40c4fe6c6078afbdbcaaeb235f Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 14 Feb 2014 16:46:08 +0000 Subject: [PATCH 002/555] Standardize grunt file --- .../{GruntFile.coffee => Gruntfile.coffee} | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) rename services/filestore/{GruntFile.coffee => Gruntfile.coffee} (77%) diff --git a/services/filestore/GruntFile.coffee b/services/filestore/Gruntfile.coffee similarity index 77% rename from services/filestore/GruntFile.coffee rename to services/filestore/Gruntfile.coffee index 27d4adc86c..6534dedc09 100644 --- a/services/filestore/GruntFile.coffee +++ b/services/filestore/Gruntfile.coffee @@ -45,10 +45,12 @@ module.exports = (grunt) -> logConcurrentOutput: true mochaTest: - test: + unit: + src: ["test/unit/js/#{grunt.option('feature') or '**'}/*.js"] options: - reporter: process.env.MOCHA_RUNNER || "spec" - src: ['test/*.js', 'test/**/*.js'] + reporter: grunt.option('reporter') or 'spec' + grep: grunt.option("grep") + grunt.loadNpmTasks 'grunt-contrib-coffee' grunt.loadNpmTasks 'grunt-contrib-watch' @@ -57,8 +59,10 @@ module.exports = (grunt) -> grunt.loadNpmTasks 'grunt-concurrent' grunt.loadNpmTasks 'grunt-mocha-test' - grunt.registerTask "ci", ["coffee", "mochaTest"] + grunt.registerTask "test:unit", ["coffee", "mochaTest:unit"] + grunt.registerTask "ci", "test:unit" grunt.registerTask 'default', ['coffee', 'concurrent'] - grunt.registerTask "install", "coffee" + grunt.registerTask "compile", "coffee" + grunt.registerTask "install", "compile" From 110e623a0d9d214b87cd26bfb8758bb97544baf4 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 17 Feb 2014 15:09:53 +0000 Subject: [PATCH 003/555] Point npm modules at github --- services/filestore/package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index 7658d613ee..02a7656538 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -2,8 +2,8 @@ "name": "filestore-sharelatex", "version": "0.0.1", "dependencies": { - "settings": "git+ssh://git@bitbucket.org:sharelatex/settings-sharelatex.git#master", - "logger": "git+ssh://git@bitbucket.org:sharelatex/logger-sharelatex.git#bunyan", + "settings": "git+ssh://git@github.com:sharelatex/settings-sharelatex.git#master", + "logger": "git+ssh://git@github.com:sharelatex/logger-sharelatex.git#master", "request": "2.14.0", "lynx": "0.0.11", "grunt-mocha-test": "~0.8.2", From 7501046ebca1e4265ec576d0a6fc6d8932e35220 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 17 Feb 2014 16:31:33 +0000 Subject: [PATCH 004/555] improved logging --- services/filestore/app/coffee/LocalFileWriter.coffee | 1 - services/filestore/app/coffee/s3Wrapper.coffee | 11 +++++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/coffee/LocalFileWriter.coffee b/services/filestore/app/coffee/LocalFileWriter.coffee index 3cf1cf27da..8bb8a6bc97 100644 --- a/services/filestore/app/coffee/LocalFileWriter.coffee +++ b/services/filestore/app/coffee/LocalFileWriter.coffee @@ -8,7 
+8,6 @@ metrics = require("./metrics") module.exports = writeStream: (stream, key, callback)-> - timer = new metrics.Timer("writingFile") callback = _.once callback fsPath = @_getPath(key) diff --git a/services/filestore/app/coffee/s3Wrapper.coffee b/services/filestore/app/coffee/s3Wrapper.coffee index 0a6f5a3cf5..d9ef3dea55 100644 --- a/services/filestore/app/coffee/s3Wrapper.coffee +++ b/services/filestore/app/coffee/s3Wrapper.coffee @@ -32,6 +32,9 @@ module.exports = if err? logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath,"something went wrong uploading file to s3" return callback(err) + if !res? + logger.err err:err, res:res, bucketName:bucketName, key:key, fsPath:fsPath, "no response from s3 put file" + callback("no response from put file") if res.statusCode != 200 logger.err bucketName:bucketName, key:key, fsPath:fsPath, "non 200 response from s3 putting file" return callback("non 200 response from s3 on put file") @@ -58,7 +61,7 @@ module.exports = options = buildDefaultOptions(bucketName, "get", key) readStream = request(options) readStream.on "error", (err)-> - logger.err bucketName:bucketName, key:key, "error getting file stream from s3" + logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" callback null, readStream copyFile: (bucketName, sourceKey, destKey, callback)-> @@ -69,7 +72,7 @@ module.exports = bucket: bucketName s3Client.copyFile sourceKey, destKey, (err)-> if err? - logger.err bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in aws" + logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in aws" callback(err) deleteFile: (bucketName, key, callback)-> @@ -77,7 +80,7 @@ module.exports = options = buildDefaultOptions(bucketName, "delete", key) request options, (err, res)-> if err? - logger.err res:res, bucketName:bucketName, key:key, "something went wrong deleting file in aws" + logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong deleting file in aws" callback(err) deleteDirectory: (bucketName, key, callback)-> @@ -95,7 +98,7 @@ module.exports = options = buildDefaultOptions(bucketName, "head", key) request options, (err, res)-> if err? 
- logger.err res:res, bucketName:bucketName, key:key, "something went wrong copying file in aws" + logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong copying file in aws" exists = res.statusCode == 200 logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exsists in s3" callback(err, exists) From 5b49ec11441c6172e67aa2387e7eac36555efe6c Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 17 Feb 2014 16:32:28 +0000 Subject: [PATCH 005/555] don't exit on error --- services/filestore/app.coffee | 5 ----- 1 file changed, 5 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 70f6bddbbe..63b7f510ef 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -45,11 +45,6 @@ app.use (req, res, next) -> statusCode: req.statusCode err.domainEmitter.res = "to big to log" logger.err err:err, req:req, res:res, "uncaught exception thrown on request" - appIsOk = false - exit = -> - console.log "exit" - process.exit(1) - setTimeout exit, 20000 requestDomain.run next app.get "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile From 1ff5a5adb827110722c7de7e33841dc22ebfcc7c Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 17 Feb 2014 18:50:28 +0000 Subject: [PATCH 006/555] changed error log message --- services/filestore/app.coffee | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 63b7f510ef..fc6cce6ede 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -43,7 +43,12 @@ app.use (req, res, next) -> url:req.url key: req.key statusCode: req.statusCode - err.domainEmitter.res = "to big to log" + err = + message: err.message + stack: err.stack + name: err.name + type: err.type + arguments: err.arguments logger.err err:err, req:req, res:res, "uncaught exception thrown on request" requestDomain.run next From f99a6dc39b3017642c3369a643a2827da5249372 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 18 Feb 2014 17:43:21 +0000 Subject: [PATCH 007/555] changed file converted to use child process with nice --- .../filestore/app/coffee/FileConverter.coffee | 16 +++++----- .../filestore/app/coffee/s3Wrapper.coffee | 2 ++ .../config/settings.development.coffee | 1 - services/filestore/package.json | 4 +-- .../unit/coffee/FileConverterTests.coffee | 30 +++++++++---------- 5 files changed, 25 insertions(+), 28 deletions(-) diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index 11f405361c..63672c3872 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -2,7 +2,7 @@ easyimage = require("easyimage") _ = require("underscore") metrics = require("./metrics") logger = require("logger-sharelatex") - +exec = require('child_process').exec approvedFormats = ["png"] module.exports = @@ -15,10 +15,8 @@ module.exports = if !_.include approvedFormats, requestedFormat err = new Error("invalid format requested") return callback err - args = - src: sourcePath - dst: destPath - easyimage.convert args, (err)-> + args = "nice convert -flatten -density 300 #{sourcePath} #{destPath}" + exec args, (err, stdout, stderr)-> timer.done() callback(err, destPath) @@ -31,8 +29,8 @@ module.exports = dst: destPath width: 424 height: 300 - args = "convert -flatten -background white -resize 300x -density 300 #{sourcePath} #{destPath}" - 
easyimage.exec args, (err)-> + args = "nice convert -flatten -background white -resize 300x -density 300 #{sourcePath} #{destPath}" + exec args, (err, stdout, stderr)-> callback(err, destPath) preview: (sourcePath, callback)-> @@ -44,6 +42,6 @@ module.exports = dst: destPath width: 600 height: 849 - args = "convert -flatten -background white -resize 600x -density 300 #{sourcePath} #{destPath}" - easyimage.exec args, (err)-> + args = "nice convert -flatten -background white -resize 600x -density 300 #{sourcePath} #{destPath}" + exec args, (err, stdout, stderr)-> callback(err, destPath) diff --git a/services/filestore/app/coffee/s3Wrapper.coffee b/services/filestore/app/coffee/s3Wrapper.coffee index d9ef3dea55..bff18f90c7 100644 --- a/services/filestore/app/coffee/s3Wrapper.coffee +++ b/services/filestore/app/coffee/s3Wrapper.coffee @@ -99,6 +99,8 @@ module.exports = request options, (err, res)-> if err? logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong copying file in aws" + if !res? + logger.err err:err, res:res, bucketName:bucketName, key:key, "no response object returned when checking if file exists" exists = res.statusCode == 200 logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exsists in s3" callback(err, exists) diff --git a/services/filestore/config/settings.development.coffee b/services/filestore/config/settings.development.coffee index 2fa32e622e..3f390837d8 100644 --- a/services/filestore/config/settings.development.coffee +++ b/services/filestore/config/settings.development.coffee @@ -13,7 +13,6 @@ module.exports = user_files: "" template_files: "" - # Filestore health check # ---------------------- # Project and file details to check in filestore when calling /health_check diff --git a/services/filestore/package.json b/services/filestore/package.json index 02a7656538..a10bf95b87 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -10,12 +10,12 @@ "knox": "~0.8.8", "node-uuid": "~1.4.1", "underscore": "~1.5.2", - "easyimage": "~0.1.6", "express": "~3.4.8", "longjohn": "~0.2.2", "async": "~0.2.10", "pngcrush": "0.0.3", - "stream-buffers": "~0.2.5" + "stream-buffers": "~0.2.5", + "node-transloadit": "0.0.4" }, "devDependencies": { "sinon": "", diff --git a/services/filestore/test/unit/coffee/FileConverterTests.coffee b/services/filestore/test/unit/coffee/FileConverterTests.coffee index bfdec86813..41f9d5090d 100644 --- a/services/filestore/test/unit/coffee/FileConverterTests.coffee +++ b/services/filestore/test/unit/coffee/FileConverterTests.coffee @@ -13,8 +13,11 @@ describe "FileConverter", -> @easyimage = convert:sinon.stub() exec: sinon.stub() + @child_process = + exec : sinon.stub() @converter = SandboxedModule.require modulePath, requires: "easyimage":@easyimage + 'child_process': @child_process "logger-sharelatex": log:-> err:-> @@ -26,48 +29,43 @@ describe "FileConverter", -> describe "convert", -> it "should convert the source to the requested format", (done)-> - @easyimage.convert.callsArgWith(1) + @child_process.exec.callsArgWith(1) @converter.convert @sourcePath, @format, (err)=> - args = @easyimage.convert.args[0][0] - args.src.should.equal @sourcePath+"[0]" - args.dst.should.equal "#{@sourcePath}.#{@format}" + args = @child_process.exec.args[0][0] + args.indexOf(@sourcePath).should.not.equal -1 + args.indexOf(@format).should.not.equal -1 done() - it "should return the dest path", (done)-> - @easyimage.convert.callsArgWith(1) + @child_process.exec.callsArgWith(1) 
@converter.convert @sourcePath, @format, (err, destPath)=> destPath.should.equal "#{@sourcePath}.#{@format}" done() it "should return the error from convert", (done)-> - @easyimage.convert.callsArgWith(1, @error) + @child_process.exec.callsArgWith(1, @error) @converter.convert @sourcePath, @format, (err)=> err.should.equal @error done() it "should not accapt an non aproved format", (done)-> - @easyimage.convert.callsArgWith(1) + @child_process.exec.callsArgWith(1) @converter.convert @sourcePath, "ahhhhh", (err)=> expect(err).to.exist done() - describe "thumbnail", -> it "should call easy image resize with args", (done)-> - @easyimage.exec.callsArgWith(1) + @child_process.exec.callsArgWith(1) @converter.thumbnail @sourcePath, (err)=> - args = @easyimage.exec.args[0][0] + args = @child_process.exec.args[0][0] args.indexOf(@sourcePath).should.not.equal -1 done() - it "should compress the png", ()-> - - describe "preview", -> it "should call easy image resize with args", (done)-> - @easyimage.exec.callsArgWith(1) + @child_process.exec.callsArgWith(1) @converter.preview @sourcePath, (err)=> - args = @easyimage.exec.args[0][0] + args = @child_process.exec.args[0][0] args.indexOf(@sourcePath).should.not.equal -1 done() From 5d86dfacd7d09c4fc64ec92da8017d105167514c Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 18 Feb 2014 17:24:17 +0000 Subject: [PATCH 008/555] Create LICENSE --- services/filestore/LICENSE | 662 +++++++++++++++++++++++++++++++++++++ 1 file changed, 662 insertions(+) create mode 100644 services/filestore/LICENSE diff --git a/services/filestore/LICENSE b/services/filestore/LICENSE new file mode 100644 index 0000000000..ac8619dcb9 --- /dev/null +++ b/services/filestore/LICENSE @@ -0,0 +1,662 @@ + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. 
However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. 
+ + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. 
From f77906e01e8be24471638832aa01c13f6b5097ba Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 18 Feb 2014 17:24:40 +0000 Subject: [PATCH 009/555] Create README.md --- services/filestore/README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 services/filestore/README.md diff --git a/services/filestore/README.md b/services/filestore/README.md new file mode 100644 index 0000000000..f6ad20d6af --- /dev/null +++ b/services/filestore/README.md @@ -0,0 +1,11 @@ +filestore-sharelatex +==================== + +An API for CRUD operations on binary files stored in S3 + +License +------- + +The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file. + +Copyright (c) ShareLaTeX, 2014. From f63cdb35154b22cc0a30955bd3c665aa4aa8543a Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 19 Feb 2014 12:11:50 +0000 Subject: [PATCH 010/555] changed thumbnail and preview to be smaller --- services/filestore/app/coffee/FileConverter.coffee | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index 63672c3872..ce62d32743 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -1,4 +1,3 @@ -easyimage = require("easyimage") _ = require("underscore") metrics = require("./metrics") logger = require("logger-sharelatex") @@ -29,7 +28,7 @@ module.exports = dst: destPath width: 424 height: 300 - args = "nice convert -flatten -background white -resize 300x -density 300 #{sourcePath} #{destPath}" + args = "nice convert -flatten -background white -resize 260x -density 300 #{sourcePath} #{destPath}" exec args, (err, stdout, stderr)-> callback(err, destPath) @@ -42,6 +41,6 @@ module.exports = dst: destPath width: 600 height: 849 - args = "nice convert -flatten -background white -resize 600x -density 300 #{sourcePath} #{destPath}" + args = "nice convert -flatten -background white -resize 548x -density 300 #{sourcePath} #{destPath}" exec args, (err, stdout, stderr)-> callback(err, destPath) From d0600a4b64c152eaad12ec85e047e686f458ce50 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 19 Feb 2014 13:02:53 +0000 Subject: [PATCH 011/555] changed optimisation to optipng and added timeouts to conversions --- .../filestore/app/coffee/FileConverter.coffee | 14 +++++-- .../app/coffee/ImageOptimiser.coffee | 28 ++++--------- .../unit/coffee/FileConverterTests.coffee | 12 +++--- .../unit/coffee/ImageOptimiserTests.coffee | 41 +++++-------------- 4 files changed, 34 insertions(+), 61 deletions(-) diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index ce62d32743..94323c0677 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -4,6 +4,8 @@ logger = require("logger-sharelatex") exec = require('child_process').exec approvedFormats = ["png"] +twoMinsInMs = 2 * (60 * 1000) + module.exports = convert: (sourcePath, requestedFormat, callback)-> @@ -15,7 +17,9 @@ module.exports = err = new Error("invalid format requested") return callback err args = "nice convert -flatten -density 300 #{sourcePath} #{destPath}" - exec args, (err, stdout, stderr)-> + opts = + timeout: twoMinsInMs + exec args, opts, (err, stdout, stderr)-> timer.done() callback(err, destPath) @@ -29,7 +33,9 @@ module.exports = width: 424 height: 
300 args = "nice convert -flatten -background white -resize 260x -density 300 #{sourcePath} #{destPath}" - exec args, (err, stdout, stderr)-> + opts = + timeout: twoMinsInMs + exec args, opts,(err, stdout, stderr)-> callback(err, destPath) preview: (sourcePath, callback)-> @@ -42,5 +48,7 @@ module.exports = width: 600 height: 849 args = "nice convert -flatten -background white -resize 548x -density 300 #{sourcePath} #{destPath}" - exec args, (err, stdout, stderr)-> + opts = + timeout: twoMinsInMs + exec args, opts,(err, stdout, stderr)-> callback(err, destPath) diff --git a/services/filestore/app/coffee/ImageOptimiser.coffee b/services/filestore/app/coffee/ImageOptimiser.coffee index 8d5a9b8714..4c804cedc1 100644 --- a/services/filestore/app/coffee/ImageOptimiser.coffee +++ b/services/filestore/app/coffee/ImageOptimiser.coffee @@ -1,29 +1,15 @@ -PngCrush = require('pngcrush') -fs = require("fs") +exec = require('child_process').exec logger = require("logger-sharelatex") module.exports = compressPng: (localPath, callback)-> - optimisedPath = "#{localPath}-optimised" startTime = new Date() - logger.log localPath:localPath, optimisedPath:optimisedPath, "optimising png path" - readStream = fs.createReadStream(localPath) - writeStream = fs.createWriteStream(optimisedPath) - readStream.on "error", (err)-> - logger.err err:err, localPath:localPath, "something went wrong getting read stream for compressPng" - callback(err) - writeStream.on "error", (err)-> - logger.err err:err, localPath:localPath, "something went wrong getting write stream for compressPng" - callback(err) - myCrusher = new PngCrush() - myCrusher.on "error", (err)-> - logger.err err:err, localPath:localPath, "error compressing file" - callback err - readStream.pipe(myCrusher).pipe(writeStream) - writeStream.on "finish", -> - timeTaken = new Date() - startTime - logger.log localPath:localPath, timeTaken:timeTaken, "finished converting file" - fs.rename optimisedPath, localPath, callback + logger.log localPath:localPath, "optimising png path" + args = "optipng #{localPath}" + opts = + timeout: 60 * 1000 + exec args, opts, callback + diff --git a/services/filestore/test/unit/coffee/FileConverterTests.coffee b/services/filestore/test/unit/coffee/FileConverterTests.coffee index 41f9d5090d..b56cb55eb2 100644 --- a/services/filestore/test/unit/coffee/FileConverterTests.coffee +++ b/services/filestore/test/unit/coffee/FileConverterTests.coffee @@ -29,7 +29,7 @@ describe "FileConverter", -> describe "convert", -> it "should convert the source to the requested format", (done)-> - @child_process.exec.callsArgWith(1) + @child_process.exec.callsArgWith(2) @converter.convert @sourcePath, @format, (err)=> args = @child_process.exec.args[0][0] args.indexOf(@sourcePath).should.not.equal -1 @@ -37,26 +37,26 @@ describe "FileConverter", -> done() it "should return the dest path", (done)-> - @child_process.exec.callsArgWith(1) + @child_process.exec.callsArgWith(2) @converter.convert @sourcePath, @format, (err, destPath)=> destPath.should.equal "#{@sourcePath}.#{@format}" done() it "should return the error from convert", (done)-> - @child_process.exec.callsArgWith(1, @error) + @child_process.exec.callsArgWith(2, @error) @converter.convert @sourcePath, @format, (err)=> err.should.equal @error done() it "should not accapt an non aproved format", (done)-> - @child_process.exec.callsArgWith(1) + @child_process.exec.callsArgWith(2) @converter.convert @sourcePath, "ahhhhh", (err)=> expect(err).to.exist done() describe "thumbnail", -> it "should call easy 
image resize with args", (done)-> - @child_process.exec.callsArgWith(1) + @child_process.exec.callsArgWith(2) @converter.thumbnail @sourcePath, (err)=> args = @child_process.exec.args[0][0] args.indexOf(@sourcePath).should.not.equal -1 @@ -64,7 +64,7 @@ describe "FileConverter", -> describe "preview", -> it "should call easy image resize with args", (done)-> - @child_process.exec.callsArgWith(1) + @child_process.exec.callsArgWith(2) @converter.preview @sourcePath, (err)=> args = @child_process.exec.args[0][0] args.indexOf(@sourcePath).should.not.equal -1 diff --git a/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee b/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee index 4742d42840..80ca0c1d66 100644 --- a/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee +++ b/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee @@ -9,52 +9,31 @@ SandboxedModule = require('sandboxed-module') describe "ImageOptimiser", -> beforeEach -> - - @fs = - createReadStream:sinon.stub() - createWriteStream:sinon.stub() - rename:sinon.stub() - @pngcrush = class PngCrush - pipe:-> - on: -> + @child_process = + exec : sinon.stub() @optimiser = SandboxedModule.require modulePath, requires: - "fs":@fs - "pngcrush":@pngcrush + 'child_process': @child_process "logger-sharelatex": log:-> err:-> @sourcePath = "/this/path/here.eps" - @writeStream = - pipe:-> - on: (type, cb)-> - if type == "finish" - cb() - @sourceStream = - pipe:-> - return pipe:-> - on:-> @error = "Error" describe "compressPng", -> - beforeEach -> - @fs.createReadStream.returns(@sourceStream) - @fs.createWriteStream.returns(@writeStream) - @fs.rename.callsArgWith(2) - it "should get the file stream", (done)-> + it "convert the file", (done)-> + @child_process.exec.callsArgWith(2) @optimiser.compressPng @sourcePath, (err)=> - @fs.createReadStream.calledWith(@sourcePath).should.equal true + args = @child_process.exec.args[0][0] + args.should.equal "optipng #{@sourcePath}" done() - it "should create a compressed file stream", (done)-> - @optimiser.compressPng @sourcePath, (err)=> - @fs.createWriteStream.calledWith("#{@sourcePath}-optimised") - done() - it "should rename the file after completion", (done)-> + it "should return the errro the file", (done)-> + @child_process.exec.callsArgWith(2, @error) @optimiser.compressPng @sourcePath, (err)=> - @fs.rename.calledWith("#{@sourcePath}-optimised", @sourcePath).should.equal true + err.should.equal @error done() \ No newline at end of file From 531b83a032a74bf7d1525960e2bfa8d9495b62a2 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 19 Feb 2014 13:32:41 +0000 Subject: [PATCH 012/555] added more logging --- services/filestore/app/coffee/FileConverter.coffee | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index 94323c0677..67de922c9e 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -21,6 +21,10 @@ module.exports = timeout: twoMinsInMs exec args, opts, (err, stdout, stderr)-> timer.done() + if err? 
+ logger.err err:err, stderr:stderr, sourcePath:sourcePath, requestedFormat:requestedFormat, "something went wrong converting file" + else + logger.log sourcePath:sourcePath, requestedFormat:requestedFormat, "finished converting file" callback(err, destPath) thumbnail: (sourcePath, callback)-> @@ -36,6 +40,10 @@ module.exports = opts = timeout: twoMinsInMs exec args, opts,(err, stdout, stderr)-> + if err? + logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to preview" + else + logger.log sourcePath:sourcePath, "finished thumbnailing file" callback(err, destPath) preview: (sourcePath, callback)-> @@ -51,4 +59,8 @@ module.exports = opts = timeout: twoMinsInMs exec args, opts,(err, stdout, stderr)-> + if err? + logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to preview" + else + logger.log sourcePath:sourcePath, "finished converting file to preview" callback(err, destPath) From b85017759ee15c1ea6bf4efea967678d27dd1a7d Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 19 Feb 2014 14:01:53 +0000 Subject: [PATCH 013/555] added more logging --- services/filestore/app/coffee/ImageOptimiser.coffee | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/ImageOptimiser.coffee b/services/filestore/app/coffee/ImageOptimiser.coffee index 4c804cedc1..4e4947d986 100644 --- a/services/filestore/app/coffee/ImageOptimiser.coffee +++ b/services/filestore/app/coffee/ImageOptimiser.coffee @@ -10,6 +10,11 @@ module.exports = args = "optipng #{localPath}" opts = timeout: 60 * 1000 - exec args, opts, callback + exec args, opts,(err, stdout, stderr)-> + if err? + logger.err err:err, stderr:stderr, localPath:localPath, "something went wrong converting compressPng" + else + logger.log localPath:localPath, "finished compressPng file" + callback(err) From 960b46dda00c4dacb5dc2218970fc7e27f088bb9 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 19 Feb 2014 14:13:40 +0000 Subject: [PATCH 014/555] increased timeout to 4 mins.... --- services/filestore/app/coffee/FileConverter.coffee | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index 67de922c9e..bb19fbaa16 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -4,7 +4,7 @@ logger = require("logger-sharelatex") exec = require('child_process').exec approvedFormats = ["png"] -twoMinsInMs = 2 * (60 * 1000) +fourMinsInMs = 4 * (60 * 1000) module.exports = @@ -18,7 +18,7 @@ module.exports = return callback err args = "nice convert -flatten -density 300 #{sourcePath} #{destPath}" opts = - timeout: twoMinsInMs + timeout: fourMinsInMs exec args, opts, (err, stdout, stderr)-> timer.done() if err? @@ -38,7 +38,7 @@ module.exports = height: 300 args = "nice convert -flatten -background white -resize 260x -density 300 #{sourcePath} #{destPath}" opts = - timeout: twoMinsInMs + timeout: fourMinsInMs exec args, opts,(err, stdout, stderr)-> if err? logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to preview" @@ -57,7 +57,7 @@ module.exports = height: 849 args = "nice convert -flatten -background white -resize 548x -density 300 #{sourcePath} #{destPath}" opts = - timeout: twoMinsInMs + timeout: fourMinsInMs exec args, opts,(err, stdout, stderr)-> if err? 
 				logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to preview"

From 90ad2c24c655e7dc4ccea02003a316cee89f9605 Mon Sep 17 00:00:00 2001
From: Henry Oswald
Date: Wed, 19 Feb 2014 14:58:36 +0000
Subject: [PATCH 015/555] call the callback when no response is returned from s3 when checking if file exists

---
 services/filestore/app/coffee/s3Wrapper.coffee | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/services/filestore/app/coffee/s3Wrapper.coffee b/services/filestore/app/coffee/s3Wrapper.coffee
index bff18f90c7..3e0c322a1a 100644
--- a/services/filestore/app/coffee/s3Wrapper.coffee
+++ b/services/filestore/app/coffee/s3Wrapper.coffee
@@ -101,6 +101,8 @@ module.exports =
 logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong copying file in aws"
 if !res?
 logger.err err:err, res:res, bucketName:bucketName, key:key, "no response object returned when checking if file exists"
+ err = new Error("no response from s3 #{bucketName} #{key}")
+ return callback(err)
 exists = res.statusCode == 200
 logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exsists in s3"
 callback(err, exists)

From 0a2ce3b869b7bc996ae8adf99e4b645a2d21c779 Mon Sep 17 00:00:00 2001
From: Henry Oswald
Date: Wed, 19 Feb 2014 15:24:29 +0000
Subject: [PATCH 016/555] fixed misleading log message

---
 services/filestore/app/coffee/s3Wrapper.coffee | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/services/filestore/app/coffee/s3Wrapper.coffee b/services/filestore/app/coffee/s3Wrapper.coffee
index 3e0c322a1a..e64764694f 100644
--- a/services/filestore/app/coffee/s3Wrapper.coffee
+++ b/services/filestore/app/coffee/s3Wrapper.coffee
@@ -98,7 +98,7 @@ module.exports =
 options = buildDefaultOptions(bucketName, "head", key)
 request options, (err, res)->
 if err?
- logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong copying file in aws"
+ logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong checking file in aws"
 if !res?
logger.err err:err, res:res, bucketName:bucketName, key:key, "no response object returned when checking if file exists" err = new Error("no response from s3 #{bucketName} #{key}") From 3f54688ec80996c2c8b782df08718b2e23e04139 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 19 Feb 2014 18:39:25 +0000 Subject: [PATCH 018/555] added max sockets to s3 wrapper, getting some ETIMEDOUT under laod, may be cause --- services/filestore/app/coffee/s3Wrapper.coffee | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/filestore/app/coffee/s3Wrapper.coffee b/services/filestore/app/coffee/s3Wrapper.coffee index ca0b0aaf74..2aea7945d8 100644 --- a/services/filestore/app/coffee/s3Wrapper.coffee +++ b/services/filestore/app/coffee/s3Wrapper.coffee @@ -1,3 +1,5 @@ +http = require('http') +http.globalAgent.maxSockets = 300 settings = require("settings-sharelatex") request = require("request") logger = require("logger-sharelatex") From fada5ecaf4aa2cecf68018b1750a5a1fbb2b3bbc Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 19 Feb 2014 20:56:45 +0000 Subject: [PATCH 019/555] converted get stream to use knox, test to see if it is more robust --- .../filestore/app/coffee/s3Wrapper.coffee | 17 ++++++++----- .../test/unit/coffee/s3WrapperTests.coffee | 25 ++++++++++--------- 2 files changed, 24 insertions(+), 18 deletions(-) diff --git a/services/filestore/app/coffee/s3Wrapper.coffee b/services/filestore/app/coffee/s3Wrapper.coffee index 2aea7945d8..f2eb067d39 100644 --- a/services/filestore/app/coffee/s3Wrapper.coffee +++ b/services/filestore/app/coffee/s3Wrapper.coffee @@ -8,7 +8,6 @@ knox = require("knox") path = require("path") LocalFileWriter = require("./LocalFileWriter") _ = require("underscore") - thirtySeconds = 30 * 1000 @@ -58,13 +57,19 @@ module.exports = return callback(err) @sendFileToS3 bucketName, key, fsPath, callback - getFileStream: (bucketName, key, callback)-> + getFileStream: (bucketName, key, callback = (err, res)->)-> logger.log bucketName:bucketName, key:key, "getting file from s3" - options = buildDefaultOptions(bucketName, "get", key) - readStream = request(options) - readStream.on "error", (err)-> + s3Client = knox.createClient + key: settings.s3.key + secret: settings.s3.secret + bucket: bucketName + s3Stream = s3Client.get(key) + s3Stream.end() + s3Stream.on 'response', (res) -> + callback null, res + s3Stream.on 'error', (err) -> logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" - callback null, readStream + callback err copyFile: (bucketName, sourceKey, destKey, callback)-> logger.log bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "copying file in s3" diff --git a/services/filestore/test/unit/coffee/s3WrapperTests.coffee b/services/filestore/test/unit/coffee/s3WrapperTests.coffee index 3de9c92dba..42023fea29 100644 --- a/services/filestore/test/unit/coffee/s3WrapperTests.coffee +++ b/services/filestore/test/unit/coffee/s3WrapperTests.coffee @@ -20,6 +20,7 @@ describe "s3WrapperTests", -> copyFile:sinon.stub() list: sinon.stub() deleteMultiple: sinon.stub() + get: sinon.stub() @knox = createClient: sinon.stub().returns(@stubbedKnoxClient) @LocalFileWriter = @@ -36,19 +37,19 @@ describe "s3WrapperTests", -> @bucketName = "my-bucket" @error = "my errror" - describe "Pipe to dest", -> - - it "should use correct options", (done)-> - - stubbedReadStream = {on:->} - dest = {my:"object"} - @request = (opts)=> - return stubbedReadStream - @requires["request"] = @request + describe "getFileStream", -> + beforeEach -> 
@s3Wrapper = SandboxedModule.require modulePath, requires: @requires - @s3Wrapper.getFileStream @bucketName, @key, (err, readStream)-> - readStream.should.equal stubbedReadStream - done() + + + it "should use correct key", (done)-> + @stubbedKnoxClient.get.returns( + on:-> + end:-> + ) + @s3Wrapper.getFileStream @bucketName, @key, @fsPath, (err)=> + @stubbedKnoxClient.get.calledWith(@key).should.equal true + done() describe "sendFileToS3", -> From 99e15dc12faa869393597669f31ae3d40ea157f3 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 19 Feb 2014 21:03:03 +0000 Subject: [PATCH 020/555] upped https connections to 300 and added debug logging --- services/filestore/app/coffee/s3Wrapper.coffee | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/services/filestore/app/coffee/s3Wrapper.coffee b/services/filestore/app/coffee/s3Wrapper.coffee index f2eb067d39..8d2492ba53 100644 --- a/services/filestore/app/coffee/s3Wrapper.coffee +++ b/services/filestore/app/coffee/s3Wrapper.coffee @@ -1,5 +1,7 @@ http = require('http') http.globalAgent.maxSockets = 300 +https = require('https') +https.globalAgent.maxSockets = 300 settings = require("settings-sharelatex") request = require("request") logger = require("logger-sharelatex") @@ -11,6 +13,14 @@ _ = require("underscore") thirtySeconds = 30 * 1000 + +printSockets = -> + console.log require('https').globalAgent.sockets + console.log require('http').globalAgent.sockets + setTimeout printSockets, thirtySeconds + +printSockets() + buildDefaultOptions = (bucketName, method, key)-> return { aws: From 159670a3d459fd6db55d310d46f18c1685b712ce Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 20 Feb 2014 11:45:51 +0000 Subject: [PATCH 021/555] reduced timeout to 20 seconds --- services/filestore/app/coffee/FileConverter.coffee | 8 ++++---- services/filestore/app/coffee/ImageOptimiser.coffee | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index bb19fbaa16..3093c888c2 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -4,7 +4,7 @@ logger = require("logger-sharelatex") exec = require('child_process').exec approvedFormats = ["png"] -fourMinsInMs = 4 * (60 * 1000) +twentySeconds = 20 * 1000 module.exports = @@ -18,7 +18,7 @@ module.exports = return callback err args = "nice convert -flatten -density 300 #{sourcePath} #{destPath}" opts = - timeout: fourMinsInMs + timeout: twentySeconds exec args, opts, (err, stdout, stderr)-> timer.done() if err? @@ -38,7 +38,7 @@ module.exports = height: 300 args = "nice convert -flatten -background white -resize 260x -density 300 #{sourcePath} #{destPath}" opts = - timeout: fourMinsInMs + timeout: twentySeconds exec args, opts,(err, stdout, stderr)-> if err? logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to preview" @@ -57,7 +57,7 @@ module.exports = height: 849 args = "nice convert -flatten -background white -resize 548x -density 300 #{sourcePath} #{destPath}" opts = - timeout: fourMinsInMs + timeout: twentySeconds exec args, opts,(err, stdout, stderr)-> if err? 
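Patch 021 above drops the conversion timeout from four minutes to twenty seconds; the limit is enforced by child_process.exec's timeout option rather than by ImageMagick itself. A minimal sketch of that pattern, assuming `convert` is on the PATH (the destPath naming is illustrative, not the module's exact logic):

# Sketch of the exec-with-timeout pattern used by FileConverter.
exec = require("child_process").exec
twentySeconds = 20 * 1000

convertToPng = (sourcePath, callback)->
	destPath = "#{sourcePath}.png"   # illustrative output path
	args = "nice convert -flatten -density 300 #{sourcePath} #{destPath}"
	opts =
		timeout: twentySeconds
	exec args, opts, (err, stdout, stderr)->
		return callback(err) if err?
		callback(null, destPath)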
logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to preview" diff --git a/services/filestore/app/coffee/ImageOptimiser.coffee b/services/filestore/app/coffee/ImageOptimiser.coffee index 4e4947d986..7aece25464 100644 --- a/services/filestore/app/coffee/ImageOptimiser.coffee +++ b/services/filestore/app/coffee/ImageOptimiser.coffee @@ -9,7 +9,7 @@ module.exports = logger.log localPath:localPath, "optimising png path" args = "optipng #{localPath}" opts = - timeout: 60 * 1000 + timeout: 20 * 1000 exec args, opts,(err, stdout, stderr)-> if err? logger.err err:err, stderr:stderr, localPath:localPath, "something went wrong converting compressPng" From aa9f8882eeff8ec85d47f735bc70bbb0ccfb2925 Mon Sep 17 00:00:00 2001 From: goodbest Date: Fri, 21 Feb 2014 23:09:38 +0800 Subject: [PATCH 022/555] change repo URL from git+ssh:// to git+https:// --- services/filestore/package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index a10bf95b87..f623ce2ae3 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -2,8 +2,8 @@ "name": "filestore-sharelatex", "version": "0.0.1", "dependencies": { - "settings": "git+ssh://git@github.com:sharelatex/settings-sharelatex.git#master", - "logger": "git+ssh://git@github.com:sharelatex/logger-sharelatex.git#master", + "settings": "git+https://github.com/sharelatex/settings-sharelatex.git#master", + "logger": "git+https://github.com/sharelatex/logger-sharelatex.git#master", "request": "2.14.0", "lynx": "0.0.11", "grunt-mocha-test": "~0.8.2", From 4756b488eee5e1eddc33536ee5e509217e608166 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 24 Feb 2014 14:26:38 +0000 Subject: [PATCH 023/555] Create .travis.yml --- services/filestore/.travis.yml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 services/filestore/.travis.yml diff --git a/services/filestore/.travis.yml b/services/filestore/.travis.yml new file mode 100644 index 0000000000..d97f744899 --- /dev/null +++ b/services/filestore/.travis.yml @@ -0,0 +1,19 @@ +language: node_js + +node_js: + - "0.10" + +before_install: + - npm install -g grunt-cli + +install: + - npm install + - grunt install + +script: + - grunt test:unit + +services: + - redis-server + - mongodb + From 49feb6b8393aa505e32a3a4699de59890ca06732 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 24 Feb 2014 14:40:42 +0000 Subject: [PATCH 024/555] Update .travis.yml --- services/filestore/.travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/services/filestore/.travis.yml b/services/filestore/.travis.yml index d97f744899..29f5884d60 100644 --- a/services/filestore/.travis.yml +++ b/services/filestore/.travis.yml @@ -16,4 +16,3 @@ script: services: - redis-server - mongodb - From 64fec8aed778fa7afe7b47a2e5a4a40dbbf94559 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 24 Feb 2014 14:44:03 +0000 Subject: [PATCH 025/555] Update README.md --- services/filestore/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/filestore/README.md b/services/filestore/README.md index f6ad20d6af..420214b56f 100644 --- a/services/filestore/README.md +++ b/services/filestore/README.md @@ -3,6 +3,8 @@ filestore-sharelatex An API for CRUD operations on binary files stored in S3 +[![Build Status](https://travis-ci.org/sharelatex/filestore-sharelatex.png?branch=master)](https://travis-ci.org/sharelatex/filestore-sharelatex) + License ------- From 
f68080d56c5b5a9afefdc40ebd78d7ce741de956 Mon Sep 17 00:00:00 2001 From: Oliver Matthews Date: Tue, 25 Feb 2014 09:10:22 +0000 Subject: [PATCH 026/555] create mixin to allow switching of backend --- .../app/coffee/FileController.coffee | 8 ++--- .../filestore/app/coffee/FileHandler.coffee | 29 +++++++++---------- .../filestore/app/coffee/fsWrapper.coffee | 9 ++++++ .../config/settings.development.coffee | 5 ++++ .../unit/coffee/FileControllerTests.coffee | 2 +- .../test/unit/coffee/FileHandlerTests.coffee | 4 +-- 6 files changed, 34 insertions(+), 23 deletions(-) create mode 100644 services/filestore/app/coffee/fsWrapper.coffee diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 83c02334e3..85046b222a 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -1,4 +1,4 @@ -s3Wrapper = require("./s3Wrapper") +fsWrapper = require("./fsWrapper") settings = require("settings-sharelatex") logger = require("logger-sharelatex") FileHandler = require("./FileHandler") @@ -37,9 +37,9 @@ module.exports = oldProject_id = req.body.source.project_id oldFile_id = req.body.source.file_id logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "reciving request to copy file" - s3Wrapper.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)-> + fsWrapper.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)-> if err? - logger.log err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file in s3Wrapper" + logger.log err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file" res.send 500 else res.send 200 @@ -50,7 +50,7 @@ module.exports = logger.log key:key, bucket:bucket, "reciving request to delete file" FileHandler.deleteFile bucket, key, (err)-> if err? 
- logger.log err:err, key:key, bucket:bucket, "something went wrong deleting file in s3Wrapper" + logger.log err:err, key:key, bucket:bucket, "something went wrong deleting file" res.send 500 else res.send 204 diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 6d647fdac5..eea1469845 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -1,5 +1,5 @@ settings = require("settings-sharelatex") -s3Wrapper = require("./s3Wrapper") +fsWrapper = require("./fsWrapper") LocalFileWriter = require("./LocalFileWriter") logger = require("logger-sharelatex") FileConverter = require("./FileConverter") @@ -12,15 +12,15 @@ module.exports = insertFile: (bucket, key, stream, callback)-> convetedKey = KeyBuilder.getConvertedFolderKey(key) - s3Wrapper.deleteDirectory bucket, convetedKey, -> - s3Wrapper.sendStreamToS3 bucket, key, stream, -> + fsWrapper.deleteDirectory bucket, convetedKey, -> + fsWrapper.sendStreamToS3 bucket, key, stream, -> callback() deleteFile: (bucket, key, callback)-> convetedKey = KeyBuilder.getConvertedFolderKey(bucket, key) async.parallel [ - (done)-> s3Wrapper.deleteFile bucket, key, done - (done)-> s3Wrapper.deleteFile bucket, convetedKey, done + (done)-> fsWrapper.deleteFile bucket, key, done + (done)-> fsWrapper.deleteFile bucket, convetedKey, done ], callback getFile: (bucket, key, opts = {}, callback)-> @@ -31,16 +31,16 @@ module.exports = @_getConvertedFile bucket, key, opts, callback _getStandardFile: (bucket, key, opts, callback)-> - s3Wrapper.getFileStream bucket, key, (err, fileStream)-> + fsWrapper.getFileStream bucket, key, (err, fileStream)-> if err? logger.err bucket:bucket, key:key, opts:opts, "error getting fileStream" callback err, fileStream _getConvertedFile: (bucket, key, opts, callback)-> convetedKey = KeyBuilder.addCachingToKey(key, opts) - s3Wrapper.checkIfFileExists bucket, convetedKey, (err, exists)=> + fsWrapper.checkIfFileExists bucket, convetedKey, (err, exists)=> if exists - s3Wrapper.getFileStream bucket, convetedKey, callback + fsWrapper.getFileStream bucket, convetedKey, callback else @_getConvertedFileAndCache bucket, key, convetedKey, opts, callback @@ -53,13 +53,13 @@ module.exports = if err? logger.err err:err, fsPath:fsPath, bucket:bucket, key:key, opts:opts, "something went wrong optimising png file" return callback(err) - s3Wrapper.sendFileToS3 bucket, convetedKey, fsPath, (err)-> + fsWrapper.sendFileToS3 bucket, convetedKey, fsPath, (err)-> if err? - logger.err err:err, bucket:bucket, key:key, convetedKey:convetedKey, opts:opts, "something went wrong seing file to s3" + logger.err err:err, bucket:bucket, key:key, convetedKey:convetedKey, opts:opts, "something went wrong sending the file" return callback(err) - s3Wrapper.getFileStream bucket, convetedKey, callback + fsWrapper.getFileStream bucket, convetedKey, callback - _convertFile: (bucket, origonalKey, opts, callback)-> + _convertFile: (bucket, origonalKey, opts, callback)-> @_writeS3FileToDisk bucket, origonalKey, (err, origonalFsPath)-> if opts.format? 
FileConverter.convert origonalFsPath, opts.format, callback @@ -72,9 +72,6 @@ module.exports = _writeS3FileToDisk: (bucket, key, callback)-> - s3Wrapper.getFileStream bucket, key, (err, fileStream)-> + fsWrapper.getFileStream bucket, key, (err, fileStream)-> LocalFileWriter.writeStream fileStream, key, callback - - - diff --git a/services/filestore/app/coffee/fsWrapper.coffee b/services/filestore/app/coffee/fsWrapper.coffee new file mode 100644 index 0000000000..3815fcae12 --- /dev/null +++ b/services/filestore/app/coffee/fsWrapper.coffee @@ -0,0 +1,9 @@ +settings = require("settings-sharelatex") +wrappedFs = switch settings.filestoreWrapper + when "s3" then require("./s3Wrapper") + else null + +if !wrappedFs + throw new Error( "Unknown filestore wrapper #{settings.filestoreWrapper}" ) + +module.exports[name] = method for name,method of wrappedFs diff --git a/services/filestore/config/settings.development.coffee b/services/filestore/config/settings.development.coffee index 3f390837d8..f4f0c482e1 100644 --- a/services/filestore/config/settings.development.coffee +++ b/services/filestore/config/settings.development.coffee @@ -4,6 +4,11 @@ module.exports = port: 3009 host: "localhost" + # which backend to use + # current options are: + # "s3" - Amazon S3 + filestoreWrapper: "s3" + # ShareLaTeX stores binary files like images in S3. # Fill in your Amazon S3 credentials below. s3: diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index 29bc9d8205..fa13fe91b4 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -27,7 +27,7 @@ describe "FileController", -> @controller = SandboxedModule.require modulePath, requires: "./LocalFileWriter":@LocalFileWriter "./FileHandler": @FileHandler - "./s3Wrapper":@s3Wrapper + "./fsWrapper":@s3Wrapper "settings-sharelatex": @settings "logger-sharelatex": log:-> diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index 0b2ea034fe..f5c8184f78 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -34,7 +34,7 @@ describe "FileHandler", -> compressPng: sinon.stub() @handler = SandboxedModule.require modulePath, requires: "settings-sharelatex": @settings - "./s3Wrapper":@s3Wrapper + "./fsWrapper":@s3Wrapper "./LocalFileWriter":@LocalFileWriter "./FileConverter":@FileConverter "./KeyBuilder": @keyBuilder @@ -174,4 +174,4 @@ describe "FileHandler", -> - \ No newline at end of file + From c43533b5cbb37c94cd7d1046a59384886a131fad Mon Sep 17 00:00:00 2001 From: Oliver Matthews Date: Tue, 25 Feb 2014 10:54:51 +0000 Subject: [PATCH 027/555] move test wrapper into fsWrapper where appropriate --- .../filestore/app/coffee/fsWrapper.coffee | 22 ++++++--- .../filestore/app/coffee/testWrapper.coffee | 11 +++++ .../unit/coffee/FileControllerTests.coffee | 23 ++++------ .../test/unit/coffee/FileHandlerTests.coffee | 46 +++++++++---------- 4 files changed, 58 insertions(+), 44 deletions(-) create mode 100644 services/filestore/app/coffee/testWrapper.coffee diff --git a/services/filestore/app/coffee/fsWrapper.coffee b/services/filestore/app/coffee/fsWrapper.coffee index 3815fcae12..aff75d79f8 100644 --- a/services/filestore/app/coffee/fsWrapper.coffee +++ b/services/filestore/app/coffee/fsWrapper.coffee @@ -1,9 +1,19 @@ settings = 
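The fsWrapper introduced in patch 026 simply re-exports whichever backend module the settings name, so a replacement backend only has to expose the same method names FileHandler already calls. A hypothetical skeleton for such a backend is sketched below; the placeholder bodies are assumptions, only the method names come from the patches.

# Hypothetical alternative backend: the names are dictated by the wrapper,
# the bodies here are placeholders.
module.exports =
	sendFileToS3: (bucketName, key, fsPath, callback)-> callback()
	sendStreamToS3: (bucketName, key, readStream, callback)-> callback()
	getFileStream: (bucketName, key, callback)-> callback(null, null)
	copyFile: (bucketName, sourceKey, destKey, callback)-> callback()
	deleteFile: (bucketName, key, callback)-> callback()
	deleteDirectory: (bucketName, key, callback)-> callback()
	checkIfFileExists: (bucketName, key, callback)-> callback(null, false)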
require("settings-sharelatex") -wrappedFs = switch settings.filestoreWrapper - when "s3" then require("./s3Wrapper") - else null +logger = require("logger-sharelatex") +s3Wrapper = require("./s3Wrapper") +testWrapper = require("./testWrapper") -if !wrappedFs - throw new Error( "Unknown filestore wrapper #{settings.filestoreWrapper}" ) +module.exports = + selectBackend: (backend) -> + wrappedFs = switch backend + when "s3" then s3Wrapper + when "test" then testWrapper + else null -module.exports[name] = method for name,method of wrappedFs + if !wrappedFs + throw new Error( "Unknown filestore wrapper #{backend}" ) + + module.exports[name] = method for name,method of wrappedFs + +if settings.fileStoreWrapper? + module.exports.selectBackend(settings.fileStoreWrapper) diff --git a/services/filestore/app/coffee/testWrapper.coffee b/services/filestore/app/coffee/testWrapper.coffee new file mode 100644 index 0000000000..966d4869b0 --- /dev/null +++ b/services/filestore/app/coffee/testWrapper.coffee @@ -0,0 +1,11 @@ +sinon = require('sinon') +logger = require("logger-sharelatex") + +module.exports = + getFileStream: sinon.stub() + checkIfFileExists: sinon.stub() + deleteFile: sinon.stub() + deleteDirectory: sinon.stub() + sendStreamToS3: sinon.stub() + insertFile: sinon.stub() + copyFile: sinon.stub() diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index fa13fe91b4..7ff2c5f14c 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -9,13 +9,8 @@ SandboxedModule = require('sandboxed-module') describe "FileController", -> beforeEach -> - @s3Wrapper = - sendStreamToS3: sinon.stub() - getAndPipe: sinon.stub() - copyFile: sinon.stub() - deleteFile:sinon.stub() - @settings = + filestreamWrapper:"test" s3: buckets: user_files:"user_files" @@ -27,11 +22,13 @@ describe "FileController", -> @controller = SandboxedModule.require modulePath, requires: "./LocalFileWriter":@LocalFileWriter "./FileHandler": @FileHandler - "./fsWrapper":@s3Wrapper + "./fsWrapper":@FsWrapper "settings-sharelatex": @settings "logger-sharelatex": log:-> err:-> + @FsWrapper = require("../../../app/js/fsWrapper.js") + @FsWrapper.selectBackend("test") @project_id = "project_id" @file_id = "file_id" @bucket = "user_files" @@ -43,7 +40,7 @@ describe "FileController", -> params: project_id:@project_id file_id:@file_id - @res = + @res = setHeader: -> @fileStream = {} @@ -74,7 +71,7 @@ describe "FileController", -> describe "insertFile", -> - it "should send bucket name key and res to s3Wrapper", (done)-> + it "should send bucket name key and res to FsWrapper", (done)-> @FileHandler.insertFile.callsArgWith(3) @res.send = => @FileHandler.insertFile.calledWith(@bucket, @key, @req).should.equal true @@ -91,17 +88,17 @@ describe "FileController", -> project_id: @oldProject_id file_id: @oldFile_id - it "should send bucket name and both keys to s3Wrapper", (done)-> - @s3Wrapper.copyFile.callsArgWith(3) + it "should send bucket name and both keys to FsWrapper", (done)-> + @FsWrapper.copyFile.callsArgWith(3) @res.send = (code)=> code.should.equal 200 - @s3Wrapper.copyFile.calledWith(@bucket, "#{@oldProject_id}/#{@oldFile_id}", @key).should.equal true + @FsWrapper.copyFile.calledWith(@bucket, "#{@oldProject_id}/#{@oldFile_id}", @key).should.equal true done() @controller.copyFile @req, @res it "should send a 500 if there was an error", (done)-> - 
@s3Wrapper.copyFile.callsArgWith(3, "error") + @FsWrapper.copyFile.callsArgWith(3, "error") @res.send = (code)=> code.should.equal 500 done() diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index f5c8184f78..d642031263 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -11,16 +11,10 @@ describe "FileHandler", -> beforeEach -> @settings = + filestreamWrapper:"test" s3: buckets: user_files:"user_files" - @s3Wrapper = - getFileStream: sinon.stub() - checkIfFileExists: sinon.stub() - deleteFile: sinon.stub() - deleteDirectory: sinon.stub() - sendStreamToS3: sinon.stub() - insertFile: sinon.stub() @LocalFileWriter = writeStream: sinon.stub() @FileConverter = @@ -32,9 +26,11 @@ describe "FileHandler", -> getConvertedFolderKey: sinon.stub() @ImageOptimiser = compressPng: sinon.stub() + @FsWrapper = require("../../../app/js/fsWrapper.js") + @FsWrapper.selectBackend("test") @handler = SandboxedModule.require modulePath, requires: "settings-sharelatex": @settings - "./fsWrapper":@s3Wrapper + "./fsWrapper":@FsWrapper "./LocalFileWriter":@LocalFileWriter "./FileConverter":@FileConverter "./KeyBuilder": @keyBuilder @@ -51,33 +47,33 @@ describe "FileHandler", -> describe "insertFile", -> beforeEach -> @stream = {} - @s3Wrapper.deleteDirectory.callsArgWith(2) - @s3Wrapper.sendStreamToS3.callsArgWith(3) + @FsWrapper.deleteDirectory.callsArgWith(2) + @FsWrapper.sendStreamToS3.callsArgWith(3) it "should send file to s3", (done)-> @handler.insertFile @bucket, @key, @stream, => - @s3Wrapper.sendStreamToS3.calledWith(@bucket, @key, @stream).should.equal true + @FsWrapper.sendStreamToS3.calledWith(@bucket, @key, @stream).should.equal true done() it "should delete the convetedKey folder", (done)-> @keyBuilder.getConvertedFolderKey.returns(@stubbedConvetedKey) @handler.insertFile @bucket, @key, @stream, => - @s3Wrapper.deleteDirectory.calledWith(@bucket, @stubbedConvetedKey).should.equal true + @FsWrapper.deleteDirectory.calledWith(@bucket, @stubbedConvetedKey).should.equal true done() describe "deleteFile", -> beforeEach -> @keyBuilder.getConvertedFolderKey.returns(@stubbedConvetedKey) - @s3Wrapper.deleteFile.callsArgWith(2) + @FsWrapper.deleteFile.callsArgWith(2) it "should tell the s3 wrapper to delete the file", (done)-> @handler.deleteFile @bucket, @key, => - @s3Wrapper.deleteFile.calledWith(@bucket, @key).should.equal true + @FsWrapper.deleteFile.calledWith(@bucket, @key).should.equal true done() it "should tell the s3 wrapper to delete the cached foler", (done)-> @handler.deleteFile @bucket, @key, => - @s3Wrapper.deleteFile.calledWith(@bucket, @stubbedConvetedKey).should.equal true + @FsWrapper.deleteFile.calledWith(@bucket, @stubbedConvetedKey).should.equal true done() describe "getFile", -> @@ -103,11 +99,11 @@ describe "FileHandler", -> beforeEach -> @fileStream = {on:->} - @s3Wrapper.getFileStream.callsArgWith(2, "err", @fileStream) + @FsWrapper.getFileStream.callsArgWith(2, "err", @fileStream) it "should get the stream from s3 ", (done)-> @handler.getFile @bucket, @key, null, => - @s3Wrapper.getFileStream.calledWith(@bucket, @key).should.equal true + @FsWrapper.getFileStream.calledWith(@bucket, @key).should.equal true done() it "should return the stream and error", (done)-> @@ -119,14 +115,14 @@ describe "FileHandler", -> describe "_getConvertedFile", -> it "should getFileStream if it does exists", (done)-> - 
@s3Wrapper.checkIfFileExists.callsArgWith(2, null, true) - @s3Wrapper.getFileStream.callsArgWith(2) + @FsWrapper.checkIfFileExists.callsArgWith(2, null, true) + @FsWrapper.getFileStream.callsArgWith(2) @handler._getConvertedFile @bucket, @key, {}, => - @s3Wrapper.getFileStream.calledWith(@bucket).should.equal true + @FsWrapper.getFileStream.calledWith(@bucket).should.equal true done() it "should call _getConvertedFileAndCache if it does exists", (done)-> - @s3Wrapper.checkIfFileExists.callsArgWith(2, null, false) + @FsWrapper.checkIfFileExists.callsArgWith(2, null, false) @handler._getConvertedFileAndCache = sinon.stub().callsArgWith(4) @handler._getConvertedFile @bucket, @key, {}, => @handler._getConvertedFileAndCache.calledWith(@bucket, @key).should.equal true @@ -135,15 +131,15 @@ describe "FileHandler", -> describe "_getConvertedFileAndCache", -> it "should _convertFile ", (done)-> - @s3Wrapper.sendFileToS3 = sinon.stub().callsArgWith(3) - @s3Wrapper.getFileStream = sinon.stub().callsArgWith(2) + @FsWrapper.sendFileToS3 = sinon.stub().callsArgWith(3) + @FsWrapper.getFileStream = sinon.stub().callsArgWith(2) @convetedKey = @key+"converted" @handler._convertFile = sinon.stub().callsArgWith(3, null, @stubbedPath) @ImageOptimiser.compressPng = sinon.stub().callsArgWith(1) @handler._getConvertedFileAndCache @bucket, @key, @convetedKey, {}, => @handler._convertFile.called.should.equal true - @s3Wrapper.sendFileToS3.calledWith(@bucket, @convetedKey, @stubbedPath).should.equal true - @s3Wrapper.getFileStream.calledWith(@bucket, @convetedKey).should.equal true + @FsWrapper.sendFileToS3.calledWith(@bucket, @convetedKey, @stubbedPath).should.equal true + @FsWrapper.getFileStream.calledWith(@bucket, @convetedKey).should.equal true @ImageOptimiser.compressPng.calledWith(@stubbedPath).should.equal true done() From 5f87dfb3bf5b36bee17119f39c0b5d2799d54da4 Mon Sep 17 00:00:00 2001 From: Oliver Matthews Date: Tue, 25 Feb 2014 15:52:30 +0000 Subject: [PATCH 028/555] add unit tests for fsWrapper make other unit tests no longer depend on fsWrapper --- .../filestore/app/coffee/fsWrapper.coffee | 21 ++-- .../filestore/app/coffee/testWrapper.coffee | 11 --- .../config/settings.development.coffee | 2 +- .../unit/coffee/FileControllerTests.coffee | 12 ++- .../test/unit/coffee/FileHandlerTests.coffee | 13 ++- .../test/unit/coffee/fsWrapperTests.coffee | 96 +++++++++++++++++++ 6 files changed, 118 insertions(+), 37 deletions(-) delete mode 100644 services/filestore/app/coffee/testWrapper.coffee create mode 100644 services/filestore/test/unit/coffee/fsWrapperTests.coffee diff --git a/services/filestore/app/coffee/fsWrapper.coffee b/services/filestore/app/coffee/fsWrapper.coffee index aff75d79f8..be761cd07e 100644 --- a/services/filestore/app/coffee/fsWrapper.coffee +++ b/services/filestore/app/coffee/fsWrapper.coffee @@ -1,19 +1,10 @@ settings = require("settings-sharelatex") logger = require("logger-sharelatex") s3Wrapper = require("./s3Wrapper") -testWrapper = require("./testWrapper") -module.exports = - selectBackend: (backend) -> - wrappedFs = switch backend - when "s3" then s3Wrapper - when "test" then testWrapper - else null - - if !wrappedFs - throw new Error( "Unknown filestore wrapper #{backend}" ) - - module.exports[name] = method for name,method of wrappedFs - -if settings.fileStoreWrapper? 
- module.exports.selectBackend(settings.fileStoreWrapper) +logger.log backend:settings.filestoreBackend,"Loading backend" +module.exports = switch settings.filestoreBackend + when "s3" + s3Wrapper + else + throw new Error( "Unknown filestore backend: #{settings.filestoreBackend}" ) diff --git a/services/filestore/app/coffee/testWrapper.coffee b/services/filestore/app/coffee/testWrapper.coffee deleted file mode 100644 index 966d4869b0..0000000000 --- a/services/filestore/app/coffee/testWrapper.coffee +++ /dev/null @@ -1,11 +0,0 @@ -sinon = require('sinon') -logger = require("logger-sharelatex") - -module.exports = - getFileStream: sinon.stub() - checkIfFileExists: sinon.stub() - deleteFile: sinon.stub() - deleteDirectory: sinon.stub() - sendStreamToS3: sinon.stub() - insertFile: sinon.stub() - copyFile: sinon.stub() diff --git a/services/filestore/config/settings.development.coffee b/services/filestore/config/settings.development.coffee index f4f0c482e1..7bd2ea59a1 100644 --- a/services/filestore/config/settings.development.coffee +++ b/services/filestore/config/settings.development.coffee @@ -7,7 +7,7 @@ module.exports = # which backend to use # current options are: # "s3" - Amazon S3 - filestoreWrapper: "s3" + filestoreBackend: "s3" # ShareLaTeX stores binary files like images in S3. # Fill in your Amazon S3 credentials below. diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index 7ff2c5f14c..8a6a671dab 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -9,8 +9,12 @@ SandboxedModule = require('sandboxed-module') describe "FileController", -> beforeEach -> + @FsWrapper = + sendStreamToS3: sinon.stub() + copyFile: sinon.stub() + deleteFile:sinon.stub() + @settings = - filestreamWrapper:"test" s3: buckets: user_files:"user_files" @@ -27,17 +31,15 @@ describe "FileController", -> "logger-sharelatex": log:-> err:-> - @FsWrapper = require("../../../app/js/fsWrapper.js") - @FsWrapper.selectBackend("test") @project_id = "project_id" @file_id = "file_id" @bucket = "user_files" @key = "#{@project_id}/#{@file_id}" - @req = + @req = key:@key bucket:@bucket query:{} - params: + params: project_id:@project_id file_id:@file_id @res = diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index d642031263..2c23c2ec17 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -1,4 +1,3 @@ - assert = require("chai").assert sinon = require('sinon') chai = require('chai') @@ -11,23 +10,27 @@ describe "FileHandler", -> beforeEach -> @settings = - filestreamWrapper:"test" s3: buckets: user_files:"user_files" + @FsWrapper = + getFileStream: sinon.stub() + checkIfFileExists: sinon.stub() + deleteFile: sinon.stub() + deleteDirectory: sinon.stub() + sendStreamToS3: sinon.stub() + insertFile: sinon.stub() @LocalFileWriter = writeStream: sinon.stub() @FileConverter = convert: sinon.stub() thumbnail: sinon.stub() preview: sinon.stub() - @keyBuilder = + @keyBuilder = addCachingToKey: sinon.stub() getConvertedFolderKey: sinon.stub() @ImageOptimiser = compressPng: sinon.stub() - @FsWrapper = require("../../../app/js/fsWrapper.js") - @FsWrapper.selectBackend("test") @handler = SandboxedModule.require modulePath, requires: "settings-sharelatex": @settings 
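After patch 028 the wrapper is resolved once, when fsWrapper is first required, from settings.filestoreBackend. A minimal settings sketch for the s3 case; the credential values are placeholders, only the key names come from the patches.

# Development settings sketch; "s3" is the only recognised backend at this point.
module.exports =
	filestoreBackend: "s3"
	s3:
		key: "AWS_KEY"        # placeholder
		secret: "AWS_SECRET"  # placeholder
		buckets:
			user_files: "user_files"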
"./fsWrapper":@FsWrapper diff --git a/services/filestore/test/unit/coffee/fsWrapperTests.coffee b/services/filestore/test/unit/coffee/fsWrapperTests.coffee new file mode 100644 index 0000000000..d8543766b2 --- /dev/null +++ b/services/filestore/test/unit/coffee/fsWrapperTests.coffee @@ -0,0 +1,96 @@ +logger = require("logger-sharelatex") +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/fsWrapper.js" +SandboxedModule = require('sandboxed-module') + + +describe "fsWrapperTests", -> + + beforeEach -> + @s3Wrapper = + getFileStream: sinon.stub() + checkIfFileExists: sinon.stub() + deleteFile: sinon.stub() + deleteDirectory: sinon.stub() + sendStreamToS3: sinon.stub() + insertFile: sinon.stub() + + describe "test s3 mixin", -> + beforeEach -> + @settings = + filestoreBackend: "s3" + @requires = + "./s3Wrapper": @s3Wrapper + "settings-sharelatex": @settings + "logger-sharelatex": + log:-> + err:-> + @fsWrapper = SandboxedModule.require modulePath, requires: @requires + + it "should load getFileStream", (done) -> + @fsWrapper.should.respondTo("getFileStream") + @fsWrapper.getFileStream() + @s3Wrapper.getFileStream.calledOnce.should.equal true + done() + + it "should load checkIfFileExists", (done) -> + @fsWrapper.checkIfFileExists() + @s3Wrapper.checkIfFileExists.calledOnce.should.equal true + done() + + it "should load deleteFile", (done) -> + @fsWrapper.deleteFile() + @s3Wrapper.deleteFile.calledOnce.should.equal true + done() + + it "should load deleteDirectory", (done) -> + @fsWrapper.deleteDirectory() + @s3Wrapper.deleteDirectory.calledOnce.should.equal true + done() + + it "should load sendStreamToS3", (done) -> + @fsWrapper.sendStreamToS3() + @s3Wrapper.sendStreamToS3.calledOnce.should.equal true + done() + + it "should load insertFile", (done) -> + @fsWrapper.insertFile() + @s3Wrapper.insertFile.calledOnce.should.equal true + done() + + describe "test invalid mixins", -> + + it "should not load a null wrapper", (done) -> + @settings = + @requires = + "./s3Wrapper": @s3Wrapper + "settings-sharelatex": @settings + "logger-sharelatex": + log:-> + err:-> + try + @fsWrapper=SandboxedModule.require modulePath, requires: @requires + catch error + assert.equal("Unknown filestore backend: null",error.message) + done() + + it "should not load an invalid wrapper", (done) -> + @settings = + filestoreBackend:"magic" + @requires = + "./s3Wrapper": @s3Wrapper + "settings-sharelatex": @settings + "logger-sharelatex": + log:-> + err:-> + try + @fsWrapper=SandboxedModule.require modulePath, requires: @requires + catch error + assert.equal("Unknown filestore backend: magic",error.message) + done() + + From 49ffeb633d2d4b741a721af302d275b28075368f Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 26 Feb 2014 09:06:47 +0000 Subject: [PATCH 029/555] fix where cacheWarm param is read from and improved some logging --- services/filestore/app/coffee/FileController.coffee | 2 +- services/filestore/app/coffee/FileConverter.coffee | 10 +++++----- .../test/unit/coffee/FileControllerTests.coffee | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 85046b222a..842f564f77 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -17,7 +17,7 @@ module.exports = if err? 
logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" res.send 500 - else if req.params.cacheWarm + else if req.query.cacheWarm logger.log key:key, bucket:bucket, format:format, style:style, "request is only for cache warm so not sending stream" res.send 200 else diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index 3093c888c2..337f721ffa 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -22,9 +22,9 @@ module.exports = exec args, opts, (err, stdout, stderr)-> timer.done() if err? - logger.err err:err, stderr:stderr, sourcePath:sourcePath, requestedFormat:requestedFormat, "something went wrong converting file" + logger.err err:err, stderr:stderr, sourcePath:sourcePath, requestedFormat:requestedFormat, destPath:destPath, "something went wrong converting file" else - logger.log sourcePath:sourcePath, requestedFormat:requestedFormat, "finished converting file" + logger.log sourcePath:sourcePath, requestedFormat:requestedFormat, destPath:destPath, "finished converting file" callback(err, destPath) thumbnail: (sourcePath, callback)-> @@ -43,7 +43,7 @@ module.exports = if err? logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to preview" else - logger.log sourcePath:sourcePath, "finished thumbnailing file" + logger.log sourcePath:sourcePath, destPath:destPath, "finished thumbnailing file" callback(err, destPath) preview: (sourcePath, callback)-> @@ -60,7 +60,7 @@ module.exports = timeout: twentySeconds exec args, opts,(err, stdout, stderr)-> if err? - logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to preview" + logger.err err:err, stderr:stderr, sourcePath:sourcePath, destPath:destPath, "something went wrong converting file to preview" else - logger.log sourcePath:sourcePath, "finished converting file to preview" + logger.log sourcePath:sourcePath, destPath:destPath, "finished converting file to preview" callback(err, destPath) diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index 8a6a671dab..db4ffda4bc 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -56,7 +56,7 @@ describe "FileController", -> @controller.getFile @req, @res it "should send a 200 if the cacheWarm param is true", (done)-> - @req.params.cacheWarm = true + @req.query.cacheWarm = true @FileHandler.getFile.callsArgWith(3, null, @fileStream) @res.send = (statusCode)=> statusCode.should.equal 200 From c9af5e54204f3ecd11edf7c53d19f2f1a38b6aa2 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 26 Feb 2014 09:15:37 +0000 Subject: [PATCH 030/555] Renamed fsWrapper to PersistorManager not 100% happy with that still but its the best I can see so far --- .../app/coffee/FileController.coffee | 4 ++-- .../filestore/app/coffee/FileHandler.coffee | 22 ++++++++--------- ...Wrapper.coffee => PersistorManager.coffee} | 2 +- .../unit/coffee/FileControllerTests.coffee | 2 +- .../test/unit/coffee/FileHandlerTests.coffee | 2 +- ...ts.coffee => PersistorManagerTests.coffee} | 24 +++++++++---------- 6 files changed, 28 insertions(+), 28 deletions(-) rename services/filestore/app/coffee/{fsWrapper.coffee => PersistorManager.coffee} (81%) rename 
services/filestore/test/unit/coffee/{fsWrapperTests.coffee => PersistorManagerTests.coffee} (76%) diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 842f564f77..291e188da1 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -1,4 +1,4 @@ -fsWrapper = require("./fsWrapper") +PersistorManager = require("./PersistorManager") settings = require("settings-sharelatex") logger = require("logger-sharelatex") FileHandler = require("./FileHandler") @@ -37,7 +37,7 @@ module.exports = oldProject_id = req.body.source.project_id oldFile_id = req.body.source.file_id logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "reciving request to copy file" - fsWrapper.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)-> + PersistorManager.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)-> if err? logger.log err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file" res.send 500 diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index eea1469845..40a8d2d5ad 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -1,5 +1,5 @@ settings = require("settings-sharelatex") -fsWrapper = require("./fsWrapper") +PersistorManager = require("./PersistorManager") LocalFileWriter = require("./LocalFileWriter") logger = require("logger-sharelatex") FileConverter = require("./FileConverter") @@ -12,15 +12,15 @@ module.exports = insertFile: (bucket, key, stream, callback)-> convetedKey = KeyBuilder.getConvertedFolderKey(key) - fsWrapper.deleteDirectory bucket, convetedKey, -> - fsWrapper.sendStreamToS3 bucket, key, stream, -> + PersistorManager.deleteDirectory bucket, convetedKey, -> + PersistorManager.sendStreamToS3 bucket, key, stream, -> callback() deleteFile: (bucket, key, callback)-> convetedKey = KeyBuilder.getConvertedFolderKey(bucket, key) async.parallel [ - (done)-> fsWrapper.deleteFile bucket, key, done - (done)-> fsWrapper.deleteFile bucket, convetedKey, done + (done)-> PersistorManager.deleteFile bucket, key, done + (done)-> PersistorManager.deleteFile bucket, convetedKey, done ], callback getFile: (bucket, key, opts = {}, callback)-> @@ -31,16 +31,16 @@ module.exports = @_getConvertedFile bucket, key, opts, callback _getStandardFile: (bucket, key, opts, callback)-> - fsWrapper.getFileStream bucket, key, (err, fileStream)-> + PersistorManager.getFileStream bucket, key, (err, fileStream)-> if err? logger.err bucket:bucket, key:key, opts:opts, "error getting fileStream" callback err, fileStream _getConvertedFile: (bucket, key, opts, callback)-> convetedKey = KeyBuilder.addCachingToKey(key, opts) - fsWrapper.checkIfFileExists bucket, convetedKey, (err, exists)=> + PersistorManager.checkIfFileExists bucket, convetedKey, (err, exists)=> if exists - fsWrapper.getFileStream bucket, convetedKey, callback + PersistorManager.getFileStream bucket, convetedKey, callback else @_getConvertedFileAndCache bucket, key, convetedKey, opts, callback @@ -53,11 +53,11 @@ module.exports = if err? logger.err err:err, fsPath:fsPath, bucket:bucket, key:key, opts:opts, "something went wrong optimising png file" return callback(err) - fsWrapper.sendFileToS3 bucket, convetedKey, fsPath, (err)-> + PersistorManager.sendFileToS3 bucket, convetedKey, fsPath, (err)-> if err? 
logger.err err:err, bucket:bucket, key:key, convetedKey:convetedKey, opts:opts, "something went wrong sending the file" return callback(err) - fsWrapper.getFileStream bucket, convetedKey, callback + PersistorManager.getFileStream bucket, convetedKey, callback _convertFile: (bucket, origonalKey, opts, callback)-> @_writeS3FileToDisk bucket, origonalKey, (err, origonalFsPath)-> @@ -72,6 +72,6 @@ module.exports = _writeS3FileToDisk: (bucket, key, callback)-> - fsWrapper.getFileStream bucket, key, (err, fileStream)-> + PersistorManager.getFileStream bucket, key, (err, fileStream)-> LocalFileWriter.writeStream fileStream, key, callback diff --git a/services/filestore/app/coffee/fsWrapper.coffee b/services/filestore/app/coffee/PersistorManager.coffee similarity index 81% rename from services/filestore/app/coffee/fsWrapper.coffee rename to services/filestore/app/coffee/PersistorManager.coffee index be761cd07e..1dd27bcc4e 100644 --- a/services/filestore/app/coffee/fsWrapper.coffee +++ b/services/filestore/app/coffee/PersistorManager.coffee @@ -2,7 +2,7 @@ settings = require("settings-sharelatex") logger = require("logger-sharelatex") s3Wrapper = require("./s3Wrapper") -logger.log backend:settings.filestoreBackend,"Loading backend" +logger.log backend:settings.filestoreBackend, "Loading backend" module.exports = switch settings.filestoreBackend when "s3" s3Wrapper diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index db4ffda4bc..aa2aa35588 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -26,7 +26,7 @@ describe "FileController", -> @controller = SandboxedModule.require modulePath, requires: "./LocalFileWriter":@LocalFileWriter "./FileHandler": @FileHandler - "./fsWrapper":@FsWrapper + "./PersistorManager":@FsWrapper "settings-sharelatex": @settings "logger-sharelatex": log:-> diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index 2c23c2ec17..cee48e061b 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -33,7 +33,7 @@ describe "FileHandler", -> compressPng: sinon.stub() @handler = SandboxedModule.require modulePath, requires: "settings-sharelatex": @settings - "./fsWrapper":@FsWrapper + "./PersistorManager":@FsWrapper "./LocalFileWriter":@LocalFileWriter "./FileConverter":@FileConverter "./KeyBuilder": @keyBuilder diff --git a/services/filestore/test/unit/coffee/fsWrapperTests.coffee b/services/filestore/test/unit/coffee/PersistorManagerTests.coffee similarity index 76% rename from services/filestore/test/unit/coffee/fsWrapperTests.coffee rename to services/filestore/test/unit/coffee/PersistorManagerTests.coffee index d8543766b2..c597fe32c5 100644 --- a/services/filestore/test/unit/coffee/fsWrapperTests.coffee +++ b/services/filestore/test/unit/coffee/PersistorManagerTests.coffee @@ -4,11 +4,11 @@ sinon = require('sinon') chai = require('chai') should = chai.should() expect = chai.expect -modulePath = "../../../app/js/fsWrapper.js" +modulePath = "../../../app/js/PersistorManager.js" SandboxedModule = require('sandboxed-module') -describe "fsWrapperTests", -> +describe "PersistorManagerTests", -> beforeEach -> @s3Wrapper = @@ -29,36 +29,36 @@ describe "fsWrapperTests", -> "logger-sharelatex": log:-> err:-> - @fsWrapper = 
SandboxedModule.require modulePath, requires: @requires + @PersistorManager = SandboxedModule.require modulePath, requires: @requires it "should load getFileStream", (done) -> - @fsWrapper.should.respondTo("getFileStream") - @fsWrapper.getFileStream() + @PersistorManager.should.respondTo("getFileStream") + @PersistorManager.getFileStream() @s3Wrapper.getFileStream.calledOnce.should.equal true done() it "should load checkIfFileExists", (done) -> - @fsWrapper.checkIfFileExists() + @PersistorManager.checkIfFileExists() @s3Wrapper.checkIfFileExists.calledOnce.should.equal true done() it "should load deleteFile", (done) -> - @fsWrapper.deleteFile() + @PersistorManager.deleteFile() @s3Wrapper.deleteFile.calledOnce.should.equal true done() it "should load deleteDirectory", (done) -> - @fsWrapper.deleteDirectory() + @PersistorManager.deleteDirectory() @s3Wrapper.deleteDirectory.calledOnce.should.equal true done() it "should load sendStreamToS3", (done) -> - @fsWrapper.sendStreamToS3() + @PersistorManager.sendStreamToS3() @s3Wrapper.sendStreamToS3.calledOnce.should.equal true done() it "should load insertFile", (done) -> - @fsWrapper.insertFile() + @PersistorManager.insertFile() @s3Wrapper.insertFile.calledOnce.should.equal true done() @@ -73,7 +73,7 @@ describe "fsWrapperTests", -> log:-> err:-> try - @fsWrapper=SandboxedModule.require modulePath, requires: @requires + @PersistorManager=SandboxedModule.require modulePath, requires: @requires catch error assert.equal("Unknown filestore backend: null",error.message) done() @@ -88,7 +88,7 @@ describe "fsWrapperTests", -> log:-> err:-> try - @fsWrapper=SandboxedModule.require modulePath, requires: @requires + @PersistorManager=SandboxedModule.require modulePath, requires: @requires catch error assert.equal("Unknown filestore backend: magic",error.message) done() From 4b5f9ad4fbd64256649fc91ca47a0361358a2880 Mon Sep 17 00:00:00 2001 From: Oliver Matthews Date: Tue, 25 Feb 2014 16:38:13 +0000 Subject: [PATCH 031/555] make s3 chosen if no config option specified rename *wrapper to FilestoreManager remove ToS3 from filestore commands rename configuration option for filestore. add details of interface to FilestoreWrapper.coffee for reference. 
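With the PersistorManager rename in patch 030 and the method renames described above, callers talk to storage in backend-neutral terms and the ToS3 suffixes disappear from call sites. A short usage sketch of the renamed API; the bucket, key and file path values are illustrative.

# Sketch of the backend-neutral API after patches 030/031; error handling trimmed.
PersistorManager = require("./PersistorManager")

bucket = "user_files"
key = "some_project_id/some_file_id"   # placeholder ids, not from the patches

PersistorManager.sendFile bucket, key, "/tmp/example.png", (err)->
	return console.log err if err?
	PersistorManager.getFileStream bucket, key, (err, fileStream)->
		fileStream.pipe process.stdout unless err?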
remove api comments rename settings.filestoreManager to settings.persistorManager --- .../filestore/app/coffee/FileHandler.coffee | 6 +- .../app/coffee/PersistorManager.coffee | 12 ++-- ...apper.coffee => S3PersistorManager.coffee} | 6 +- .../config/settings.development.coffee | 7 +- .../unit/coffee/FileControllerTests.coffee | 16 ++--- .../test/unit/coffee/FileHandlerTests.coffee | 48 ++++++------- .../unit/coffee/PersistorManagerTests.coffee | 61 +++++++++-------- ....coffee => S3PersistorManagerTests.coffee} | 68 +++++++++---------- 8 files changed, 114 insertions(+), 110 deletions(-) rename services/filestore/app/coffee/{s3Wrapper.coffee => S3PersistorManager.coffee} (96%) rename services/filestore/test/unit/coffee/{s3WrapperTests.coffee => S3PersistorManagerTests.coffee} (67%) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 40a8d2d5ad..ece883f772 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -13,7 +13,7 @@ module.exports = insertFile: (bucket, key, stream, callback)-> convetedKey = KeyBuilder.getConvertedFolderKey(key) PersistorManager.deleteDirectory bucket, convetedKey, -> - PersistorManager.sendStreamToS3 bucket, key, stream, -> + PersistorManager.sendStream bucket, key, stream, -> callback() deleteFile: (bucket, key, callback)-> @@ -53,7 +53,7 @@ module.exports = if err? logger.err err:err, fsPath:fsPath, bucket:bucket, key:key, opts:opts, "something went wrong optimising png file" return callback(err) - PersistorManager.sendFileToS3 bucket, convetedKey, fsPath, (err)-> + PersistorManager.sendFile bucket, convetedKey, fsPath, (err)-> if err? logger.err err:err, bucket:bucket, key:key, convetedKey:convetedKey, opts:opts, "something went wrong sending the file" return callback(err) @@ -67,7 +67,7 @@ module.exports = FileConverter.thumbnail origonalFsPath, callback else if opts.style == "preview" FileConverter.preview origonalFsPath, callback - else + else throw new Error("should have specified opts to convert file with #{JSON.stringify(opts)}") diff --git a/services/filestore/app/coffee/PersistorManager.coffee b/services/filestore/app/coffee/PersistorManager.coffee index 1dd27bcc4e..7bd55c47dd 100644 --- a/services/filestore/app/coffee/PersistorManager.coffee +++ b/services/filestore/app/coffee/PersistorManager.coffee @@ -1,10 +1,10 @@ settings = require("settings-sharelatex") logger = require("logger-sharelatex") -s3Wrapper = require("./s3Wrapper") +S3PersistorManager = require("./S3PersistorManager") -logger.log backend:settings.filestoreBackend, "Loading backend" -module.exports = switch settings.filestoreBackend - when "s3" - s3Wrapper +logger.log backend:settings.persistorManager, "Loading backend" +module.exports = switch settings.persistorManager + when "s3",null + S3PersistorManager else - throw new Error( "Unknown filestore backend: #{settings.filestoreBackend}" ) + throw new Error( "Unknown filestore backend: #{settings.persistorManager}" ) diff --git a/services/filestore/app/coffee/s3Wrapper.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee similarity index 96% rename from services/filestore/app/coffee/s3Wrapper.coffee rename to services/filestore/app/coffee/S3PersistorManager.coffee index 8d2492ba53..dd65b79abc 100644 --- a/services/filestore/app/coffee/s3Wrapper.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -34,7 +34,7 @@ buildDefaultOptions = (bucketName, method, key)-> module.exports = 
- sendFileToS3: (bucketName, key, fsPath, callback)-> + sendFile: (bucketName, key, fsPath, callback)-> s3Client = knox.createClient key: settings.s3.key secret: settings.s3.secret @@ -57,7 +57,7 @@ module.exports = callback err - sendStreamToS3: (bucketName, key, readStream, callback)-> + sendStream: (bucketName, key, readStream, callback)-> logger.log bucketName:bucketName, key:key, "sending file to s3" readStream.on "error", (err)-> logger.err bucketName:bucketName, key:key, "error on stream to send to s3" @@ -65,7 +65,7 @@ module.exports = if err? logger.err bucketName:bucketName, key:key, fsPath:fsPath, err:err, "something went wrong writing stream to disk" return callback(err) - @sendFileToS3 bucketName, key, fsPath, callback + @sendFile bucketName, key, fsPath, callback getFileStream: (bucketName, key, callback = (err, res)->)-> logger.log bucketName:bucketName, key:key, "getting file from s3" diff --git a/services/filestore/config/settings.development.coffee b/services/filestore/config/settings.development.coffee index 7bd2ea59a1..b8bebc6b31 100644 --- a/services/filestore/config/settings.development.coffee +++ b/services/filestore/config/settings.development.coffee @@ -4,10 +4,11 @@ module.exports = port: 3009 host: "localhost" - # which backend to use + # which persistor to use for file storage # current options are: # "s3" - Amazon S3 - filestoreBackend: "s3" + # if no persistor is chosen, s3 will be used by default + persistorManager: "s3" # ShareLaTeX stores binary files like images in S3. # Fill in your Amazon S3 credentials below. @@ -20,7 +21,7 @@ module.exports = # Filestore health check # ---------------------- - # Project and file details to check in filestore when calling /health_check + # Project and file details to check in persistor when calling /health_check # health_check: # project_id: "" # file_id: "" diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index aa2aa35588..6c1c92dba3 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -9,8 +9,8 @@ SandboxedModule = require('sandboxed-module') describe "FileController", -> beforeEach -> - @FsWrapper = - sendStreamToS3: sinon.stub() + @PersistorManager = + sendStream: sinon.stub() copyFile: sinon.stub() deleteFile:sinon.stub() @@ -26,7 +26,7 @@ describe "FileController", -> @controller = SandboxedModule.require modulePath, requires: "./LocalFileWriter":@LocalFileWriter "./FileHandler": @FileHandler - "./PersistorManager":@FsWrapper + "./PersistorManager":@PersistorManager "settings-sharelatex": @settings "logger-sharelatex": log:-> @@ -73,7 +73,7 @@ describe "FileController", -> describe "insertFile", -> - it "should send bucket name key and res to FsWrapper", (done)-> + it "should send bucket name key and res to PersistorManager", (done)-> @FileHandler.insertFile.callsArgWith(3) @res.send = => @FileHandler.insertFile.calledWith(@bucket, @key, @req).should.equal true @@ -90,17 +90,17 @@ describe "FileController", -> project_id: @oldProject_id file_id: @oldFile_id - it "should send bucket name and both keys to FsWrapper", (done)-> - @FsWrapper.copyFile.callsArgWith(3) + it "should send bucket name and both keys to PersistorManager", (done)-> + @PersistorManager.copyFile.callsArgWith(3) @res.send = (code)=> code.should.equal 200 - @FsWrapper.copyFile.calledWith(@bucket, "#{@oldProject_id}/#{@oldFile_id}", @key).should.equal true + 
@PersistorManager.copyFile.calledWith(@bucket, "#{@oldProject_id}/#{@oldFile_id}", @key).should.equal true done() @controller.copyFile @req, @res it "should send a 500 if there was an error", (done)-> - @FsWrapper.copyFile.callsArgWith(3, "error") + @PersistorManager.copyFile.callsArgWith(3, "error") @res.send = (code)=> code.should.equal 500 done() diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index cee48e061b..c2bed383ab 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -13,12 +13,12 @@ describe "FileHandler", -> s3: buckets: user_files:"user_files" - @FsWrapper = + @PersistorManager = getFileStream: sinon.stub() checkIfFileExists: sinon.stub() deleteFile: sinon.stub() deleteDirectory: sinon.stub() - sendStreamToS3: sinon.stub() + sendStream: sinon.stub() insertFile: sinon.stub() @LocalFileWriter = writeStream: sinon.stub() @@ -33,7 +33,7 @@ describe "FileHandler", -> compressPng: sinon.stub() @handler = SandboxedModule.require modulePath, requires: "settings-sharelatex": @settings - "./PersistorManager":@FsWrapper + "./PersistorManager":@PersistorManager "./LocalFileWriter":@LocalFileWriter "./FileConverter":@FileConverter "./KeyBuilder": @keyBuilder @@ -50,33 +50,33 @@ describe "FileHandler", -> describe "insertFile", -> beforeEach -> @stream = {} - @FsWrapper.deleteDirectory.callsArgWith(2) - @FsWrapper.sendStreamToS3.callsArgWith(3) + @PersistorManager.deleteDirectory.callsArgWith(2) + @PersistorManager.sendStream.callsArgWith(3) - it "should send file to s3", (done)-> + it "should send file to the filestore", (done)-> @handler.insertFile @bucket, @key, @stream, => - @FsWrapper.sendStreamToS3.calledWith(@bucket, @key, @stream).should.equal true + @PersistorManager.sendStream.calledWith(@bucket, @key, @stream).should.equal true done() it "should delete the convetedKey folder", (done)-> @keyBuilder.getConvertedFolderKey.returns(@stubbedConvetedKey) @handler.insertFile @bucket, @key, @stream, => - @FsWrapper.deleteDirectory.calledWith(@bucket, @stubbedConvetedKey).should.equal true + @PersistorManager.deleteDirectory.calledWith(@bucket, @stubbedConvetedKey).should.equal true done() describe "deleteFile", -> beforeEach -> @keyBuilder.getConvertedFolderKey.returns(@stubbedConvetedKey) - @FsWrapper.deleteFile.callsArgWith(2) + @PersistorManager.deleteFile.callsArgWith(2) - it "should tell the s3 wrapper to delete the file", (done)-> + it "should tell the filestore manager to delete the file", (done)-> @handler.deleteFile @bucket, @key, => - @FsWrapper.deleteFile.calledWith(@bucket, @key).should.equal true + @PersistorManager.deleteFile.calledWith(@bucket, @key).should.equal true done() - it "should tell the s3 wrapper to delete the cached foler", (done)-> + it "should tell the filestore manager to delete the cached foler", (done)-> @handler.deleteFile @bucket, @key, => - @FsWrapper.deleteFile.calledWith(@bucket, @stubbedConvetedKey).should.equal true + @PersistorManager.deleteFile.calledWith(@bucket, @stubbedConvetedKey).should.equal true done() describe "getFile", -> @@ -102,11 +102,11 @@ describe "FileHandler", -> beforeEach -> @fileStream = {on:->} - @FsWrapper.getFileStream.callsArgWith(2, "err", @fileStream) + @PersistorManager.getFileStream.callsArgWith(2, "err", @fileStream) - it "should get the stream from s3 ", (done)-> + it "should get the stream", (done)-> @handler.getFile @bucket, @key, null, => - 
@FsWrapper.getFileStream.calledWith(@bucket, @key).should.equal true + @PersistorManager.getFileStream.calledWith(@bucket, @key).should.equal true done() it "should return the stream and error", (done)-> @@ -118,14 +118,14 @@ describe "FileHandler", -> describe "_getConvertedFile", -> it "should getFileStream if it does exists", (done)-> - @FsWrapper.checkIfFileExists.callsArgWith(2, null, true) - @FsWrapper.getFileStream.callsArgWith(2) + @PersistorManager.checkIfFileExists.callsArgWith(2, null, true) + @PersistorManager.getFileStream.callsArgWith(2) @handler._getConvertedFile @bucket, @key, {}, => - @FsWrapper.getFileStream.calledWith(@bucket).should.equal true + @PersistorManager.getFileStream.calledWith(@bucket).should.equal true done() it "should call _getConvertedFileAndCache if it does exists", (done)-> - @FsWrapper.checkIfFileExists.callsArgWith(2, null, false) + @PersistorManager.checkIfFileExists.callsArgWith(2, null, false) @handler._getConvertedFileAndCache = sinon.stub().callsArgWith(4) @handler._getConvertedFile @bucket, @key, {}, => @handler._getConvertedFileAndCache.calledWith(@bucket, @key).should.equal true @@ -134,15 +134,15 @@ describe "FileHandler", -> describe "_getConvertedFileAndCache", -> it "should _convertFile ", (done)-> - @FsWrapper.sendFileToS3 = sinon.stub().callsArgWith(3) - @FsWrapper.getFileStream = sinon.stub().callsArgWith(2) + @PersistorManager.sendFile = sinon.stub().callsArgWith(3) + @PersistorManager.getFileStream = sinon.stub().callsArgWith(2) @convetedKey = @key+"converted" @handler._convertFile = sinon.stub().callsArgWith(3, null, @stubbedPath) @ImageOptimiser.compressPng = sinon.stub().callsArgWith(1) @handler._getConvertedFileAndCache @bucket, @key, @convetedKey, {}, => @handler._convertFile.called.should.equal true - @FsWrapper.sendFileToS3.calledWith(@bucket, @convetedKey, @stubbedPath).should.equal true - @FsWrapper.getFileStream.calledWith(@bucket, @convetedKey).should.equal true + @PersistorManager.sendFile.calledWith(@bucket, @convetedKey, @stubbedPath).should.equal true + @PersistorManager.getFileStream.calledWith(@bucket, @convetedKey).should.equal true @ImageOptimiser.compressPng.calledWith(@stubbedPath).should.equal true done() diff --git a/services/filestore/test/unit/coffee/PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/PersistorManagerTests.coffee index c597fe32c5..e82700cf1a 100644 --- a/services/filestore/test/unit/coffee/PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/PersistorManagerTests.coffee @@ -11,20 +11,20 @@ SandboxedModule = require('sandboxed-module') describe "PersistorManagerTests", -> beforeEach -> - @s3Wrapper = + @S3PersistorManager = getFileStream: sinon.stub() checkIfFileExists: sinon.stub() deleteFile: sinon.stub() deleteDirectory: sinon.stub() - sendStreamToS3: sinon.stub() + sendStream: sinon.stub() insertFile: sinon.stub() describe "test s3 mixin", -> beforeEach -> @settings = - filestoreBackend: "s3" + persistorManager: "s3" @requires = - "./s3Wrapper": @s3Wrapper + "./S3PersistorManager": @S3PersistorManager "settings-sharelatex": @settings "logger-sharelatex": log:-> @@ -34,63 +34,66 @@ describe "PersistorManagerTests", -> it "should load getFileStream", (done) -> @PersistorManager.should.respondTo("getFileStream") @PersistorManager.getFileStream() - @s3Wrapper.getFileStream.calledOnce.should.equal true + @S3PersistorManager.getFileStream.calledOnce.should.equal true done() it "should load checkIfFileExists", (done) -> @PersistorManager.checkIfFileExists() - 
@s3Wrapper.checkIfFileExists.calledOnce.should.equal true + @S3PersistorManager.checkIfFileExists.calledOnce.should.equal true done() it "should load deleteFile", (done) -> @PersistorManager.deleteFile() - @s3Wrapper.deleteFile.calledOnce.should.equal true + @S3PersistorManager.deleteFile.calledOnce.should.equal true done() it "should load deleteDirectory", (done) -> @PersistorManager.deleteDirectory() - @s3Wrapper.deleteDirectory.calledOnce.should.equal true + @S3PersistorManager.deleteDirectory.calledOnce.should.equal true done() - it "should load sendStreamToS3", (done) -> - @PersistorManager.sendStreamToS3() - @s3Wrapper.sendStreamToS3.calledOnce.should.equal true + it "should load sendStream", (done) -> + @PersistorManager.sendStream() + @S3PersistorManager.sendStream.calledOnce.should.equal true done() it "should load insertFile", (done) -> @PersistorManager.insertFile() - @s3Wrapper.insertFile.calledOnce.should.equal true + @S3PersistorManager.insertFile.calledOnce.should.equal true + done() + + describe "test unspecified mixins", -> + + it "should load s3 when no wrapper specified", (done) -> + @settings = + @requires = + "./S3PersistorManager": @S3PersistorManager + "settings-sharelatex": @settings + "logger-sharelatex": + log:-> + err:-> + @PersistorManager=SandboxedModule.require modulePath, requires: @requires + @PersistorManager.should.respondTo("getFileStream") + @PersistorManager.getFileStream() + @S3PersistorManager.getFileStream.calledOnce.should.equal true done() describe "test invalid mixins", -> - - it "should not load a null wrapper", (done) -> - @settings = - @requires = - "./s3Wrapper": @s3Wrapper - "settings-sharelatex": @settings - "logger-sharelatex": - log:-> - err:-> - try - @PersistorManager=SandboxedModule.require modulePath, requires: @requires - catch error - assert.equal("Unknown filestore backend: null",error.message) - done() - it "should not load an invalid wrapper", (done) -> @settings = - filestoreBackend:"magic" + persistorManager:"magic" @requires = - "./s3Wrapper": @s3Wrapper + "./S3PersistorManager": @S3PersistorManager "settings-sharelatex": @settings "logger-sharelatex": log:-> err:-> + @fsWrapper=null try @PersistorManager=SandboxedModule.require modulePath, requires: @requires catch error assert.equal("Unknown filestore backend: magic",error.message) + assert.isNull(@fsWrapper) done() diff --git a/services/filestore/test/unit/coffee/s3WrapperTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee similarity index 67% rename from services/filestore/test/unit/coffee/s3WrapperTests.coffee rename to services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 42023fea29..76872fb140 100644 --- a/services/filestore/test/unit/coffee/s3WrapperTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -3,10 +3,10 @@ sinon = require('sinon') chai = require('chai') should = chai.should() expect = chai.expect -modulePath = "../../../app/js/s3Wrapper.js" +modulePath = "../../../app/js/S3PersistorManager.js" SandboxedModule = require('sandboxed-module') -describe "s3WrapperTests", -> +describe "S3PersistorManagerTests", -> beforeEach -> @settings = @@ -39,7 +39,7 @@ describe "s3WrapperTests", -> describe "getFileStream", -> beforeEach -> - @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires it "should use correct key", (done)-> @@ -47,20 +47,20 @@ describe "s3WrapperTests", -> on:-> end:-> ) - 
@s3Wrapper.getFileStream @bucketName, @key, @fsPath, (err)=> + @S3PersistorManager.getFileStream @bucketName, @key, @fsPath, (err)=> @stubbedKnoxClient.get.calledWith(@key).should.equal true done() - describe "sendFileToS3", -> + describe "sendFile", -> beforeEach -> - @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires @stubbedKnoxClient.putFile.returns on:-> it "should put file with knox", (done)-> @LocalFileWriter.deleteFile.callsArgWith(1) @stubbedKnoxClient.putFile.callsArgWith(2, @error) - @s3Wrapper.sendFileToS3 @bucketName, @key, @fsPath, (err)=> + @S3PersistorManager.sendFile @bucketName, @key, @fsPath, (err)=> @stubbedKnoxClient.putFile.calledWith(@fsPath, @key).should.equal true err.should.equal @error done() @@ -68,49 +68,49 @@ describe "s3WrapperTests", -> it "should delete the file and pass the error with it", (done)-> @LocalFileWriter.deleteFile.callsArgWith(1) @stubbedKnoxClient.putFile.callsArgWith(2, @error) - @s3Wrapper.sendFileToS3 @bucketName, @key, @fsPath, (err)=> + @S3PersistorManager.sendFile @bucketName, @key, @fsPath, (err)=> @stubbedKnoxClient.putFile.calledWith(@fsPath, @key).should.equal true err.should.equal @error done() - describe "sendStreamToS3", -> + describe "sendStream", -> beforeEach -> @fsPath = "to/some/where" - @origin = + @origin = on:-> - @s3Wrapper = SandboxedModule.require modulePath, requires: @requires - @s3Wrapper.sendFileToS3 = sinon.stub().callsArgWith(3) + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @S3PersistorManager.sendFile = sinon.stub().callsArgWith(3) it "should send stream to LocalFileWriter", (done)-> @LocalFileWriter.deleteFile.callsArgWith(1) @LocalFileWriter.writeStream.callsArgWith(2, null, @fsPath) - @s3Wrapper.sendStreamToS3 @bucketName, @key, @origin, => + @S3PersistorManager.sendStream @bucketName, @key, @origin, => @LocalFileWriter.writeStream.calledWith(@origin).should.equal true done() it "should return the error from LocalFileWriter", (done)-> @LocalFileWriter.deleteFile.callsArgWith(1) @LocalFileWriter.writeStream.callsArgWith(2, @error) - @s3Wrapper.sendStreamToS3 @bucketName, @key, @origin, (err)=> + @S3PersistorManager.sendStream @bucketName, @key, @origin, (err)=> err.should.equal @error done() - it "should send the file to s3", (done)-> + it "should send the file to the filestore", (done)-> @LocalFileWriter.deleteFile.callsArgWith(1) @LocalFileWriter.writeStream.callsArgWith(2) - @s3Wrapper.sendStreamToS3 @bucketName, @key, @origin, (err)=> - @s3Wrapper.sendFileToS3.called.should.equal true + @S3PersistorManager.sendStream @bucketName, @key, @origin, (err)=> + @S3PersistorManager.sendFile.called.should.equal true done() describe "copyFile", -> beforeEach -> @sourceKey = "my/key" @destKey = "my/dest/key" - @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires it "should use knox to copy file", (done)-> @stubbedKnoxClient.copyFile.callsArgWith(2, @error) - @s3Wrapper.copyFile @bucketName, @sourceKey, @destKey, (err)=> + @S3PersistorManager.copyFile @bucketName, @sourceKey, @destKey, (err)=> err.should.equal @error @stubbedKnoxClient.copyFile.calledWith(@sourceKey, @destKey).should.equal true done() @@ -118,14 +118,14 @@ describe "s3WrapperTests", -> describe "deleteDirectory", -> beforeEach -> - @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + @S3PersistorManager = 
SandboxedModule.require modulePath, requires: @requires it "should list the contents passing them onto multi delete", (done)-> data = Contents: [{Key:"1234"}, {Key: "456"}] @stubbedKnoxClient.list.callsArgWith(1, null, data) @stubbedKnoxClient.deleteMultiple.callsArgWith(1) - @s3Wrapper.deleteDirectory @bucketName, @key, (err)=> + @S3PersistorManager.deleteDirectory @bucketName, @key, (err)=> @stubbedKnoxClient.deleteMultiple.calledWith(["1234","456"]).should.equal true done() @@ -134,9 +134,9 @@ describe "s3WrapperTests", -> it "should use correct options", (done)-> @request = sinon.stub().callsArgWith(1) @requires["request"] = @request - @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires - @s3Wrapper.deleteFile @bucketName, @key, (err)=> + @S3PersistorManager.deleteFile @bucketName, @key, (err)=> opts = @request.args[0][0] assert.deepEqual(opts.aws, {key:@settings.s3.key, secret:@settings.s3.secret, bucket:@bucketName}) opts.method.should.equal "delete" @@ -147,9 +147,9 @@ describe "s3WrapperTests", -> it "should return the error", (done)-> @request = sinon.stub().callsArgWith(1, @error) @requires["request"] = @request - @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires - @s3Wrapper.deleteFile @bucketName, @key, (err)=> + @S3PersistorManager.deleteFile @bucketName, @key, (err)=> err.should.equal @error done() @@ -158,9 +158,9 @@ describe "s3WrapperTests", -> it "should use correct options", (done)-> @request = sinon.stub().callsArgWith(1, null, statusCode:200) @requires["request"] = @request - @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires - @s3Wrapper.checkIfFileExists @bucketName, @key, (err)=> + @S3PersistorManager.checkIfFileExists @bucketName, @key, (err)=> opts = @request.args[0][0] assert.deepEqual(opts.aws, {key:@settings.s3.key, secret:@settings.s3.secret, bucket:@bucketName}) opts.method.should.equal "head" @@ -171,24 +171,24 @@ describe "s3WrapperTests", -> it "should return true for a 200", (done)-> @request = sinon.stub().callsArgWith(1, null, statusCode:200) @requires["request"] = @request - @s3Wrapper = SandboxedModule.require modulePath, requires: @requires - @s3Wrapper.checkIfFileExists @bucketName, @key, (err, exists)=> + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @S3PersistorManager.checkIfFileExists @bucketName, @key, (err, exists)=> exists.should.equal true done() it "should return false for a non 200", (done)-> @request = sinon.stub().callsArgWith(1, null, statusCode:404) @requires["request"] = @request - @s3Wrapper = SandboxedModule.require modulePath, requires: @requires - @s3Wrapper.checkIfFileExists @bucketName, @key, (err, exists)=> + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @S3PersistorManager.checkIfFileExists @bucketName, @key, (err, exists)=> exists.should.equal false done() it "should return the error", (done)-> @request = sinon.stub().callsArgWith(1, @error, {}) @requires["request"] = @request - @s3Wrapper = SandboxedModule.require modulePath, requires: @requires + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires - @s3Wrapper.checkIfFileExists @bucketName, @key, (err)=> + @S3PersistorManager.checkIfFileExists @bucketName, @key, (err)=> err.should.equal @error 
- done() \ No newline at end of file + done() From e0f0bfb0eafe7b97a62ad9d1b44191dd33817ee2 Mon Sep 17 00:00:00 2001 From: Oliver Matthews Date: Wed, 26 Feb 2014 11:49:15 +0000 Subject: [PATCH 032/555] revert settings.persistorManager --- services/filestore/app/coffee/PersistorManager.coffee | 6 +++--- services/filestore/config/settings.development.coffee | 2 +- .../filestore/test/unit/coffee/PersistorManagerTests.coffee | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/services/filestore/app/coffee/PersistorManager.coffee b/services/filestore/app/coffee/PersistorManager.coffee index 7bd55c47dd..e0266e64af 100644 --- a/services/filestore/app/coffee/PersistorManager.coffee +++ b/services/filestore/app/coffee/PersistorManager.coffee @@ -2,9 +2,9 @@ settings = require("settings-sharelatex") logger = require("logger-sharelatex") S3PersistorManager = require("./S3PersistorManager") -logger.log backend:settings.persistorManager, "Loading backend" -module.exports = switch settings.persistorManager +logger.log backend:settings.filestoreBackend, "Loading backend" +module.exports = switch settings.filestoreBackend when "s3",null S3PersistorManager else - throw new Error( "Unknown filestore backend: #{settings.persistorManager}" ) + throw new Error( "Unknown filestore backend: #{settings.filestoreBackend}" ) diff --git a/services/filestore/config/settings.development.coffee b/services/filestore/config/settings.development.coffee index b8bebc6b31..a2a1b5cc26 100644 --- a/services/filestore/config/settings.development.coffee +++ b/services/filestore/config/settings.development.coffee @@ -8,7 +8,7 @@ module.exports = # current options are: # "s3" - Amazon S3 # if no persistor is chosen, s3 will be used by default - persistorManager: "s3" + filestoreBackend: "s3" # ShareLaTeX stores binary files like images in S3. # Fill in your Amazon S3 credentials below. 
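After this revert, PersistorManager dispatches on settings.filestoreBackend again. The helper and stub below sketch that switch in isolation; both are assumptions for illustration, not code from the patch series:

# Sketch of the backend-selection pattern; the stub persistor is an assumption.
S3PersistorManager =
  sendStream: -> console.log "would send via the S3 persistor"

selectPersistor = (filestoreBackend) ->
  switch filestoreBackend
    when "s3", null then S3PersistorManager
    else throw new Error("Unknown filestore backend: #{filestoreBackend}")

selectPersistor("s3").sendStream()   # prints: would send via the S3 persistor
try
  selectPersistor "magic"
catch error
  console.log error.message          # prints: Unknown filestore backend: magic
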
diff --git a/services/filestore/test/unit/coffee/PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/PersistorManagerTests.coffee index e82700cf1a..af11fa7408 100644 --- a/services/filestore/test/unit/coffee/PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/PersistorManagerTests.coffee @@ -22,7 +22,7 @@ describe "PersistorManagerTests", -> describe "test s3 mixin", -> beforeEach -> @settings = - persistorManager: "s3" + filestoreBackend: "s3" @requires = "./S3PersistorManager": @S3PersistorManager "settings-sharelatex": @settings @@ -81,7 +81,7 @@ describe "PersistorManagerTests", -> describe "test invalid mixins", -> it "should not load an invalid wrapper", (done) -> @settings = - persistorManager:"magic" + filestoreBackend:"magic" @requires = "./S3PersistorManager": @S3PersistorManager "settings-sharelatex": @settings From 53bcdccc04ab470bd4fb26e2a9fbf2e07cc24425 Mon Sep 17 00:00:00 2001 From: Oliver Matthews Date: Wed, 26 Feb 2014 11:58:48 +0000 Subject: [PATCH 033/555] change the method for assuming s3 --- services/filestore/app/coffee/PersistorManager.coffee | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/PersistorManager.coffee b/services/filestore/app/coffee/PersistorManager.coffee index e0266e64af..d4922b9c3b 100644 --- a/services/filestore/app/coffee/PersistorManager.coffee +++ b/services/filestore/app/coffee/PersistorManager.coffee @@ -2,9 +2,13 @@ settings = require("settings-sharelatex") logger = require("logger-sharelatex") S3PersistorManager = require("./S3PersistorManager") +# assume s3 if none specified +settings.filestoreBackend ||= "s3" + + logger.log backend:settings.filestoreBackend, "Loading backend" module.exports = switch settings.filestoreBackend - when "s3",null + when "s3" S3PersistorManager else throw new Error( "Unknown filestore backend: #{settings.filestoreBackend}" ) From ff96f37b2dcfd8be437770e64eb54610b8184e30 Mon Sep 17 00:00:00 2001 From: Oliver Matthews Date: Wed, 26 Feb 2014 15:10:55 +0000 Subject: [PATCH 034/555] unit test passing version of FSPersistorManager --- .../app/coffee/FSPersistorManager.coffee | 59 +++++++ .../coffee/FSPersistorManagerTests.coffee | 156 ++++++++++++++++++ 2 files changed, 215 insertions(+) create mode 100644 services/filestore/app/coffee/FSPersistorManager.coffee create mode 100644 services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee new file mode 100644 index 0000000000..a0779e5b61 --- /dev/null +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -0,0 +1,59 @@ +logger = require("logger-sharelatex") +fs = require("fs") +LocalFileWriter = require("./LocalFileWriter") + +module.exports = + + sendFile: ( location, target, source, callback = (err)->) -> + logger.log location:location, target:target, source:source, "sending file" + fs.rename source, "#{location}/#{target}", (err) -> + logger.err err:err, location:location, target:target, source:source, "Error on put of file" + callback err + + sendStream: ( location, target, sourceStream, callback = (err)->) -> + logger.log location:location, target:target, source:sourceStream, "sending file stream" + sourceStream.on "error", (err)-> + logger.err location:location, target:target, source:sourceStream, err:err "error on stream to send" + LocalFileWriter.writeStream sourceStream, null, (err, fsPath)=> + if err? 
+ logger.err location:location, target:target, fsPath:fsPath, err:err, "something went wrong writing stream to disk" + return callback err + @sendFile location, target, fsPath, callback + + getFileStream: (location, name, callback = (err, res)->)-> + logger.log location:location, name:name, "getting file" + sourceStream = fs.createReadStream "#{location}/#{name}" + sourceStream.on 'error', (err) -> + logger.err err:err, location:location, name:name, "Error reading from file" + callback err + callback null,sourceStream + + + copyFile: (location, fromName, toName, callback = (err)->)-> + logger.log location:location, fromName:fromName, toName:toName, "copying file" + sourceStream = fs.createReadStream "#{location}/#{fromName}" + sourceStream.on 'error', (err) -> + logger.err err:err, location:location, key:fromName, "Error reading from file" + callback err + targetStream = fs.createWriteStream "#{location}/#{toName}" + targetStream.on 'error', (err) -> + logger.err err:err, location:location, key:targetKey, "Error writing to file" + callback err + sourceStream.pipe targetStream + + deleteFile: (location, name, callback)-> + logger.log location:location, name:name, "delete file" + fs.unlink "#{location}/#{name}", (err) -> + logger.err err:err, location:location, name:name, "Error on delete." + callback err + + deleteDirectory: (location, name, callback = (err)->)-> + fs.rmdir "#{location}/#{name}", (err) -> + logger.err err:err, location:location, name:name, "Error on rmdir." + callback err + + checkIfFileExists:(location, name, callback = (err,exists)->)-> + logger.log location:location, name:name, "checking if file exists" + fs.exists "#{location}/#{name}", (exists) -> + logger.log location:location, name:name, exists:exists, "checked if file exists" + callback null, exists diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee new file mode 100644 index 0000000000..44580d115a --- /dev/null +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -0,0 +1,156 @@ +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should +expect = chai.expect +modulePath = "../../../app/js/FSPersistorManager.js" +SandboxedModule = require('sandboxed-module') +fs = require("fs") + +describe "FSPersistorManagerTests", -> + + beforeEach -> + @Fs = + rename:sinon.stub() + createReadStream:sinon.stub() + createWriteStream:sinon.stub() + unlink:sinon.stub() + rmdir:sinon.stub() + exists:sinon.stub() + @LocalFileWriter = + writeStream: sinon.stub() + @requires = + "./LocalFileWriter":@LocalFileWriter + "fs":@Fs + "logger-sharelatex": + log:-> + err:-> + @location = "/tmp" + @name1 = "first_file" + @name2 = "second_file" + @error = "error_message" + @FSPersistorManager = SandboxedModule.require modulePath, requires: @requires + + describe "sendFile", -> + it "should put the file", (done) -> + @Fs.rename.callsArgWith(2,@error) + @FSPersistorManager.sendFile @location, @name1, @name2, (err)=> + @Fs.rename.calledWith( @name2, "#{@location}/#{@name1}" ).should.equal true + err.should.equal @error + done() + + describe "sendStream", -> + beforeEach -> + @FSPersistorManager.sendFile = sinon.stub().callsArgWith(3) + @LocalFileWriter.writeStream.callsArgWith(2, null, @name1) + @SourceStream = + on:-> + + it "should sent stream to LocalFileWriter", (done)-> + @FSPersistorManager.sendStream @location, @name1, @SourceStream, => + 
@LocalFileWriter.writeStream.calledWith(@SourceStream).should.equal true + done() + + it "should return the error from LocalFileWriter", (done)-> + @LocalFileWriter.writeStream.callsArgWith(2, @error) + @FSPersistorManager.sendStream @location, @name1, @SourceStream, (err)=> + err.should.equal @error + done() + + it "should send the file to the filestore", (done)-> + @LocalFileWriter.writeStream.callsArgWith(2) + @FSPersistorManager.sendStream @location, @name1, @SourceStream, (err)=> + @FSPersistorManager.sendFile.called.should.equal true + done() + + describe "getFileStream", -> + it "should use correct file location", (done) -> + @Fs.createReadStream.returns( + on:-> + ) + @FSPersistorManager.getFileStream @location, @name1, (err,res)=> + @Fs.createReadStream.calledWith("#{@location}/#{@name1}").should.equal.true + done() + + describe "copyFile", -> + beforeEach -> + @ReadStream= + on:-> + pipe:sinon.stub() + @WriteStream= + on:-> + @Fs.createReadStream.returns(@ReadStream) + @Fs.createWriteStream.returns(@WriteStream) + + it "Should open the source for reading", (done) -> + @FSPersistorManager.copyFile @location, @name1, @name2, -> + @Fs.createReadStream.calledWith("#{@location}/#{@name1}").should.equal.true + done() + + it "Should open the target for writing", (done) -> + @FSPersistorManager.copyFile @location, @name1, @name2, -> + @Fs.createWriteStream.calledWith("#{@location}/#{@name2}").should.equal.true + done() + + it "Should pipe the source to the target", (done) -> + @FSPersistorManager.copyFile @location, @name1, @name2, -> + @ReadStream.pipe.calledWith(@WriteStream).should.equal.true + done() + + describe "deleteFile", -> + beforeEach -> + @Fs.unlink.callsArgWith(1,@error) + + it "Should call unlink with correct options", (done) -> + @FSPersistorManager.deleteFile @location, @name1, (err) => + @Fs.unlink.calledWith("#{@location}/#{@name1}").should.equal.true + done() + + it "Should propogate the error", (done) -> + @FSPersistorManager.deleteFile @location, @name1, (err) => + err.should.equal @error + done() + + + describe "deleteDirectory", -> + beforeEach -> + @Fs.rmdir.callsArgWith(1,@error) + + it "Should call rmdir with correct options", (done) -> + @FSPersistorManager.deleteDirectory @location, @name1, (err) => + @Fs.rmdir.calledWith("#{@location}/#{@name1}").should.equal.true + done() + + it "Should propogate the error", (done) -> + @FSPersistorManager.deleteDirectory @location, @name1, (err) => + err.should.equal @error + done() + + describe "checkIfFileExists", -> + beforeEach -> + @Fs.exists.callsArgWith(1,true) + + it "Should call exists with correct options", (done) -> + @FSPersistorManager.checkIfFileExists @location, @name1, (exists) => + @Fs.exists.calledWith("#{@location}/#{@name1}").should.equal.true + done() + + # fs.exists simply returns false on any error, so... 
+ it "should not return an error", (done) -> + @FSPersistorManager.checkIfFileExists @location, @name1, (err,exists) => + expect(err).to.be.null + done() + + it "Should return true for existing files", (done) -> + @Fs.exists.callsArgWith(1,true) + @FSPersistorManager.checkIfFileExists @location, @name1, (err,exists) => + exists.should.be.true + done() + + it "Should return false for non-existing files", (done) -> + @Fs.exists.callsArgWith(1,false) + @FSPersistorManager.checkIfFileExists @location, @name1, (err,exists) => + exists.should.be.false + done() + + From 7c5634044f546960b8d43d99e151b9449bdc8170 Mon Sep 17 00:00:00 2001 From: Oliver Matthews Date: Wed, 26 Feb 2014 15:15:03 +0000 Subject: [PATCH 035/555] Allow selection of FS persistor manager. Only require the chosen persistor manager. --- services/filestore/app/coffee/PersistorManager.coffee | 5 +++-- services/filestore/config/settings.development.coffee | 1 + 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/coffee/PersistorManager.coffee b/services/filestore/app/coffee/PersistorManager.coffee index d4922b9c3b..1b200c58d3 100644 --- a/services/filestore/app/coffee/PersistorManager.coffee +++ b/services/filestore/app/coffee/PersistorManager.coffee @@ -1,6 +1,5 @@ settings = require("settings-sharelatex") logger = require("logger-sharelatex") -S3PersistorManager = require("./S3PersistorManager") # assume s3 if none specified settings.filestoreBackend ||= "s3" @@ -9,6 +8,8 @@ settings.filestoreBackend ||= "s3" logger.log backend:settings.filestoreBackend, "Loading backend" module.exports = switch settings.filestoreBackend when "s3" - S3PersistorManager + require("./S3PersistorManager") + when "fs" + require("./FSPersistorManager") else throw new Error( "Unknown filestore backend: #{settings.filestoreBackend}" ) diff --git a/services/filestore/config/settings.development.coffee b/services/filestore/config/settings.development.coffee index a2a1b5cc26..db79f35ca6 100644 --- a/services/filestore/config/settings.development.coffee +++ b/services/filestore/config/settings.development.coffee @@ -7,6 +7,7 @@ module.exports = # which persistor to use for file storage # current options are: # "s3" - Amazon S3 + # "fs" - local filesystem # if no persistor is chosen, s3 will be used by default filestoreBackend: "s3" From 88cc89a0d1176fb77d8e947563c7d4ce23a7beab Mon Sep 17 00:00:00 2001 From: Oliver Matthews Date: Sat, 1 Mar 2014 15:10:47 +0000 Subject: [PATCH 036/555] filter /s from key ids --- .../app/coffee/FSPersistorManager.coffee | 49 ++++++++++++------- .../coffee/FSPersistorManagerTests.coffee | 7 +-- 2 files changed, 34 insertions(+), 22 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index a0779e5b61..1226c57d90 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -2,12 +2,17 @@ logger = require("logger-sharelatex") fs = require("fs") LocalFileWriter = require("./LocalFileWriter") -module.exports = +filterName = (key) -> + return key.replace /\//, "_" + +module.exports = sendFile: ( location, target, source, callback = (err)->) -> - logger.log location:location, target:target, source:source, "sending file" - fs.rename source, "#{location}/#{target}", (err) -> - logger.err err:err, location:location, target:target, source:source, "Error on put of file" + filteredTarget = filterName target + logger.log location:location, 
target:filteredTarget, source:source, "sending file" + fs.rename source, "#{location}/#{filteredTarget}", (err) -> + if err!=null + logger.err err:err, location:location, target:filteredTarget, source:source, "Error on put of file" callback err sendStream: ( location, target, sourceStream, callback = (err)->) -> @@ -21,8 +26,9 @@ module.exports = @sendFile location, target, fsPath, callback getFileStream: (location, name, callback = (err, res)->)-> - logger.log location:location, name:name, "getting file" - sourceStream = fs.createReadStream "#{location}/#{name}" + filteredName = filterName name + logger.log location:location, name:filteredName, "getting file" + sourceStream = fs.createReadStream "#{location}/#{filteredName}" sourceStream.on 'error', (err) -> logger.err err:err, location:location, name:name, "Error reading from file" callback err @@ -30,30 +36,35 @@ module.exports = copyFile: (location, fromName, toName, callback = (err)->)-> - logger.log location:location, fromName:fromName, toName:toName, "copying file" - sourceStream = fs.createReadStream "#{location}/#{fromName}" + filteredFromName=filterName fromName + filteredToName=filterName toName + logger.log location:location, fromName:filteredFromName, toName:filteredToName, "copying file" + sourceStream = fs.createReadStream "#{location}/#{filteredFromName}" sourceStream.on 'error', (err) -> - logger.err err:err, location:location, key:fromName, "Error reading from file" + logger.err err:err, location:location, key:filteredFromName, "Error reading from file" callback err - targetStream = fs.createWriteStream "#{location}/#{toName}" + targetStream = fs.createWriteStream "#{location}/#{filteredToName}" targetStream.on 'error', (err) -> - logger.err err:err, location:location, key:targetKey, "Error writing to file" + logger.err err:err, location:location, key:filteredToName, "Error writing to file" callback err sourceStream.pipe targetStream deleteFile: (location, name, callback)-> - logger.log location:location, name:name, "delete file" - fs.unlink "#{location}/#{name}", (err) -> - logger.err err:err, location:location, name:name, "Error on delete." + filteredName = filterName name + logger.log location:location, name:filteredName, "delete file" + fs.unlink "#{location}/#{filteredName}", (err) -> + logger.err err:err, location:location, name:filteredName, "Error on delete." callback err deleteDirectory: (location, name, callback = (err)->)-> - fs.rmdir "#{location}/#{name}", (err) -> - logger.err err:err, location:location, name:name, "Error on rmdir." + filteredName = filterName name + fs.rmdir "#{location}/#{filteredName}", (err) -> + logger.err err:err, location:location, name:filteredName, "Error on rmdir." 
callback err checkIfFileExists:(location, name, callback = (err,exists)->)-> - logger.log location:location, name:name, "checking if file exists" - fs.exists "#{location}/#{name}", (exists) -> - logger.log location:location, name:name, exists:exists, "checked if file exists" + filteredName = filterName name + logger.log location:location, name:filteredName, "checking if file exists" + fs.exists "#{location}/#{filteredName}", (exists) -> + logger.log location:location, name:filteredName, exists:exists, "checked if file exists" callback null, exists diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index 44580d115a..bf5f08ea9d 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -26,7 +26,8 @@ describe "FSPersistorManagerTests", -> log:-> err:-> @location = "/tmp" - @name1 = "first_file" + @name1 = "530f2407e7ef165704000007/530f838b46d9a9e859000008" + @name1Filtered ="530f2407e7ef165704000007_530f838b46d9a9e859000008" @name2 = "second_file" @error = "error_message" @FSPersistorManager = SandboxedModule.require modulePath, requires: @requires @@ -35,7 +36,7 @@ describe "FSPersistorManagerTests", -> it "should put the file", (done) -> @Fs.rename.callsArgWith(2,@error) @FSPersistorManager.sendFile @location, @name1, @name2, (err)=> - @Fs.rename.calledWith( @name2, "#{@location}/#{@name1}" ).should.equal true + @Fs.rename.calledWith( @name2, "#{@location}/#{@name1Filtered}" ).should.equal true err.should.equal @error done() @@ -69,7 +70,7 @@ describe "FSPersistorManagerTests", -> on:-> ) @FSPersistorManager.getFileStream @location, @name1, (err,res)=> - @Fs.createReadStream.calledWith("#{@location}/#{@name1}").should.equal.true + @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal.true done() describe "copyFile", -> From 40e2cb1c6d3f6e2ef08ec5545443ea1cd18371de Mon Sep 17 00:00:00 2001 From: Oliver Matthews Date: Sat, 1 Mar 2014 15:23:11 +0000 Subject: [PATCH 037/555] fix tabbing --- services/filestore/app/coffee/FSPersistorManager.coffee | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 1226c57d90..09bbe91255 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -3,7 +3,7 @@ fs = require("fs") LocalFileWriter = require("./LocalFileWriter") filterName = (key) -> - return key.replace /\//, "_" + return key.replace /\//, "_" module.exports = @@ -11,8 +11,8 @@ module.exports = filteredTarget = filterName target logger.log location:location, target:filteredTarget, source:source, "sending file" fs.rename source, "#{location}/#{filteredTarget}", (err) -> - if err!=null - logger.err err:err, location:location, target:filteredTarget, source:source, "Error on put of file" + if err!=null + logger.err err:err, location:location, target:filteredTarget, source:source, "Error on put of file" callback err sendStream: ( location, target, sourceStream, callback = (err)->) -> From 94a65046350b8ede7ceed0a2274e27875cd87e2f Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 4 Mar 2014 12:44:16 +0000 Subject: [PATCH 038/555] cranked up kill to child processes to killSignal: "SIGKILL" --- .../filestore/app/coffee/FileConverter.coffee | 16 +++++++--------- 
.../filestore/app/coffee/ImageOptimiser.coffee | 3 ++- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index 337f721ffa..c53828d454 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -6,6 +6,10 @@ approvedFormats = ["png"] twentySeconds = 20 * 1000 +childProcessOpts = + killSignal: "SIGKILL" + timeout: twentySeconds + module.exports = convert: (sourcePath, requestedFormat, callback)-> @@ -17,9 +21,7 @@ module.exports = err = new Error("invalid format requested") return callback err args = "nice convert -flatten -density 300 #{sourcePath} #{destPath}" - opts = - timeout: twentySeconds - exec args, opts, (err, stdout, stderr)-> + exec args, childProcessOpts, (err, stdout, stderr)-> timer.done() if err? logger.err err:err, stderr:stderr, sourcePath:sourcePath, requestedFormat:requestedFormat, destPath:destPath, "something went wrong converting file" @@ -37,9 +39,7 @@ module.exports = width: 424 height: 300 args = "nice convert -flatten -background white -resize 260x -density 300 #{sourcePath} #{destPath}" - opts = - timeout: twentySeconds - exec args, opts,(err, stdout, stderr)-> + exec args, childProcessOpts, (err, stdout, stderr)-> if err? logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to preview" else @@ -56,9 +56,7 @@ module.exports = width: 600 height: 849 args = "nice convert -flatten -background white -resize 548x -density 300 #{sourcePath} #{destPath}" - opts = - timeout: twentySeconds - exec args, opts,(err, stdout, stderr)-> + exec args, childProcessOpts, (err, stdout, stderr)-> if err? logger.err err:err, stderr:stderr, sourcePath:sourcePath, destPath:destPath, "something went wrong converting file to preview" else diff --git a/services/filestore/app/coffee/ImageOptimiser.coffee b/services/filestore/app/coffee/ImageOptimiser.coffee index 7aece25464..dbddaa3205 100644 --- a/services/filestore/app/coffee/ImageOptimiser.coffee +++ b/services/filestore/app/coffee/ImageOptimiser.coffee @@ -10,11 +10,12 @@ module.exports = args = "optipng #{localPath}" opts = timeout: 20 * 1000 + killSignal: "SIGKILL" exec args, opts,(err, stdout, stderr)-> if err? 
logger.err err:err, stderr:stderr, localPath:localPath, "something went wrong converting compressPng" else logger.log localPath:localPath, "finished compressPng file" - callback(err) + callback(err) From 711e95a82a33e9fcfbaf6cf98a678d2f1a05039c Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 4 Mar 2014 13:36:47 +0000 Subject: [PATCH 039/555] delete converted file when finished to clean up --- .../app/coffee/FileController.coffee | 1 - .../filestore/app/coffee/FileHandler.coffee | 38 +++++++++++-------- .../app/coffee/ImageOptimiser.coffee | 1 - .../app/coffee/LocalFileWriter.coffee | 1 + .../app/coffee/S3PersistorManager.coffee | 2 +- .../test/unit/coffee/FileHandlerTests.coffee | 11 +++++- 6 files changed, 34 insertions(+), 20 deletions(-) diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 291e188da1..25c612d870 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -2,7 +2,6 @@ PersistorManager = require("./PersistorManager") settings = require("settings-sharelatex") logger = require("logger-sharelatex") FileHandler = require("./FileHandler") -LocalFileWriter = require("./LocalFileWriter") metrics = require("./metrics") oneDayInSeconds = 60 * 60 * 24 diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index ece883f772..aa9602b5a8 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -7,7 +7,6 @@ KeyBuilder = require("./KeyBuilder") async = require("async") ImageOptimiser = require("./ImageOptimiser") - module.exports = insertFile: (bucket, key, stream, callback)-> @@ -45,28 +44,37 @@ module.exports = @_getConvertedFileAndCache bucket, key, convetedKey, opts, callback _getConvertedFileAndCache: (bucket, key, convetedKey, opts, callback)-> - @_convertFile bucket, key, opts, (err, fsPath)-> + self = @ + convertedFsPath = "" + async.series [ + (cb)-> + self._convertFile bucket, key, opts, (err, fileSystemPath)-> + convertedFsPath = fileSystemPath + cb err + (cb)-> + ImageOptimiser.compressPng convertedFsPath, cb + (cb)-> + PersistorManager.sendFile bucket, convetedKey, convertedFsPath, cb + ], (err)-> if err? - logger.err err:err, fsPath:fsPath, bucket:bucket, key:key, opts:opts, "something went wrong with converting file" return callback(err) - ImageOptimiser.compressPng fsPath, (err)-> - if err? - logger.err err:err, fsPath:fsPath, bucket:bucket, key:key, opts:opts, "something went wrong optimising png file" - return callback(err) - PersistorManager.sendFile bucket, convetedKey, fsPath, (err)-> - if err? - logger.err err:err, bucket:bucket, key:key, convetedKey:convetedKey, opts:opts, "something went wrong sending the file" - return callback(err) - PersistorManager.getFileStream bucket, convetedKey, callback + PersistorManager.getFileStream bucket, convetedKey, callback _convertFile: (bucket, origonalKey, opts, callback)-> @_writeS3FileToDisk bucket, origonalKey, (err, origonalFsPath)-> + done = (err, destPath)-> + if err? + logger.err err:err, bucket:bucket, origonalKey:origonalKey, opts:opts, "error converting file" + return callback(err) + LocalFileWriter.deleteFile origonalFsPath, -> + callback(err, destPath) + if opts.format? 
- FileConverter.convert origonalFsPath, opts.format, callback + FileConverter.convert origonalFsPath, opts.format, done else if opts.style == "thumbnail" - FileConverter.thumbnail origonalFsPath, callback + FileConverter.thumbnail origonalFsPath, done else if opts.style == "preview" - FileConverter.preview origonalFsPath, callback + FileConverter.preview origonalFsPath, done else throw new Error("should have specified opts to convert file with #{JSON.stringify(opts)}") diff --git a/services/filestore/app/coffee/ImageOptimiser.coffee b/services/filestore/app/coffee/ImageOptimiser.coffee index dbddaa3205..be3fed1ca2 100644 --- a/services/filestore/app/coffee/ImageOptimiser.coffee +++ b/services/filestore/app/coffee/ImageOptimiser.coffee @@ -1,7 +1,6 @@ exec = require('child_process').exec logger = require("logger-sharelatex") - module.exports = compressPng: (localPath, callback)-> diff --git a/services/filestore/app/coffee/LocalFileWriter.coffee b/services/filestore/app/coffee/LocalFileWriter.coffee index 8bb8a6bc97..bf0c0fd82f 100644 --- a/services/filestore/app/coffee/LocalFileWriter.coffee +++ b/services/filestore/app/coffee/LocalFileWriter.coffee @@ -26,6 +26,7 @@ module.exports = callback err deleteFile: (fsPath, callback)-> + logger.log fsPath:fsPath, "removing local temp file" fs.unlink fsPath, callback _getPath : (key)-> diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index dd65b79abc..bfc8a89c6b 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -45,7 +45,7 @@ module.exports = return callback(err) if !res? logger.err err:err, res:res, bucketName:bucketName, key:key, fsPath:fsPath, "no response from s3 put file" - callback("no response from put file") + return callback("no response from put file") if res.statusCode != 200 logger.err bucketName:bucketName, key:key, fsPath:fsPath, "non 200 response from s3 putting file" return callback("non 200 response from s3 on put file") diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index c2bed383ab..b00f23af0e 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -22,6 +22,7 @@ describe "FileHandler", -> insertFile: sinon.stub() @LocalFileWriter = writeStream: sinon.stub() + deleteFile: sinon.stub() @FileConverter = convert: sinon.stub() thumbnail: sinon.stub() @@ -134,16 +135,18 @@ describe "FileHandler", -> describe "_getConvertedFileAndCache", -> it "should _convertFile ", (done)-> + @stubbedStream = {"something":"here"} @PersistorManager.sendFile = sinon.stub().callsArgWith(3) - @PersistorManager.getFileStream = sinon.stub().callsArgWith(2) + @PersistorManager.getFileStream = sinon.stub().callsArgWith(2, null, @stubbedStream) @convetedKey = @key+"converted" @handler._convertFile = sinon.stub().callsArgWith(3, null, @stubbedPath) @ImageOptimiser.compressPng = sinon.stub().callsArgWith(1) - @handler._getConvertedFileAndCache @bucket, @key, @convetedKey, {}, => + @handler._getConvertedFileAndCache @bucket, @key, @convetedKey, {}, (err, fsStream)=> @handler._convertFile.called.should.equal true @PersistorManager.sendFile.calledWith(@bucket, @convetedKey, @stubbedPath).should.equal true @PersistorManager.getFileStream.calledWith(@bucket, @convetedKey).should.equal true 
@ImageOptimiser.compressPng.calledWith(@stubbedPath).should.equal true + fsStream.should.equal @stubbedStream done() describe "_convertFile", -> @@ -152,23 +155,27 @@ describe "FileHandler", -> @FileConverter.thumbnail.callsArgWith(1, null, @formattedStubbedPath) @FileConverter.preview.callsArgWith(1, null, @formattedStubbedPath) @handler._writeS3FileToDisk = sinon.stub().callsArgWith(2, null, @stubbedPath) + @LocalFileWriter.deleteFile.callsArgWith(1) it "should call thumbnail on the writer path if style was thumbnail was specified", (done)-> @handler._convertFile @bucket, @key, style:"thumbnail", (err, path)=> path.should.equal @formattedStubbedPath @FileConverter.thumbnail.calledWith(@stubbedPath).should.equal true + @LocalFileWriter.deleteFile.calledWith(@stubbedPath).should.equal true done() it "should call preview on the writer path if style was preview was specified", (done)-> @handler._convertFile @bucket, @key, style:"preview", (err, path)=> path.should.equal @formattedStubbedPath @FileConverter.preview.calledWith(@stubbedPath).should.equal true + @LocalFileWriter.deleteFile.calledWith(@stubbedPath).should.equal true done() it "should call convert on the writer path if a format was specified", (done)-> @handler._convertFile @bucket, @key, format:@format, (err, path)=> path.should.equal @formattedStubbedPath @FileConverter.convert.calledWith(@stubbedPath, @format).should.equal true + @LocalFileWriter.deleteFile.calledWith(@stubbedPath).should.equal true done() From 957df0eb04f1bbd7073d36ab3b9fcaade865e30d Mon Sep 17 00:00:00 2001 From: Oliver Matthews Date: Tue, 4 Mar 2014 14:45:32 +0000 Subject: [PATCH 040/555] Don't dump streams to log files. --- services/filestore/app/coffee/FSPersistorManager.coffee | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 09bbe91255..dd4e39d981 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -16,9 +16,9 @@ module.exports = callback err sendStream: ( location, target, sourceStream, callback = (err)->) -> - logger.log location:location, target:target, source:sourceStream, "sending file stream" + logger.log location:location, target:target, "sending file stream" sourceStream.on "error", (err)-> - logger.err location:location, target:target, source:sourceStream, err:err "error on stream to send" + logger.err location:location, target:target, err:err "error on stream to send" LocalFileWriter.writeStream sourceStream, null, (err, fsPath)=> if err? 
logger.err location:location, target:target, fsPath:fsPath, err:err, "something went wrong writing stream to disk" From f920fd0b16bd2b7d0664731927c974fb007109d1 Mon Sep 17 00:00:00 2001 From: Oliver Matthews Date: Tue, 4 Mar 2014 15:01:13 +0000 Subject: [PATCH 041/555] match refactor_config on cwoac/sharelatex --- .../filestore/app/coffee/KeyBuilder.coffee | 6 ++-- .../app/coffee/PersistorManager.coffee | 8 ++--- .../app/coffee/S3PersistorManager.coffee | 20 ++++++------- .../config/settings.development.coffee | 28 ++++++++++++----- .../unit/coffee/PersistorManagerTests.coffee | 6 ++-- .../coffee/S3PersistorManagerTests.coffee | 30 ++++++++++--------- 6 files changed, 58 insertions(+), 40 deletions(-) diff --git a/services/filestore/app/coffee/KeyBuilder.coffee b/services/filestore/app/coffee/KeyBuilder.coffee index 113c0eac57..45aa351487 100644 --- a/services/filestore/app/coffee/KeyBuilder.coffee +++ b/services/filestore/app/coffee/KeyBuilder.coffee @@ -20,15 +20,15 @@ module.exports = userFileKey: (req, res, next)-> {project_id, file_id} = req.params req.key = "#{project_id}/#{file_id}" - req.bucket = settings.s3.buckets.user_files + req.bucket = settings.filestore.stores.user_files next() templateFileKey: (req, res, next)-> {template_id, format, version} = req.params req.key = "#{template_id}/#{version}/#{format}" - req.bucket = settings.s3.buckets.template_files + req.bucket = settings.filestore.stores.template_files req.version = version opts = req.query next() - \ No newline at end of file + diff --git a/services/filestore/app/coffee/PersistorManager.coffee b/services/filestore/app/coffee/PersistorManager.coffee index 1b200c58d3..1dad923098 100644 --- a/services/filestore/app/coffee/PersistorManager.coffee +++ b/services/filestore/app/coffee/PersistorManager.coffee @@ -2,14 +2,14 @@ settings = require("settings-sharelatex") logger = require("logger-sharelatex") # assume s3 if none specified -settings.filestoreBackend ||= "s3" +settings.filestore.backend ||= "s3" -logger.log backend:settings.filestoreBackend, "Loading backend" -module.exports = switch settings.filestoreBackend +logger.log backend:settings.filestore.backend, "Loading backend" +module.exports = switch settings.filestore.backend when "s3" require("./S3PersistorManager") when "fs" require("./FSPersistorManager") else - throw new Error( "Unknown filestore backend: #{settings.filestoreBackend}" ) + throw new Error( "Unknown filestore backend: #{settings.filestore.backend}" ) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index dd65b79abc..0b35ea7b52 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -24,8 +24,8 @@ printSockets() buildDefaultOptions = (bucketName, method, key)-> return { aws: - key: settings.s3.key - secret: settings.s3.secret + key: settings.filestore.s3.key + secret: settings.filestore.s3.secret bucket: bucketName method: method timeout: thirtySeconds @@ -36,8 +36,8 @@ module.exports = sendFile: (bucketName, key, fsPath, callback)-> s3Client = knox.createClient - key: settings.s3.key - secret: settings.s3.secret + key: settings.filestore.s3.key + secret: settings.filestore.s3.secret bucket: bucketName putEventEmiter = s3Client.putFile fsPath, key, (err, res)-> if err? 
@@ -70,8 +70,8 @@ module.exports = getFileStream: (bucketName, key, callback = (err, res)->)-> logger.log bucketName:bucketName, key:key, "getting file from s3" s3Client = knox.createClient - key: settings.s3.key - secret: settings.s3.secret + key: settings.filestore.s3.key + secret: settings.filestore.s3.secret bucket: bucketName s3Stream = s3Client.get(key) s3Stream.end() @@ -84,8 +84,8 @@ module.exports = copyFile: (bucketName, sourceKey, destKey, callback)-> logger.log bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "copying file in s3" s3Client = knox.createClient - key: settings.s3.key - secret: settings.s3.secret + key: settings.filestore.s3.key + secret: settings.filestore.s3.secret bucket: bucketName s3Client.copyFile sourceKey, destKey, (err)-> if err? @@ -102,8 +102,8 @@ module.exports = deleteDirectory: (bucketName, key, callback)-> s3Client = knox.createClient - key: settings.s3.key - secret: settings.s3.secret + key: settings.filestore.s3.key + secret: settings.filestore.s3.secret bucket: bucketName s3Client.list prefix:key, (err, data)-> keys = _.map data.Contents, (entry)-> diff --git a/services/filestore/config/settings.development.coffee b/services/filestore/config/settings.development.coffee index db79f35ca6..19088d3c76 100644 --- a/services/filestore/config/settings.development.coffee +++ b/services/filestore/config/settings.development.coffee @@ -3,13 +3,27 @@ module.exports = filestore: port: 3009 host: "localhost" - - # which persistor to use for file storage - # current options are: - # "s3" - Amazon S3 - # "fs" - local filesystem - # if no persistor is chosen, s3 will be used by default - filestoreBackend: "s3" + + filestore: + # which backend persistor to use. + # choices are + # s3 - Amazon S3 + # fs - local filesystem + backend: "s3" + stores: + # where to store user and template binary files + # + # For Amazon S3 this is the bucket name to store binary files in + # Must contain full url like: .s3.amazonaws.com + # + # For local filesystem this is the directory to store the files in. + # Must contain full path, e.g. "/var/lib/sharelatex/data" + # This path must exist, not be tmpfs and be writable to by the user sharelatex is run as. + user_files: "" + s3: + # if you are using S3, then fill in your S3 details below + key: "" + secret: "" # ShareLaTeX stores binary files like images in S3. # Fill in your Amazon S3 credentials below. 
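With the configuration reorganised into a nested filestore block, a minimal development settings file takes the shape sketched below; the bucket name and credentials are placeholders, not values from the patch:

# Minimal sketch of the nested `filestore` settings block introduced above.
# All concrete values are placeholders.
module.exports =
  internal:
    filestore:
      port: 3009
      host: "localhost"

  filestore:
    # "s3" for Amazon S3, "fs" for the local filesystem
    backend: "s3"
    stores:
      # for S3, the bucket (full url form per the comment above); for fs, a writable directory
      user_files: "example-user-files.s3.amazonaws.com"
    s3:
      key: "S3_KEY_PLACEHOLDER"
      secret: "S3_SECRET_PLACEHOLDER"
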
diff --git a/services/filestore/test/unit/coffee/PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/PersistorManagerTests.coffee index af11fa7408..ca7a82cbaa 100644 --- a/services/filestore/test/unit/coffee/PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/PersistorManagerTests.coffee @@ -22,7 +22,8 @@ describe "PersistorManagerTests", -> describe "test s3 mixin", -> beforeEach -> @settings = - filestoreBackend: "s3" + filestore: + backend: "s3" @requires = "./S3PersistorManager": @S3PersistorManager "settings-sharelatex": @settings @@ -81,7 +82,8 @@ describe "PersistorManagerTests", -> describe "test invalid mixins", -> it "should not load an invalid wrapper", (done) -> @settings = - filestoreBackend:"magic" + filestore: + backend:"magic" @requires = "./S3PersistorManager": @S3PersistorManager "settings-sharelatex": @settings diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 76872fb140..fe70f1008d 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -9,24 +9,26 @@ SandboxedModule = require('sandboxed-module') describe "S3PersistorManagerTests", -> beforeEach -> - @settings = - s3: - secret: "secret" - key: "this_key" - buckets: - user_files:"sl_user_files" - @stubbedKnoxClient = + @settings = + filestore: + backend: "s3" + s3: + secret: "secret" + key: "this_key" + stores: + user_files:"sl_user_files" + @stubbedKnoxClient = putFile:sinon.stub() copyFile:sinon.stub() list: sinon.stub() deleteMultiple: sinon.stub() get: sinon.stub() - @knox = + @knox = createClient: sinon.stub().returns(@stubbedKnoxClient) - @LocalFileWriter = + @LocalFileWriter = writeStream: sinon.stub() deleteFile: sinon.stub() - @requires = + @requires = "knox": @knox "settings-sharelatex": @settings "./LocalFileWriter":@LocalFileWriter @@ -48,7 +50,7 @@ describe "S3PersistorManagerTests", -> end:-> ) @S3PersistorManager.getFileStream @bucketName, @key, @fsPath, (err)=> - @stubbedKnoxClient.get.calledWith(@key).should.equal true + @stubbedKnoxClient.get.calledWith(@key).should.equal true done() describe "sendFile", -> @@ -121,7 +123,7 @@ describe "S3PersistorManagerTests", -> @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires it "should list the contents passing them onto multi delete", (done)-> - data = + data = Contents: [{Key:"1234"}, {Key: "456"}] @stubbedKnoxClient.list.callsArgWith(1, null, data) @stubbedKnoxClient.deleteMultiple.callsArgWith(1) @@ -138,7 +140,7 @@ describe "S3PersistorManagerTests", -> @S3PersistorManager.deleteFile @bucketName, @key, (err)=> opts = @request.args[0][0] - assert.deepEqual(opts.aws, {key:@settings.s3.key, secret:@settings.s3.secret, bucket:@bucketName}) + assert.deepEqual(opts.aws, {key:@settings.filestore.s3.key, secret:@settings.filestore.s3.secret, bucket:@bucketName}) opts.method.should.equal "delete" opts.timeout.should.equal (30*1000) opts.uri.should.equal "https://#{@bucketName}.s3.amazonaws.com/#{@key}" @@ -162,7 +164,7 @@ describe "S3PersistorManagerTests", -> @S3PersistorManager.checkIfFileExists @bucketName, @key, (err)=> opts = @request.args[0][0] - assert.deepEqual(opts.aws, {key:@settings.s3.key, secret:@settings.s3.secret, bucket:@bucketName}) + assert.deepEqual(opts.aws, {key:@settings.filestore.s3.key, secret:@settings.filestore.s3.secret, bucket:@bucketName}) opts.method.should.equal "head" 
opts.timeout.should.equal (30*1000) opts.uri.should.equal "https://#{@bucketName}.s3.amazonaws.com/#{@key}" From 2f22563d59f002bf5a34c24c04eed1bc23f88373 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 5 Mar 2014 17:39:27 +0000 Subject: [PATCH 042/555] changed key for templates to put /v/ in for version --- services/filestore/app/coffee/KeyBuilder.coffee | 2 +- .../filestore/test/unit/coffee/FileControllerTests.coffee | 4 ---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/services/filestore/app/coffee/KeyBuilder.coffee b/services/filestore/app/coffee/KeyBuilder.coffee index 45aa351487..71f6c05785 100644 --- a/services/filestore/app/coffee/KeyBuilder.coffee +++ b/services/filestore/app/coffee/KeyBuilder.coffee @@ -25,7 +25,7 @@ module.exports = templateFileKey: (req, res, next)-> {template_id, format, version} = req.params - req.key = "#{template_id}/#{version}/#{format}" + req.key = "#{template_id}/v/#{version}/#{format}" req.bucket = settings.filestore.stores.template_files req.version = version opts = req.query diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index 6c1c92dba3..ecf067976f 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -70,7 +70,6 @@ describe "FileController", -> done() @controller.getFile @req, @res - describe "insertFile", -> it "should send bucket name key and res to PersistorManager", (done)-> @@ -79,7 +78,6 @@ describe "FileController", -> @FileHandler.insertFile.calledWith(@bucket, @key, @req).should.equal true done() @controller.insertFile @req, @res - describe "copyFile", -> beforeEach -> @@ -98,7 +96,6 @@ describe "FileController", -> done() @controller.copyFile @req, @res - it "should send a 500 if there was an error", (done)-> @PersistorManager.copyFile.callsArgWith(3, "error") @res.send = (code)=> @@ -106,7 +103,6 @@ describe "FileController", -> done() @controller.copyFile @req, @res - describe "delete file", -> it "should tell the file handler", (done)-> From e8b245e857d00c087bb2b331766034d03c704ca9 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 5 Mar 2014 18:13:40 +0000 Subject: [PATCH 043/555] check that res has not already been sent before sending catch 500 --- services/filestore/app.coffee | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index fc6cce6ede..027ec11119 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -35,7 +35,8 @@ app.use (req, res, next) -> requestDomain.add req requestDomain.add res requestDomain.on "error", (err)-> - res.send 500 + if !res.finished + res.send(500) logger = require('logger-sharelatex') req = body:req.body From 41b32deac39d4124422c424778bfbcf3ef6efc83 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 13 Mar 2014 14:04:46 +0000 Subject: [PATCH 044/555] changed conversions to work with new image magick feature pdf:fit-page= --- .../filestore/app/coffee/FileConverter.coffee | 20 +++++++------------ 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index c53828d454..94edce7f4b 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -20,7 +20,9 @@ module.exports = if !_.include approvedFormats, requestedFormat 
err = new Error("invalid format requested") return callback err - args = "nice convert -flatten -density 300 #{sourcePath} #{destPath}" + width = "600x" + args = "nice convert -define pdf:fit-page=#{width} -flatten -density 300 #{sourcePath} #{destPath}" + console.log args exec args, childProcessOpts, (err, stdout, stderr)-> timer.done() if err? @@ -33,12 +35,8 @@ module.exports = logger.log sourcePath:sourcePath, "thumbnail convert file" destPath = "#{sourcePath}.png" sourcePath = "#{sourcePath}[0]" - args = - src: sourcePath - dst: destPath - width: 424 - height: 300 - args = "nice convert -flatten -background white -resize 260x -density 300 #{sourcePath} #{destPath}" + width = "260x" + args = "nice convert -flatten -background white -density 300 -define pdf:fit-page=#{width} #{sourcePath} -resize #{width} #{destPath}" exec args, childProcessOpts, (err, stdout, stderr)-> if err? logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to preview" @@ -50,12 +48,8 @@ module.exports = logger.log sourcePath:sourcePath, "preview convert file" destPath = "#{sourcePath}.png" sourcePath = "#{sourcePath}[0]" - args = - src: sourcePath - dst: destPath - width: 600 - height: 849 - args = "nice convert -flatten -background white -resize 548x -density 300 #{sourcePath} #{destPath}" + width = "548x" + args = "nice convert -flatten -background white -density 300 -define pdf:fit-page=#{width} #{sourcePath} -resize #{width} #{destPath}" exec args, childProcessOpts, (err, stdout, stderr)-> if err? logger.err err:err, stderr:stderr, sourcePath:sourcePath, destPath:destPath, "something went wrong converting file to preview" From 191170509f232796196dca65b20dae02483d5da4 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 14 Mar 2014 08:55:28 +0000 Subject: [PATCH 045/555] bumped convert timeout to 40 seconds --- services/filestore/app/coffee/FileConverter.coffee | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index 94edce7f4b..c96b805180 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -4,11 +4,12 @@ logger = require("logger-sharelatex") exec = require('child_process').exec approvedFormats = ["png"] -twentySeconds = 20 * 1000 +fourtySeconds = 40 * 1000 childProcessOpts = killSignal: "SIGKILL" - timeout: twentySeconds + timeout: fourtySeconds + module.exports = From 5e5f9cf4e51f602d08b58526d3be0c60ca6e8ca0 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 31 Mar 2014 15:15:00 +0100 Subject: [PATCH 046/555] Replace all instances of / in file paths in FSPersistorManager --- services/filestore/app/coffee/FSPersistorManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index dd4e39d981..032d4edb29 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -3,7 +3,7 @@ fs = require("fs") LocalFileWriter = require("./LocalFileWriter") filterName = (key) -> - return key.replace /\//, "_" + return key.replace /\//g, "_" module.exports = From d0816096b6560f21c6cc589d7f102e3f572a0f61 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 2 Apr 2014 17:45:41 +0100 Subject: [PATCH 047/555] Use default settings file --- services/filestore/.gitignore | 2 ++ 
...opment.coffee => settings.defaults.coffee} | 34 +++++++------------ services/filestore/package.json | 4 +-- services/filestore/user_files/.gitignore | 0 4 files changed, 17 insertions(+), 23 deletions(-) rename services/filestore/config/{settings.development.coffee => settings.defaults.coffee} (51%) create mode 100644 services/filestore/user_files/.gitignore diff --git a/services/filestore/.gitignore b/services/filestore/.gitignore index 6d486a3b2c..723e09aef3 100644 --- a/services/filestore/.gitignore +++ b/services/filestore/.gitignore @@ -55,6 +55,8 @@ public/minjs/ test/unit/js/ test/acceptence/js +user_files/* + **.swp /log.json diff --git a/services/filestore/config/settings.development.coffee b/services/filestore/config/settings.defaults.coffee similarity index 51% rename from services/filestore/config/settings.development.coffee rename to services/filestore/config/settings.defaults.coffee index 19088d3c76..642e1b5c3d 100644 --- a/services/filestore/config/settings.development.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -1,3 +1,5 @@ +Path = require "path" + module.exports = internal: filestore: @@ -5,34 +7,24 @@ module.exports = host: "localhost" filestore: - # which backend persistor to use. - # choices are + # Which backend persistor to use. + # Choices are # s3 - Amazon S3 # fs - local filesystem - backend: "s3" + backend: "fs" stores: - # where to store user and template binary files + # where to store user and template binary files # - # For Amazon S3 this is the bucket name to store binary files in - # Must contain full url like: .s3.amazonaws.com + # For Amazon S3 this is the bucket name to store binary files in. # # For local filesystem this is the directory to store the files in. - # Must contain full path, e.g. "/var/lib/sharelatex/data" + # Must contain full path, e.g. "/var/lib/sharelatex/data". # This path must exist, not be tmpfs and be writable to by the user sharelatex is run as. - user_files: "" - s3: - # if you are using S3, then fill in your S3 details below - key: "" - secret: "" - - # ShareLaTeX stores binary files like images in S3. - # Fill in your Amazon S3 credentials below. 
- s3: - key: '' - secret: '' - buckets: - user_files: "" - template_files: "" + user_files: Path.resolve(__dirname + "/../user_files") + # if you are using S3, then fill in your S3 details below + # s3: + # key: "" + # secret: "" # Filestore health check # ---------------------- diff --git a/services/filestore/package.json b/services/filestore/package.json index f623ce2ae3..029b1e97e5 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -2,8 +2,8 @@ "name": "filestore-sharelatex", "version": "0.0.1", "dependencies": { - "settings": "git+https://github.com/sharelatex/settings-sharelatex.git#master", - "logger": "git+https://github.com/sharelatex/logger-sharelatex.git#master", + "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#master", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#master", "request": "2.14.0", "lynx": "0.0.11", "grunt-mocha-test": "~0.8.2", diff --git a/services/filestore/user_files/.gitignore b/services/filestore/user_files/.gitignore new file mode 100644 index 0000000000..e69de29bb2 From be8fdcfd6a45892d57d1ecfe82aaffbf420c69e4 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 7 May 2014 08:23:44 +0100 Subject: [PATCH 048/555] added null check on send property of res --- services/filestore/app/coffee/FileController.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 25c612d870..d1221f7b32 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -15,7 +15,7 @@ module.exports = FileHandler.getFile bucket, key, {format:format,style:style}, (err, fileStream)-> if err? logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" - res.send 500 + res?.send? 500 else if req.query.cacheWarm logger.log key:key, bucket:bucket, format:format, style:style, "request is only for cache warm so not sending stream" res.send 200 From 09224007a7f829dda77cc65e657f67ae919c02b7 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 7 May 2014 09:06:05 +0100 Subject: [PATCH 049/555] on domain error wait 3 seconds to send 500 to allow for other cleanup. Also check inside function if the res has finished. #confused --- services/filestore/app.coffee | 6 ++++-- services/filestore/app/coffee/FileController.coffee | 3 ++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 027ec11119..0e405d6e34 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -35,8 +35,10 @@ app.use (req, res, next) -> requestDomain.add req requestDomain.add res requestDomain.on "error", (err)-> - if !res.finished - res.send(500) + setTimeout(-> + if !res.finished + res.send(500) + , 3000) logger = require('logger-sharelatex') req = body:req.body diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index d1221f7b32..2beaef8a96 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -15,7 +15,8 @@ module.exports = FileHandler.getFile bucket, key, {format:format,style:style}, (err, fileStream)-> if err? logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" - res?.send? 500 + if !res.finished and res?.send? 
+ res.send 500 else if req.query.cacheWarm logger.log key:key, bucket:bucket, format:format, style:style, "request is only for cache warm so not sending stream" res.send 200 From 76f929f4158a0f45fbcd6b122dfa671c32fc7a69 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 9 May 2014 13:30:35 +0100 Subject: [PATCH 050/555] Add in new http monitoring --- services/filestore/app.coffee | 10 ++++++---- services/filestore/package.json | 1 + 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 0e405d6e34..d5d0e1a459 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -1,7 +1,6 @@ express = require('express') logger = require('logger-sharelatex') logger.initialize("filestore") -metrics = require("./app/js/metrics") settings = require("settings-sharelatex") request = require("request") fileController = require("./app/js/FileController") @@ -11,9 +10,13 @@ appIsOk = true app = express() streamBuffers = require("stream-buffers") +Metrics = require "metrics-sharelatex" +Metrics.initialize("filestore") +Metrics.open_sockets.monitor(logger) app.configure -> app.use express.bodyParser() + app.use Metrics.http.monitor(logger) app.configure 'development', -> console.log "Development Enviroment" @@ -21,13 +24,12 @@ app.configure 'development', -> app.configure 'production', -> console.log "Production Enviroment" - app.use express.logger() app.use express.errorHandler() -metrics.inc "startup" +Metrics.inc "startup" app.use (req, res, next)-> - metrics.inc "http-request" + Metrics.inc "http-request" next() app.use (req, res, next) -> diff --git a/services/filestore/package.json b/services/filestore/package.json index 029b1e97e5..f60ad6d2c9 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -4,6 +4,7 @@ "dependencies": { "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#master", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#master", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#master", "request": "2.14.0", "lynx": "0.0.11", "grunt-mocha-test": "~0.8.2", From 3bbeff5aff1ab8e50e5087bbaa4e969bf8f9e371 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 12 May 2014 15:45:15 +0100 Subject: [PATCH 051/555] moved all metrics to use the module, deleting old metrics file and added different check in err domain --- services/filestore/app.coffee | 3 ++- .../app/coffee/FileController.coffee | 2 +- .../filestore/app/coffee/FileConverter.coffee | 2 +- .../app/coffee/LocalFileWriter.coffee | 2 +- services/filestore/app/coffee/metrics.coffee | 24 ------------------- 5 files changed, 5 insertions(+), 28 deletions(-) delete mode 100644 services/filestore/app/coffee/metrics.coffee diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index d5d0e1a459..03daabaa19 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -37,8 +37,9 @@ app.use (req, res, next) -> requestDomain.add req requestDomain.add res requestDomain.on "error", (err)-> + metrics.inc "err.uncaught" setTimeout(-> - if !res.finished + if !res.headerSent res.send(500) , 3000) logger = require('logger-sharelatex') diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 2beaef8a96..3b83e203fd 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ 
b/services/filestore/app/coffee/FileController.coffee @@ -2,7 +2,7 @@ PersistorManager = require("./PersistorManager") settings = require("settings-sharelatex") logger = require("logger-sharelatex") FileHandler = require("./FileHandler") -metrics = require("./metrics") +metrics = require("metrics-sharelatex") oneDayInSeconds = 60 * 60 * 24 module.exports = diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index c96b805180..0d6eb0d9f3 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -1,5 +1,5 @@ _ = require("underscore") -metrics = require("./metrics") +metrics = require("metrics-sharelatex") logger = require("logger-sharelatex") exec = require('child_process').exec approvedFormats = ["png"] diff --git a/services/filestore/app/coffee/LocalFileWriter.coffee b/services/filestore/app/coffee/LocalFileWriter.coffee index bf0c0fd82f..71cbb32622 100644 --- a/services/filestore/app/coffee/LocalFileWriter.coffee +++ b/services/filestore/app/coffee/LocalFileWriter.coffee @@ -3,7 +3,7 @@ uuid = require('node-uuid') path = require("path") _ = require("underscore") logger = require("logger-sharelatex") -metrics = require("./metrics") +metrics = require("metrics-sharelatex") module.exports = diff --git a/services/filestore/app/coffee/metrics.coffee b/services/filestore/app/coffee/metrics.coffee deleted file mode 100644 index cd5c7ab215..0000000000 --- a/services/filestore/app/coffee/metrics.coffee +++ /dev/null @@ -1,24 +0,0 @@ -StatsD = require('lynx') -settings = require('settings-sharelatex') -statsd = new StatsD('localhost', 8125, {on_error:->}) - -buildKey = (key)-> "filestore.#{process.env.NODE_ENV}.#{key}" - -module.exports = - set : (key, value, sampleRate = 1)-> - statsd.set buildKey(key), value, sampleRate - - inc : (key, sampleRate = 1)-> - statsd.increment buildKey(key), sampleRate - - Timer : class - constructor :(key, sampleRate = 1)-> - this.start = new Date() - this.key = buildKey(key) - done:-> - timeSpan = new Date - this.start - statsd.timing(this.key, timeSpan, this.sampleRate) - - gauge : (key, value, sampleRate = 1)-> - statsd.gauge key, value, sampleRate - From ce67c8c655c2f2f5ade0a6e896a46241119bcc0b Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 12 May 2014 15:57:08 +0100 Subject: [PATCH 052/555] added logging to err domain --- services/filestore/app.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 03daabaa19..974d4b8967 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -40,6 +40,7 @@ app.use (req, res, next) -> metrics.inc "err.uncaught" setTimeout(-> if !res.headerSent + logger.log err:err, "sending 500 our as header has not been sent yet for uncaught exception" res.send(500) , 3000) logger = require('logger-sharelatex') From 1f1f535c8bcf054b97d1e7565de62e047c978a18 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 12 May 2014 18:33:09 +0100 Subject: [PATCH 053/555] removed lines which may be causing issue with domain --- services/filestore/app.coffee | 2 -- 1 file changed, 2 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 974d4b8967..3c8fcde1c1 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -37,10 +37,8 @@ app.use (req, res, next) -> requestDomain.add req requestDomain.add res requestDomain.on "error", (err)-> - metrics.inc "err.uncaught" 
setTimeout(-> if !res.headerSent - logger.log err:err, "sending 500 our as header has not been sent yet for uncaught exception" res.send(500) , 3000) logger = require('logger-sharelatex') From de0d79e1bf87fc2cdb512d2c184f5ce3a9cbf0ee Mon Sep 17 00:00:00 2001 From: James Allen Date: Sat, 17 May 2014 21:01:48 +0100 Subject: [PATCH 054/555] Use configurable upload directory --- services/filestore/app/coffee/LocalFileWriter.coffee | 3 ++- services/filestore/config/settings.defaults.coffee | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/LocalFileWriter.coffee b/services/filestore/app/coffee/LocalFileWriter.coffee index 71cbb32622..a839c46656 100644 --- a/services/filestore/app/coffee/LocalFileWriter.coffee +++ b/services/filestore/app/coffee/LocalFileWriter.coffee @@ -4,6 +4,7 @@ path = require("path") _ = require("underscore") logger = require("logger-sharelatex") metrics = require("metrics-sharelatex") +Settings = require("settings-sharelatex") module.exports = @@ -33,4 +34,4 @@ module.exports = if !key? key = uuid.v1() key = key.replace(/\//g,"-") - path.join(__dirname, "../../uploads/#{key}") + path.join(Settings.path.uploadFolder, key) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 642e1b5c3d..a7c92528ac 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -26,6 +26,9 @@ module.exports = # key: "" # secret: "" + path: + uploadFolder: Path.resolve(__dirname + "/../uploads") + # Filestore health check # ---------------------- # Project and file details to check in persistor when calling /health_check From daf4f1ffd49a1b542e424ce68b47d53b3751d4e3 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 23 May 2014 13:54:20 +0100 Subject: [PATCH 055/555] Send content-length header when getting file --- .../app/coffee/FSPersistorManager.coffee | 13 +++++++------ .../app/coffee/FileController.coffee | 3 ++- .../filestore/app/coffee/FileHandler.coffee | 10 +++++----- .../app/coffee/S3PersistorManager.coffee | 4 ++-- .../coffee/FSPersistorManagerTests.coffee | 9 +++++---- .../unit/coffee/FileControllerTests.coffee | 5 +++-- .../coffee/S3PersistorManagerTests.coffee | 19 ++++++++++++++----- 7 files changed, 38 insertions(+), 25 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 032d4edb29..c86cd35727 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -28,12 +28,13 @@ module.exports = getFileStream: (location, name, callback = (err, res)->)-> filteredName = filterName name logger.log location:location, name:filteredName, "getting file" - sourceStream = fs.createReadStream "#{location}/#{filteredName}" - sourceStream.on 'error', (err) -> - logger.err err:err, location:location, name:name, "Error reading from file" - callback err - callback null,sourceStream - + path = "#{location}/#{filteredName}" + fs.stat path, (error, stat) -> + sourceStream = fs.createReadStream path + sourceStream.on 'error', (err) -> + logger.err err:err, location:location, name:name, "Error reading from file" + callback err + callback null, sourceStream, stat.size copyFile: (location, fromName, toName, callback = (err)->)-> filteredFromName=filterName fromName diff --git a/services/filestore/app/coffee/FileController.coffee 
b/services/filestore/app/coffee/FileController.coffee index 3b83e203fd..2d3792642e 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -12,7 +12,7 @@ module.exports = {key, bucket} = req {format, style} = req.query logger.log key:key, bucket:bucket, format:format, style:style, "reciving request to get file" - FileHandler.getFile bucket, key, {format:format,style:style}, (err, fileStream)-> + FileHandler.getFile bucket, key, {format:format,style:style}, (err, fileStream, size)-> if err? logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" if !res.finished and res?.send? @@ -22,6 +22,7 @@ module.exports = res.send 200 else logger.log key:key, bucket:bucket, format:format, style:style, "sending file to response" + res.header("Content-Length", size) fileStream.pipe res insertFile: (req, res)-> diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index aa9602b5a8..b2535e4488 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -22,20 +22,20 @@ module.exports = (done)-> PersistorManager.deleteFile bucket, convetedKey, done ], callback - getFile: (bucket, key, opts = {}, callback)-> + getFile: (bucket, key, opts = {}, callback = (err, fileStream, size) ->)-> logger.log bucket:bucket, key:key, opts:opts, "getting file" if !opts.format? and !opts.style? @_getStandardFile bucket, key, opts, callback else @_getConvertedFile bucket, key, opts, callback - _getStandardFile: (bucket, key, opts, callback)-> - PersistorManager.getFileStream bucket, key, (err, fileStream)-> + _getStandardFile: (bucket, key, opts, callback = (err, fileStream, size) ->)-> + PersistorManager.getFileStream bucket, key, (err, fileStream, size)-> if err? 
logger.err bucket:bucket, key:key, opts:opts, "error getting fileStream" - callback err, fileStream + callback err, fileStream, size - _getConvertedFile: (bucket, key, opts, callback)-> + _getConvertedFile: (bucket, key, opts, callback = (err, fileStream, size) ->)-> convetedKey = KeyBuilder.addCachingToKey(key, opts) PersistorManager.checkIfFileExists bucket, convetedKey, (err, exists)=> if exists diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 432294ced6..97759e879b 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -67,7 +67,7 @@ module.exports = return callback(err) @sendFile bucketName, key, fsPath, callback - getFileStream: (bucketName, key, callback = (err, res)->)-> + getFileStream: (bucketName, key, callback = (err, res, size)->)-> logger.log bucketName:bucketName, key:key, "getting file from s3" s3Client = knox.createClient key: settings.filestore.s3.key @@ -76,7 +76,7 @@ module.exports = s3Stream = s3Client.get(key) s3Stream.end() s3Stream.on 'response', (res) -> - callback null, res + callback null, res, res.headers["content-length"] s3Stream.on 'error', (err) -> logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" callback err diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index bf5f08ea9d..757d1cc8df 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -66,11 +66,12 @@ describe "FSPersistorManagerTests", -> describe "getFileStream", -> it "should use correct file location", (done) -> - @Fs.createReadStream.returns( - on:-> - ) - @FSPersistorManager.getFileStream @location, @name1, (err,res)=> + @Fs.createReadStream.returns(@stream = on:->) + @Fs.stat = sinon.stub().callsArgWith(1, null, { size: @size = 42 }) + @FSPersistorManager.getFileStream @location, @name1, (err, res, size)=> @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal.true + res.should.equal @stream + size.should.equal @size done() describe "copyFile", -> diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index ecf067976f..d30d819f2a 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -43,15 +43,16 @@ describe "FileController", -> project_id:@project_id file_id:@file_id @res = - setHeader: -> + header: sinon.stub() @fileStream = {} describe "getFile", -> it "should pipe the stream", (done)-> - @FileHandler.getFile.callsArgWith(3, null, @fileStream) + @FileHandler.getFile.callsArgWith(3, null, @fileStream, @size = 42) @fileStream.pipe = (res)=> res.should.equal @res + res.header.calledWith("Content-Length", @size).should.equal true done() @controller.getFile @req, @res diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index fe70f1008d..efe142a6f8 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -45,13 +45,22 @@ describe "S3PersistorManagerTests", -> it "should use correct key", (done)-> - 
@stubbedKnoxClient.get.returns( - on:-> + @response = + headers: + "content-length": @size = 42 + + @stubbedKnoxClient.get.returns(@stream = + on: (e, callback) => + if e == "response" + callback(@response) end:-> + ) - @S3PersistorManager.getFileStream @bucketName, @key, @fsPath, (err)=> - @stubbedKnoxClient.get.calledWith(@key).should.equal true - done() + @S3PersistorManager.getFileStream @bucketName, @key, (err, res, size) => + res.should.equal @response + size.should.equal @size + @stubbedKnoxClient.get.calledWith(@key).should.equal true + done() describe "sendFile", -> From e5298752458e953b81ac995f26b0474e14cb2793 Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 23 May 2014 13:57:18 +0100 Subject: [PATCH 056/555] Revert "Send content-length header when getting file" This reverts commit 6f5f7a98fd3ce53091aacc744f43c7704ba73461. --- .../app/coffee/FSPersistorManager.coffee | 13 ++++++------- .../app/coffee/FileController.coffee | 3 +-- .../filestore/app/coffee/FileHandler.coffee | 10 +++++----- .../app/coffee/S3PersistorManager.coffee | 4 ++-- .../coffee/FSPersistorManagerTests.coffee | 9 ++++----- .../unit/coffee/FileControllerTests.coffee | 5 ++--- .../coffee/S3PersistorManagerTests.coffee | 19 +++++-------------- 7 files changed, 25 insertions(+), 38 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index c86cd35727..032d4edb29 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -28,13 +28,12 @@ module.exports = getFileStream: (location, name, callback = (err, res)->)-> filteredName = filterName name logger.log location:location, name:filteredName, "getting file" - path = "#{location}/#{filteredName}" - fs.stat path, (error, stat) -> - sourceStream = fs.createReadStream path - sourceStream.on 'error', (err) -> - logger.err err:err, location:location, name:name, "Error reading from file" - callback err - callback null, sourceStream, stat.size + sourceStream = fs.createReadStream "#{location}/#{filteredName}" + sourceStream.on 'error', (err) -> + logger.err err:err, location:location, name:name, "Error reading from file" + callback err + callback null,sourceStream + copyFile: (location, fromName, toName, callback = (err)->)-> filteredFromName=filterName fromName diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 2d3792642e..3b83e203fd 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -12,7 +12,7 @@ module.exports = {key, bucket} = req {format, style} = req.query logger.log key:key, bucket:bucket, format:format, style:style, "reciving request to get file" - FileHandler.getFile bucket, key, {format:format,style:style}, (err, fileStream, size)-> + FileHandler.getFile bucket, key, {format:format,style:style}, (err, fileStream)-> if err? logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" if !res.finished and res?.send? 
@@ -22,7 +22,6 @@ module.exports = res.send 200 else logger.log key:key, bucket:bucket, format:format, style:style, "sending file to response" - res.header("Content-Length", size) fileStream.pipe res insertFile: (req, res)-> diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index b2535e4488..aa9602b5a8 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -22,20 +22,20 @@ module.exports = (done)-> PersistorManager.deleteFile bucket, convetedKey, done ], callback - getFile: (bucket, key, opts = {}, callback = (err, fileStream, size) ->)-> + getFile: (bucket, key, opts = {}, callback)-> logger.log bucket:bucket, key:key, opts:opts, "getting file" if !opts.format? and !opts.style? @_getStandardFile bucket, key, opts, callback else @_getConvertedFile bucket, key, opts, callback - _getStandardFile: (bucket, key, opts, callback = (err, fileStream, size) ->)-> - PersistorManager.getFileStream bucket, key, (err, fileStream, size)-> + _getStandardFile: (bucket, key, opts, callback)-> + PersistorManager.getFileStream bucket, key, (err, fileStream)-> if err? logger.err bucket:bucket, key:key, opts:opts, "error getting fileStream" - callback err, fileStream, size + callback err, fileStream - _getConvertedFile: (bucket, key, opts, callback = (err, fileStream, size) ->)-> + _getConvertedFile: (bucket, key, opts, callback)-> convetedKey = KeyBuilder.addCachingToKey(key, opts) PersistorManager.checkIfFileExists bucket, convetedKey, (err, exists)=> if exists diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 97759e879b..432294ced6 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -67,7 +67,7 @@ module.exports = return callback(err) @sendFile bucketName, key, fsPath, callback - getFileStream: (bucketName, key, callback = (err, res, size)->)-> + getFileStream: (bucketName, key, callback = (err, res)->)-> logger.log bucketName:bucketName, key:key, "getting file from s3" s3Client = knox.createClient key: settings.filestore.s3.key @@ -76,7 +76,7 @@ module.exports = s3Stream = s3Client.get(key) s3Stream.end() s3Stream.on 'response', (res) -> - callback null, res, res.headers["content-length"] + callback null, res s3Stream.on 'error', (err) -> logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" callback err diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index 757d1cc8df..bf5f08ea9d 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -66,12 +66,11 @@ describe "FSPersistorManagerTests", -> describe "getFileStream", -> it "should use correct file location", (done) -> - @Fs.createReadStream.returns(@stream = on:->) - @Fs.stat = sinon.stub().callsArgWith(1, null, { size: @size = 42 }) - @FSPersistorManager.getFileStream @location, @name1, (err, res, size)=> + @Fs.createReadStream.returns( + on:-> + ) + @FSPersistorManager.getFileStream @location, @name1, (err,res)=> @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal.true - res.should.equal @stream - size.should.equal @size done() describe "copyFile", -> diff --git 
a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index d30d819f2a..ecf067976f 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -43,16 +43,15 @@ describe "FileController", -> project_id:@project_id file_id:@file_id @res = - header: sinon.stub() + setHeader: -> @fileStream = {} describe "getFile", -> it "should pipe the stream", (done)-> - @FileHandler.getFile.callsArgWith(3, null, @fileStream, @size = 42) + @FileHandler.getFile.callsArgWith(3, null, @fileStream) @fileStream.pipe = (res)=> res.should.equal @res - res.header.calledWith("Content-Length", @size).should.equal true done() @controller.getFile @req, @res diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index efe142a6f8..fe70f1008d 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -45,22 +45,13 @@ describe "S3PersistorManagerTests", -> it "should use correct key", (done)-> - @response = - headers: - "content-length": @size = 42 - - @stubbedKnoxClient.get.returns(@stream = - on: (e, callback) => - if e == "response" - callback(@response) + @stubbedKnoxClient.get.returns( + on:-> end:-> - ) - @S3PersistorManager.getFileStream @bucketName, @key, (err, res, size) => - res.should.equal @response - size.should.equal @size - @stubbedKnoxClient.get.calledWith(@key).should.equal true - done() + @S3PersistorManager.getFileStream @bucketName, @key, @fsPath, (err)=> + @stubbedKnoxClient.get.calledWith(@key).should.equal true + done() describe "sendFile", -> From 415d937f846ddf5173f7152caf75df0cc0962f83 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 2 Jun 2014 16:49:49 +0100 Subject: [PATCH 057/555] Add more logging and don't delete temp files --- .../filestore/app/coffee/S3PersistorManager.coffee | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 432294ced6..ef8cf3ea4d 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -49,9 +49,9 @@ module.exports = if res.statusCode != 200 logger.err bucketName:bucketName, key:key, fsPath:fsPath, "non 200 response from s3 putting file" return callback("non 200 response from s3 on put file") - LocalFileWriter.deleteFile fsPath, (err)-> - logger.log res:res, bucketName:bucketName, key:key, fsPath:fsPath,"file uploaded to s3" - callback(err) + #LocalFileWriter.deleteFile fsPath, (err)-> + logger.log res:res, bucketName:bucketName, key:key, fsPath:fsPath,"file uploaded to s3" + callback(err) putEventEmiter.on "error", (err)-> logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error emmited on put of file" callback err @@ -100,7 +100,12 @@ module.exports = logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong deleting file in aws" callback(err) - deleteDirectory: (bucketName, key, callback)-> + deleteDirectory: (bucketName, key, _callback)-> + callback = (args...) -> + logger.log key: key, bucketName: bucketName, "calling delete callback" + _callback(args...) 
+ + logger.log key: key, bucketName: bucketName, "deleting directory" s3Client = knox.createClient key: settings.filestore.s3.key secret: settings.filestore.s3.secret From b3f796a093c72b85db516103635508bc194a8aef Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 2 Jun 2014 17:02:45 +0100 Subject: [PATCH 058/555] More logging --- services/filestore/app/coffee/S3PersistorManager.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index ef8cf3ea4d..fbbd673499 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -111,6 +111,7 @@ module.exports = secret: settings.filestore.s3.secret bucket: bucketName s3Client.list prefix:key, (err, data)-> + logger.log data: data, key: key, bucketName: bucketName, "got file list" keys = _.map data.Contents, (entry)-> return entry.Key s3Client.deleteMultiple keys, callback From e17aaba3bad9a23bfc24baced237604684edfb57 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 2 Jun 2014 17:11:25 +0100 Subject: [PATCH 059/555] Catch errors correctly --- services/filestore/app/coffee/FileHandler.coffee | 6 +++--- services/filestore/app/coffee/S3PersistorManager.coffee | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index aa9602b5a8..8968470300 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -11,9 +11,9 @@ module.exports = insertFile: (bucket, key, stream, callback)-> convetedKey = KeyBuilder.getConvertedFolderKey(key) - PersistorManager.deleteDirectory bucket, convetedKey, -> - PersistorManager.sendStream bucket, key, stream, -> - callback() + PersistorManager.deleteDirectory bucket, convetedKey, (error) -> + return callback(error) if error? + PersistorManager.sendStream bucket, key, stream, callback deleteFile: (bucket, key, callback)-> convetedKey = KeyBuilder.getConvertedFolderKey(bucket, key) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index fbbd673499..dead151905 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -102,7 +102,7 @@ module.exports = deleteDirectory: (bucketName, key, _callback)-> callback = (args...) -> - logger.log key: key, bucketName: bucketName, "calling delete callback" + logger.log key: key, bucketName: bucketName, args: args, "calling delete callback" _callback(args...) 
logger.log key: key, bucketName: bucketName, "deleting directory" From df6b5132c2176bcdff1658cb761f4a529ffafa9e Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 2 Jun 2014 17:25:40 +0100 Subject: [PATCH 060/555] Tidy up testing/logging --- .../filestore/app/coffee/S3PersistorManager.coffee | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index dead151905..703241920d 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -49,9 +49,9 @@ module.exports = if res.statusCode != 200 logger.err bucketName:bucketName, key:key, fsPath:fsPath, "non 200 response from s3 putting file" return callback("non 200 response from s3 on put file") - #LocalFileWriter.deleteFile fsPath, (err)-> - logger.log res:res, bucketName:bucketName, key:key, fsPath:fsPath,"file uploaded to s3" - callback(err) + LocalFileWriter.deleteFile fsPath, (err)-> + logger.log res:res, bucketName:bucketName, key:key, fsPath:fsPath,"file uploaded to s3" + callback(err) putEventEmiter.on "error", (err)-> logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error emmited on put of file" callback err @@ -101,9 +101,10 @@ module.exports = callback(err) deleteDirectory: (bucketName, key, _callback)-> + # deleteMultiple can call the callback multiple times so protect against this. callback = (args...) -> - logger.log key: key, bucketName: bucketName, args: args, "calling delete callback" _callback(args...) + _callback = () -> logger.log key: key, bucketName: bucketName, "deleting directory" s3Client = knox.createClient @@ -111,7 +112,6 @@ module.exports = secret: settings.filestore.s3.secret bucket: bucketName s3Client.list prefix:key, (err, data)-> - logger.log data: data, key: key, bucketName: bucketName, "got file list" keys = _.map data.Contents, (entry)-> return entry.Key s3Client.deleteMultiple keys, callback From a9be97622b37f39b2b709d191401f23a767705d2 Mon Sep 17 00:00:00 2001 From: c4live Date: Thu, 5 Jun 2014 10:31:05 +0200 Subject: [PATCH 061/555] Update FileHandler.coffee: ignore any error that happens while deleting directory during insertFile. --- services/filestore/app/coffee/FileHandler.coffee | 1 - 1 file changed, 1 deletion(-) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 8968470300..795460629a 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -12,7 +12,6 @@ module.exports = insertFile: (bucket, key, stream, callback)-> convetedKey = KeyBuilder.getConvertedFolderKey(key) PersistorManager.deleteDirectory bucket, convetedKey, (error) -> - return callback(error) if error? PersistorManager.sendStream bucket, key, stream, callback deleteFile: (bucket, key, callback)-> From 1e42221954fcaec1a28aaecde6dbf1c2eb3f8132 Mon Sep 17 00:00:00 2001 From: c4live Date: Thu, 5 Jun 2014 14:10:05 +0200 Subject: [PATCH 062/555] Update FSPersistorManager.coffee Do not propagate the error if trying to delete a directory that does not exist. 
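In other words, a missing directory should count as already deleted, while any other failure still propagates. A minimal sketch of that pattern (the helper name is hypothetical, and checking err.code for ENOENT is just one way to detect the missing-path case):

    fs = require "fs"

    # Remove a directory, treating "does not exist" as success.
    removeDirIfPresent = (path, callback = (err) ->) ->
      fs.rmdir path, (err) ->
        # ENOENT means the directory was never there, so there is nothing to report;
        # any other error is still passed back to the caller.
        if err? and err.code isnt "ENOENT"
          return callback(err)
        callback()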
--- services/filestore/app/coffee/FSPersistorManager.coffee | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 032d4edb29..992a9b25be 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -60,7 +60,10 @@ module.exports = filteredName = filterName name fs.rmdir "#{location}/#{filteredName}", (err) -> logger.err err:err, location:location, name:filteredName, "Error on rmdir." - callback err + if err and err.errno != 34 + callback err + else + callback() checkIfFileExists:(location, name, callback = (err,exists)->)-> filteredName = filterName name From 5bc89c3cbfca0fa9de05720e6579f3ed0b7926e6 Mon Sep 17 00:00:00 2001 From: c4live Date: Thu, 5 Jun 2014 14:10:49 +0200 Subject: [PATCH 063/555] Update FileHandler.coffee Restoring error check. --- services/filestore/app/coffee/FileHandler.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 795460629a..8968470300 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -12,6 +12,7 @@ module.exports = insertFile: (bucket, key, stream, callback)-> convetedKey = KeyBuilder.getConvertedFolderKey(key) PersistorManager.deleteDirectory bucket, convetedKey, (error) -> + return callback(error) if error? PersistorManager.sendStream bucket, key, stream, callback deleteFile: (bucket, key, callback)-> From 408bf43b985bd5fdc3798b89a87f0e1a06c63160 Mon Sep 17 00:00:00 2001 From: c4live Date: Thu, 5 Jun 2014 16:01:38 +0200 Subject: [PATCH 064/555] Update FSPersistorManager.coffee Using code 'ENOENT' for error checking instead of errno 34. --- services/filestore/app/coffee/FSPersistorManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 992a9b25be..cbdad516d6 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -60,7 +60,7 @@ module.exports = filteredName = filterName name fs.rmdir "#{location}/#{filteredName}", (err) -> logger.err err:err, location:location, name:filteredName, "Error on rmdir." 
- if err and err.errno != 34 + if err and err.code != 'ENOENT' callback err else callback() From 8a052d4c71b7cfca43e55127e6832c697b296584 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 17 Jun 2014 12:44:10 +0100 Subject: [PATCH 065/555] added coffeescript to package.json --- services/filestore/package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index f60ad6d2c9..15a5fa18ab 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -16,7 +16,8 @@ "async": "~0.2.10", "pngcrush": "0.0.3", "stream-buffers": "~0.2.5", - "node-transloadit": "0.0.4" + "node-transloadit": "0.0.4", + "coffee-script": "~1.7.1" }, "devDependencies": { "sinon": "", From c8c758642a0a154effc29847f73b41aad0562b86 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 17 Jun 2014 12:47:53 +0100 Subject: [PATCH 066/555] removed reference to easy image --- .../filestore/test/unit/coffee/FileConverterTests.coffee | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/services/filestore/test/unit/coffee/FileConverterTests.coffee b/services/filestore/test/unit/coffee/FileConverterTests.coffee index b56cb55eb2..a1305684d1 100644 --- a/services/filestore/test/unit/coffee/FileConverterTests.coffee +++ b/services/filestore/test/unit/coffee/FileConverterTests.coffee @@ -10,13 +10,9 @@ describe "FileConverter", -> beforeEach -> - @easyimage = - convert:sinon.stub() - exec: sinon.stub() @child_process = exec : sinon.stub() @converter = SandboxedModule.require modulePath, requires: - "easyimage":@easyimage 'child_process': @child_process "logger-sharelatex": log:-> @@ -55,7 +51,7 @@ describe "FileConverter", -> done() describe "thumbnail", -> - it "should call easy image resize with args", (done)-> + it "should call converter resize with args", (done)-> @child_process.exec.callsArgWith(2) @converter.thumbnail @sourcePath, (err)=> args = @child_process.exec.args[0][0] @@ -63,7 +59,7 @@ describe "FileConverter", -> done() describe "preview", -> - it "should call easy image resize with args", (done)-> + it "should call converter resize with args", (done)-> @child_process.exec.callsArgWith(2) @converter.preview @sourcePath, (err)=> args = @child_process.exec.args[0][0] From 04bffaecebc7ef8caff2f6d56cb53a6ce0045bfb Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 17 Jun 2014 13:33:36 +0100 Subject: [PATCH 067/555] get local file writer tests using fake settings --- .../filestore/test/unit/coffee/LocalFileWriterTests.coffee | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee b/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee index b8b443a040..0b9eec035e 100644 --- a/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee +++ b/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee @@ -18,11 +18,15 @@ describe "LocalFileWriter", -> @fs = createWriteStream : sinon.stub().returns(@writeStream) unlink: sinon.stub() + @settings = + path: + uploadFolder:"somewhere" @writer = SandboxedModule.require modulePath, requires: "fs": @fs "logger-sharelatex": log:-> err:-> + "settings-sharelatex":@settings @stubbedFsPath = "something/uploads/eio2k1j3" describe "writeStrem", -> From f83ab25b357f0efffaa434667796d49110215054 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 19 Jun 2014 17:07:50 +0100 Subject: [PATCH 068/555] basic acceptence test of sending and getting file back added --- 
services/filestore/Gruntfile.coffee | 21 ++++++- .../app/coffee/LocalFileWriter.coffee | 1 + .../app/coffee/S3PersistorManager.coffee | 9 --- services/filestore/package.json | 2 +- .../acceptence/coffee/SendingFileTest.coffee | 55 +++++++++++++++++++ 5 files changed, 76 insertions(+), 12 deletions(-) create mode 100644 services/filestore/test/acceptence/coffee/SendingFileTest.coffee diff --git a/services/filestore/Gruntfile.coffee b/services/filestore/Gruntfile.coffee index 6534dedc09..aa40cf7460 100644 --- a/services/filestore/Gruntfile.coffee +++ b/services/filestore/Gruntfile.coffee @@ -19,6 +19,14 @@ module.exports = (grunt) -> ext: '.js' server_tests: + expand: true, + flatten: false, + cwd: 'test/acceptence/coffee', + src: ['*.coffee', '**/*.coffee'], + dest: 'test/acceptence/js/', + ext: '.js' + + server_acc_tests: expand: true, flatten: false, cwd: 'test/unit/coffee', @@ -35,8 +43,10 @@ module.exports = (grunt) -> nodemon: dev: + script: 'app.js' options: - file: 'app.js' + ext:"*.coffee" + concurrent: dev: @@ -50,7 +60,12 @@ module.exports = (grunt) -> options: reporter: grunt.option('reporter') or 'spec' grep: grunt.option("grep") - + acceptence: + src: ["test/acceptence/js/#{grunt.option('feature') or '**'}/*.js"] + options: + reporter: grunt.option('reporter') or 'spec' + grep: grunt.option("grep") + grunt.loadNpmTasks 'grunt-contrib-coffee' grunt.loadNpmTasks 'grunt-contrib-watch' @@ -60,6 +75,8 @@ module.exports = (grunt) -> grunt.loadNpmTasks 'grunt-mocha-test' grunt.registerTask "test:unit", ["coffee", "mochaTest:unit"] + grunt.registerTask "test:acceptence", ["coffee", "mochaTest:acceptence"] + grunt.registerTask "ci", "test:unit" grunt.registerTask 'default', ['coffee', 'concurrent'] diff --git a/services/filestore/app/coffee/LocalFileWriter.coffee b/services/filestore/app/coffee/LocalFileWriter.coffee index a839c46656..12d505b05b 100644 --- a/services/filestore/app/coffee/LocalFileWriter.coffee +++ b/services/filestore/app/coffee/LocalFileWriter.coffee @@ -34,4 +34,5 @@ module.exports = if !key? 
key = uuid.v1() key = key.replace(/\//g,"-") + console.log Settings.path.uploadFolder, key path.join(Settings.path.uploadFolder, key) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 703241920d..a6e8d18243 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -13,14 +13,6 @@ _ = require("underscore") thirtySeconds = 30 * 1000 - -printSockets = -> - console.log require('https').globalAgent.sockets - console.log require('http').globalAgent.sockets - setTimeout printSockets, thirtySeconds - -printSockets() - buildDefaultOptions = (bucketName, method, key)-> return { aws: @@ -56,7 +48,6 @@ module.exports = logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error emmited on put of file" callback err - sendStream: (bucketName, key, readStream, callback)-> logger.log bucketName:bucketName, key:key, "sending file to s3" readStream.on "error", (err)-> diff --git a/services/filestore/package.json b/services/filestore/package.json index 15a5fa18ab..5b97e24e40 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -27,7 +27,7 @@ "grunt-contrib-requirejs": "0.4.1", "grunt-contrib-coffee": "0.7.0", "grunt-contrib-watch": "0.5.3", - "grunt-nodemon": "0.1.2", + "grunt-nodemon": "0.2.1", "grunt-contrib-clean": "0.5.0", "grunt-concurrent": "0.4.2" } diff --git a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee new file mode 100644 index 0000000000..db7625afb8 --- /dev/null +++ b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee @@ -0,0 +1,55 @@ + +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/LocalFileWriter.js" +SandboxedModule = require('sandboxed-module') +fs = require("fs") +request = require("request") +settings = require("settings-sharelatex") + +describe "Sending a file", -> + + before (done)-> + @localFileReadPath = "/tmp/filestore_acceptence_tests_file_read.txt" + @localFileWritePath = "/tmp/filestore_acceptence_tests_file_write.txt" + + @constantFileContent = [ + "hello world" + "line 2 goes here #{Math.random()}" + "there are 3 lines in all" + ].join("\n") + + fs.writeFile(@localFileReadPath, @constantFileContent, done) + @filestoreUrl = "http://localhost:#{settings.internal.filestore.port}" + + beforeEach (done)-> + fs.unlink @localFileWritePath, -> + done() + + + + it "should send a 200 for status endpoing", (done)-> + request "#{@filestoreUrl}/status", (err, response, body)-> + response.statusCode.should.equal 200 + body.indexOf("filestore").should.not.equal -1 + body.indexOf("up").should.not.equal -1 + done() + + it "should be able get the file back", (done)-> + @timeout(1000 * 10) + @fileUrl = "#{@filestoreUrl}/project/acceptence_tests/file/12345" + + writeStream = request.post(@fileUrl) + + writeStream.on "end", => + request.get @fileUrl, (err, response, body)=> + body.should.equal @constantFileContent + done() + + fs.createReadStream(@localFileReadPath).pipe writeStream + + + From 5d00f70a31e07e2db823825e601a1811c6e7a5c0 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 19 Jun 2014 17:16:45 +0100 Subject: [PATCH 069/555] added delete accp test --- .../acceptence/coffee/SendingFileTest.coffee | 27 ++++++++++++++----- 1 file changed, 20 insertions(+), 7 deletions(-) diff 
--git a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee index db7625afb8..7fe75ccf01 100644 --- a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee @@ -26,7 +26,7 @@ describe "Sending a file", -> @filestoreUrl = "http://localhost:#{settings.internal.filestore.port}" beforeEach (done)-> - fs.unlink @localFileWritePath, -> + fs.unlink @localFileWritePath, => done() @@ -38,18 +38,31 @@ describe "Sending a file", -> body.indexOf("up").should.not.equal -1 done() - it "should be able get the file back", (done)-> - @timeout(1000 * 10) - @fileUrl = "#{@filestoreUrl}/project/acceptence_tests/file/12345" + describe "with a file on the server", -> - writeStream = request.post(@fileUrl) + beforeEach (done)-> + @timeout(1000 * 5) + @fileUrl = "#{@filestoreUrl}/project/acceptence_tests/file/#{Math.random()}" - writeStream.on "end", => + writeStream = request.post(@fileUrl) + + writeStream.on "end", => + done() + fs.createReadStream(@localFileReadPath).pipe writeStream + + it "should be able get the file back", (done)-> + @timeout(1000 * 10) request.get @fileUrl, (err, response, body)=> body.should.equal @constantFileContent done() - fs.createReadStream(@localFileReadPath).pipe writeStream + it "should be able to delete the file", (done)-> + request.del @fileUrl, (err, response, body)=> + response.statusCode.should.equal 204 + request.get @fileUrl, (err, response, body)=> + body.indexOf("NoSuchKey").should.not.equal -1 + done() + From 72df59e81e286b23fe1677a7e697e14fe158c0c2 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 19 Jun 2014 17:32:04 +0100 Subject: [PATCH 070/555] added copy file acceptence test --- .../acceptence/coffee/SendingFileTest.coffee | 28 +++++++++++++++---- 1 file changed, 23 insertions(+), 5 deletions(-) diff --git a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee index 7fe75ccf01..781a1ef6ad 100644 --- a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee @@ -10,7 +10,7 @@ fs = require("fs") request = require("request") settings = require("settings-sharelatex") -describe "Sending a file", -> +describe "Filestore", -> before (done)-> @localFileReadPath = "/tmp/filestore_acceptence_tests_file_read.txt" @@ -41,13 +41,13 @@ describe "Sending a file", -> describe "with a file on the server", -> beforeEach (done)-> - @timeout(1000 * 5) - @fileUrl = "#{@filestoreUrl}/project/acceptence_tests/file/#{Math.random()}" + @timeout(1000 * 10) + @file_id = Math.random() + @fileUrl = "#{@filestoreUrl}/project/acceptence_tests/file/#{@file_id}" writeStream = request.post(@fileUrl) - writeStream.on "end", => - done() + writeStream.on "end", done fs.createReadStream(@localFileReadPath).pipe writeStream it "should be able get the file back", (done)-> @@ -57,12 +57,30 @@ describe "Sending a file", -> done() it "should be able to delete the file", (done)-> + @timeout(1000 * 20) request.del @fileUrl, (err, response, body)=> response.statusCode.should.equal 204 request.get @fileUrl, (err, response, body)=> body.indexOf("NoSuchKey").should.not.equal -1 done() + it "should be able to copy files", (done)-> + @timeout(1000 * 20) + + newProjectID = "acceptence_tests_copyied_project" + newFileId = Math.random() + newFileUrl = 
"#{@filestoreUrl}/project/#{newProjectID}/file/#{newFileId}" + opts = + uri: newFileUrl + json: + source: + project_id:"acceptence_tests" + file_id: @file_id + request.put opts, (err)=> + request.del @fileUrl, (err, response, body)=> + request.get newFileUrl, (err, response, body)=> + body.should.equal @constantFileContent + done() From b79dfcd477dcd5a814374a5ca343b1d6d3a78541 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 19 Aug 2014 14:10:37 +0100 Subject: [PATCH 071/555] Lock down module versions --- services/filestore/package.json | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index 5b97e24e40..9f10aacd72 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -1,10 +1,15 @@ { "name": "filestore-sharelatex", "version": "0.0.1", + "description": "An API for CRUD operations on binary files stored in S3", + "repository": { + "type": "git", + "url": "https://github.com/sharelatex/filestore-sharelatex.git" + }, "dependencies": { - "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#master", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#master", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#master", + "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0", "request": "2.14.0", "lynx": "0.0.11", "grunt-mocha-test": "~0.8.2", From 72ad81887a6ae6bdb7c1b86f25cc0bf76f020e3d Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 19 Aug 2014 14:10:48 +0100 Subject: [PATCH 072/555] Release version 0.1.0 --- services/filestore/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index 9f10aacd72..60bc7d7500 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -1,6 +1,6 @@ { "name": "filestore-sharelatex", - "version": "0.0.1", + "version": "0.1.0", "description": "An API for CRUD operations on binary files stored in S3", "repository": { "type": "git", From 9d97d1dc5382b724289f4ba14eb9623aaa7898d1 Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 27 Nov 2014 11:08:29 +0000 Subject: [PATCH 073/555] Handle templates locally ok --- services/filestore/.gitignore | 1 + services/filestore/config/settings.defaults.coffee | 1 + services/filestore/template_files/.gitignore | 0 3 files changed, 2 insertions(+) create mode 100644 services/filestore/template_files/.gitignore diff --git a/services/filestore/.gitignore b/services/filestore/.gitignore index 723e09aef3..7d881c3ca7 100644 --- a/services/filestore/.gitignore +++ b/services/filestore/.gitignore @@ -56,6 +56,7 @@ test/unit/js/ test/acceptence/js user_files/* +template_files/* **.swp diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index a7c92528ac..dccefea525 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -21,6 +21,7 @@ module.exports = # Must contain full path, e.g. "/var/lib/sharelatex/data". # This path must exist, not be tmpfs and be writable to by the user sharelatex is run as. 
user_files: Path.resolve(__dirname + "/../user_files") + template_files: Path.resolve(__dirname + "/../template_files") # if you are using S3, then fill in your S3 details below # s3: # key: "" diff --git a/services/filestore/template_files/.gitignore b/services/filestore/template_files/.gitignore new file mode 100644 index 0000000000..e69de29bb2 From dee7799fd4df914ba908d110fe7bea1b4bc45800 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 27 Nov 2014 13:48:39 +0000 Subject: [PATCH 074/555] added heapdump and endpoint to make dump --- services/filestore/app.coffee | 4 ++++ services/filestore/package.json | 29 +++++++++++++++-------------- 2 files changed, 19 insertions(+), 14 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 3c8fcde1c1..c899bf05a1 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -66,6 +66,10 @@ app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileContro app.get "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFile app.post "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.insertFile +app.get "/heapdump", (req, res)-> + require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename)-> + res.send filename + app.post "/shutdown", (req, res)-> appIsOk = false res.send() diff --git a/services/filestore/package.json b/services/filestore/package.json index 5b97e24e40..cf95afd0a2 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -2,22 +2,23 @@ "name": "filestore-sharelatex", "version": "0.0.1", "dependencies": { - "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#master", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#master", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#master", - "request": "2.14.0", - "lynx": "0.0.11", - "grunt-mocha-test": "~0.8.2", - "knox": "~0.8.8", - "node-uuid": "~1.4.1", - "underscore": "~1.5.2", - "express": "~3.4.8", - "longjohn": "~0.2.2", "async": "~0.2.10", - "pngcrush": "0.0.3", - "stream-buffers": "~0.2.5", + "coffee-script": "~1.7.1", + "express": "~3.4.8", + "grunt-mocha-test": "~0.8.2", + "heapdump": "^0.3.2", + "knox": "~0.8.8", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#master", + "longjohn": "~0.2.2", + "lynx": "0.0.11", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#master", "node-transloadit": "0.0.4", - "coffee-script": "~1.7.1" + "node-uuid": "~1.4.1", + "pngcrush": "0.0.3", + "request": "2.14.0", + "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#master", + "stream-buffers": "~0.2.5", + "underscore": "~1.5.2" }, "devDependencies": { "sinon": "", From e90eb4b3fb0b986fc50b3dfedc98b284dafce24e Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Sat, 29 Nov 2014 13:02:06 +0000 Subject: [PATCH 075/555] bump knox --- services/filestore/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index 9dd42f7a99..be103715f7 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -12,7 +12,7 @@ "express": "~3.4.8", "grunt-mocha-test": "~0.8.2", "heapdump": "^0.3.2", - "knox": "~0.8.8", + "knox": "~0.9.1", "logger-sharelatex": 
"git+https://github.com/sharelatex/logger-sharelatex.git#master", "longjohn": "~0.2.2", "lynx": "0.0.11", From c2e50c619d219b5467d81ea4d8ba349aeedf03c9 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 18 Dec 2014 16:55:11 +0000 Subject: [PATCH 076/555] request restart on unhandled exceptions to avoid memory leak --- services/filestore/app.coffee | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 3c8fcde1c1..a29943931e 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -37,6 +37,8 @@ app.use (req, res, next) -> requestDomain.add req requestDomain.add res requestDomain.on "error", (err)-> + # request a shutdown to prevent memory leaks + appIsOk = false setTimeout(-> if !res.headerSent res.send(500) From 55c4e220faa1b291897418495066afd7bc93b3c0 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 19 Dec 2014 16:28:42 +0000 Subject: [PATCH 077/555] prevent double callback in getFileStream --- services/filestore/app/coffee/S3PersistorManager.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index a6e8d18243..ef1824491e 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -59,6 +59,7 @@ module.exports = @sendFile bucketName, key, fsPath, callback getFileStream: (bucketName, key, callback = (err, res)->)-> + callback = _.once callback logger.log bucketName:bucketName, key:key, "getting file from s3" s3Client = knox.createClient key: settings.filestore.s3.key From af4c72150f0fa93f33eb156b699e9e73235cb441 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 19 Dec 2014 16:57:44 +0000 Subject: [PATCH 078/555] fix path in directory deletion --- services/filestore/app/coffee/FileHandler.coffee | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 8968470300..51aec4bba6 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -16,10 +16,10 @@ module.exports = PersistorManager.sendStream bucket, key, stream, callback deleteFile: (bucket, key, callback)-> - convetedKey = KeyBuilder.getConvertedFolderKey(bucket, key) + convetedKey = KeyBuilder.getConvertedFolderKey(key) async.parallel [ (done)-> PersistorManager.deleteFile bucket, key, done - (done)-> PersistorManager.deleteFile bucket, convetedKey, done + (done)-> PersistorManager.deleteDirectory bucket, convetedKey, done ], callback getFile: (bucket, key, opts = {}, callback)-> From 8744cf94a55f27df0f0d63f1db52afb0b16d7333 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 19 Dec 2014 16:58:04 +0000 Subject: [PATCH 079/555] delete directory with rimraf --- services/filestore/app/coffee/FSPersistorManager.coffee | 9 +++++---- services/filestore/package.json | 1 + 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index cbdad516d6..1f2d64b8cb 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -1,6 +1,7 @@ logger = require("logger-sharelatex") fs = require("fs") LocalFileWriter = require("./LocalFileWriter") +rimraf = require("rimraf") filterName = (key) -> return 
key.replace /\//g, "_" @@ -57,10 +58,10 @@ module.exports = callback err deleteDirectory: (location, name, callback = (err)->)-> - filteredName = filterName name - fs.rmdir "#{location}/#{filteredName}", (err) -> - logger.err err:err, location:location, name:filteredName, "Error on rmdir." - if err and err.code != 'ENOENT' + filteredName = filterName name.replace(/\/$/,'') + rimraf "#{location}/#{filteredName}", (err) -> + if err + logger.err err:err, location:location, name:filteredName, "Error on rimraf rmdir." callback err else callback() diff --git a/services/filestore/package.json b/services/filestore/package.json index be103715f7..c2fcaff080 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -21,6 +21,7 @@ "node-uuid": "~1.4.1", "pngcrush": "0.0.3", "request": "2.14.0", + "rimraf": "2.2.8", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#master", "stream-buffers": "~0.2.5", "underscore": "~1.5.2" From 7d96f3aadd58550571c56696ea7ac534c71f1df3 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 Dec 2014 11:10:05 +0000 Subject: [PATCH 080/555] use 'response' to send 'NoSuchKey' when local file is not found --- services/filestore/app/coffee/FSPersistorManager.coffee | 9 +++++++-- services/filestore/package.json | 1 + 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 1f2d64b8cb..94ededd700 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -2,6 +2,7 @@ logger = require("logger-sharelatex") fs = require("fs") LocalFileWriter = require("./LocalFileWriter") rimraf = require("rimraf") +response = require ("response") filterName = (key) -> return key.replace /\//g, "_" @@ -32,8 +33,12 @@ module.exports = sourceStream = fs.createReadStream "#{location}/#{filteredName}" sourceStream.on 'error', (err) -> logger.err err:err, location:location, name:name, "Error reading from file" - callback err - callback null,sourceStream + if err.code = 'ENOENT' + callback null, response().html('NoSuchKey: file not found\n') + else + callback err + sourceStream.on 'readable', () -> + callback null, sourceStream copyFile: (location, fromName, toName, callback = (err)->)-> diff --git a/services/filestore/package.json b/services/filestore/package.json index c2fcaff080..530f95d00b 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -21,6 +21,7 @@ "node-uuid": "~1.4.1", "pngcrush": "0.0.3", "request": "2.14.0", + "response": "0.14.0", "rimraf": "2.2.8", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#master", "stream-buffers": "~0.2.5", From 72ebaea5e24d355d1c2ebb0b8983a301f52a431f Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 Dec 2014 11:11:31 +0000 Subject: [PATCH 081/555] log FS delete errors only on failure --- services/filestore/app/coffee/FSPersistorManager.coffee | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 94ededd700..d00b112c27 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -59,13 +59,16 @@ module.exports = filteredName = filterName name logger.log location:location, name:filteredName, "delete file" fs.unlink 
"#{location}/#{filteredName}", (err) -> - logger.err err:err, location:location, name:filteredName, "Error on delete." - callback err + if err? + logger.err err:err, location:location, name:filteredName, "Error on delete." + callback err + else + callback() deleteDirectory: (location, name, callback = (err)->)-> filteredName = filterName name.replace(/\/$/,'') rimraf "#{location}/#{filteredName}", (err) -> - if err + if err? logger.err err:err, location:location, name:filteredName, "Error on rimraf rmdir." callback err else From dad209262f15115581e1b47b03a7f795ed58ac2b Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 Dec 2014 11:27:00 +0000 Subject: [PATCH 082/555] execute success callback when filesystem copyfile completed --- services/filestore/app/coffee/FSPersistorManager.coffee | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index d00b112c27..4d6d79b1a7 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -53,6 +53,8 @@ module.exports = targetStream.on 'error', (err) -> logger.err err:err, location:location, key:filteredToName, "Error writing to file" callback err + targetStream.on 'finish', () -> + callback null sourceStream.pipe targetStream deleteFile: (location, name, callback)-> From de465aee945e84b6f753918917bf79783fa1664c Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 Dec 2014 11:27:48 +0000 Subject: [PATCH 083/555] change copy test to match usage in web, check http status --- .../filestore/test/acceptence/coffee/SendingFileTest.coffee | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee index 781a1ef6ad..a3958ebf90 100644 --- a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee @@ -70,13 +70,15 @@ describe "Filestore", -> newProjectID = "acceptence_tests_copyied_project" newFileId = Math.random() newFileUrl = "#{@filestoreUrl}/project/#{newProjectID}/file/#{newFileId}" - opts = + opts = + method: 'put' uri: newFileUrl json: source: project_id:"acceptence_tests" file_id: @file_id - request.put opts, (err)=> + request opts, (err, response, body)=> + response.statusCode.should.equal 200 request.del @fileUrl, (err, response, body)=> request.get newFileUrl, (err, response, body)=> body.should.equal @constantFileContent From 64c6341844c7135aefc13ae7c4b55a4ed151163a Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 Dec 2014 11:34:13 +0000 Subject: [PATCH 084/555] check delete status code in copy test --- services/filestore/test/acceptence/coffee/SendingFileTest.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee index a3958ebf90..11668f230e 100644 --- a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee @@ -80,6 +80,7 @@ describe "Filestore", -> request opts, (err, response, body)=> response.statusCode.should.equal 200 request.del @fileUrl, (err, response, body)=> + response.statusCode.should.equal 204 request.get newFileUrl, (err, response, body)=> body.should.equal @constantFileContent done() From 
3f18f6a7142fbb023c9683072aab2f4140172d70 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 Dec 2014 12:58:11 +0000 Subject: [PATCH 085/555] update unit tests - FileHandler now calls deleteDirectory on cache directory --- services/filestore/test/unit/coffee/FileHandlerTests.coffee | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index b00f23af0e..2a2bf6e17d 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -69,6 +69,7 @@ describe "FileHandler", -> beforeEach -> @keyBuilder.getConvertedFolderKey.returns(@stubbedConvetedKey) @PersistorManager.deleteFile.callsArgWith(2) + @PersistorManager.deleteDirectory.callsArgWith(2) it "should tell the filestore manager to delete the file", (done)-> @handler.deleteFile @bucket, @key, => @@ -77,7 +78,7 @@ describe "FileHandler", -> it "should tell the filestore manager to delete the cached foler", (done)-> @handler.deleteFile @bucket, @key, => - @PersistorManager.deleteFile.calledWith(@bucket, @stubbedConvetedKey).should.equal true + @PersistorManager.deleteDirectory.calledWith(@bucket, @stubbedConvetedKey).should.equal true done() describe "getFile", -> From efd603584936147cf1f6a5ce6f8db83a69849164 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 Dec 2014 13:01:39 +0000 Subject: [PATCH 086/555] test against rimraf module when deleting directory recursively --- .../test/unit/coffee/FSPersistorManagerTests.coffee | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index bf5f08ea9d..b69b17c1aa 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -6,6 +6,7 @@ expect = chai.expect modulePath = "../../../app/js/FSPersistorManager.js" SandboxedModule = require('sandboxed-module') fs = require("fs") +response = require("response") describe "FSPersistorManagerTests", -> @@ -17,6 +18,7 @@ describe "FSPersistorManagerTests", -> unlink:sinon.stub() rmdir:sinon.stub() exists:sinon.stub() + @Rimraf = sinon.stub() @LocalFileWriter = writeStream: sinon.stub() @requires = @@ -25,6 +27,8 @@ describe "FSPersistorManagerTests", -> "logger-sharelatex": log:-> err:-> + "response":response + "rimraf":@Rimraf @location = "/tmp" @name1 = "530f2407e7ef165704000007/530f838b46d9a9e859000008" @name1Filtered ="530f2407e7ef165704000007_530f838b46d9a9e859000008" @@ -115,11 +119,11 @@ describe "FSPersistorManagerTests", -> describe "deleteDirectory", -> beforeEach -> - @Fs.rmdir.callsArgWith(1,@error) + @Rimraf.callsArgWith(1,@error) - it "Should call rmdir with correct options", (done) -> + it "Should call rmdir(rimraf) with correct options", (done) -> @FSPersistorManager.deleteDirectory @location, @name1, (err) => - @Fs.rmdir.calledWith("#{@location}/#{@name1}").should.equal.true + @Rimraf.calledWith("#{@location}/#{@name1}").should.equal.true done() it "Should propogate the error", (done) -> From 8b9213d09bd5d2a34bce23e355990cfaeeaa3b17 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 Dec 2014 13:07:03 +0000 Subject: [PATCH 087/555] fix getFileStream unit test as callback no longer fires immediately success callback is now called only when file is readable --- 
.../filestore/test/unit/coffee/FSPersistorManagerTests.coffee | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index b69b17c1aa..786f6be323 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -74,8 +74,8 @@ describe "FSPersistorManagerTests", -> on:-> ) @FSPersistorManager.getFileStream @location, @name1, (err,res)=> - @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal.true - done() + @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal.true + done() describe "copyFile", -> beforeEach -> From d4c281e22c9df2f59a8e6ec77e2f997647532e53 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 Dec 2014 13:10:40 +0000 Subject: [PATCH 088/555] add acceptance tests to travis.yml --- services/filestore/.travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/.travis.yml b/services/filestore/.travis.yml index 29f5884d60..ee2f36ad1c 100644 --- a/services/filestore/.travis.yml +++ b/services/filestore/.travis.yml @@ -12,6 +12,7 @@ install: script: - grunt test:unit + - grunt test:acceptance services: - redis-server From ad379ec1034bd8636b8333935634f6e35fcfffa1 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 Dec 2014 13:13:36 +0000 Subject: [PATCH 089/555] add grunt target for test:acceptance --- services/filestore/Gruntfile.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/Gruntfile.coffee b/services/filestore/Gruntfile.coffee index aa40cf7460..d2d066b543 100644 --- a/services/filestore/Gruntfile.coffee +++ b/services/filestore/Gruntfile.coffee @@ -76,6 +76,7 @@ module.exports = (grunt) -> grunt.registerTask "test:unit", ["coffee", "mochaTest:unit"] grunt.registerTask "test:acceptence", ["coffee", "mochaTest:acceptence"] + grunt.registerTask "test:acceptance", ["test:acceptence"] grunt.registerTask "ci", "test:unit" grunt.registerTask 'default', ['coffee', 'concurrent'] From f4ac7d422b8586a1b80599ce95888f2f758c4879 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 22 Dec 2014 13:41:07 +0000 Subject: [PATCH 090/555] run acceptance tests on travis under forever --- services/filestore/.travis.yml | 3 +++ services/filestore/Gruntfile.coffee | 6 ++++++ services/filestore/package.json | 3 ++- 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/services/filestore/.travis.yml b/services/filestore/.travis.yml index ee2f36ad1c..6adc08643a 100644 --- a/services/filestore/.travis.yml +++ b/services/filestore/.travis.yml @@ -10,6 +10,9 @@ install: - npm install - grunt install +before_script: + - grunt forever:app:start + script: - grunt test:unit - grunt test:acceptance diff --git a/services/filestore/Gruntfile.coffee b/services/filestore/Gruntfile.coffee index d2d066b543..91ae43bed1 100644 --- a/services/filestore/Gruntfile.coffee +++ b/services/filestore/Gruntfile.coffee @@ -2,6 +2,11 @@ module.exports = (grunt) -> # Project configuration. 
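# For reference, the CI sequence these Travis/Gruntfile changes set up: the
# compiled app.js is started in the background by grunt-forever, then the unit
# and acceptance suites run against the live server:
#   grunt forever:app:start
#   grunt test:unit
#   grunt test:acceptance
# A combined local task could look like this (hypothetical, not part of the patch):
#   grunt.registerTask "test:acceptance:local", ["coffee", "forever:app:start", "mochaTest:acceptence"]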
grunt.initConfig + forever: + app: + options: + index: "app.js" + coffee: server: expand: true, @@ -73,6 +78,7 @@ module.exports = (grunt) -> grunt.loadNpmTasks 'grunt-contrib-clean' grunt.loadNpmTasks 'grunt-concurrent' grunt.loadNpmTasks 'grunt-mocha-test' + grunt.loadNpmTasks 'grunt-forever' grunt.registerTask "test:unit", ["coffee", "mochaTest:unit"] grunt.registerTask "test:acceptence", ["coffee", "mochaTest:acceptence"] diff --git a/services/filestore/package.json b/services/filestore/package.json index 530f95d00b..919adc488f 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -37,6 +37,7 @@ "grunt-contrib-watch": "0.5.3", "grunt-nodemon": "0.2.1", "grunt-contrib-clean": "0.5.0", - "grunt-concurrent": "0.4.2" + "grunt-concurrent": "0.4.2", + "grunt-forever": "~0.4.4" } } From 74fc095913935cd5b84c895d264f4edc7331ddc8 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 23 Dec 2014 16:46:31 +0000 Subject: [PATCH 091/555] close HTTP keep-alive connections when shutting down --- services/filestore/app.coffee | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index bf7a6c2346..1f92d6e943 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -62,6 +62,12 @@ app.use (req, res, next) -> app.get "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile app.post "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.insertFile +app.use (req, res, next) -> + if not appIsOk + # when shutting down, close any HTTP keep-alive connections + res.set 'Connection', 'close' + next() + app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.copyFile app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile From 48f62990b4d479053d4c45153a8a2de237bb0595 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 23 Dec 2014 16:48:11 +0000 Subject: [PATCH 092/555] server can exit immediately, does not need a domain exit with an exception as no requests can be served --- services/filestore/app.coffee | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 1f92d6e943..c17b277bf5 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -106,14 +106,18 @@ app.get "/health_check", (req, res)-> app.get '*', (req, res)-> res.send 404 -serverDomain = domain.create() -serverDomain.run -> - server = require('http').createServer(app) - port = settings.internal.filestore.port or 3009 - host = settings.internal.filestore.host or "localhost" - server.listen port, host, -> - logger.log("filestore store listening on #{host}:#{port}") +server = require('http').createServer(app) +port = settings.internal.filestore.port or 3009 +host = settings.internal.filestore.host or "localhost" -serverDomain.on "error", (err)-> - logger.log err:err, "top level uncaught exception" +beginShutdown = () -> + appIsOk = false + server.close() + logger.log "server will stop accepting connections" +server.on "close", () -> + logger.log "closed all connections" + process.exit 1 + +server.listen port, host, -> + logger.log("filestore store listening on #{host}:#{port}") From d3fa6b4a01855cf897cd6b24ed470e8a0f47cab5 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 23 Dec 2014 16:49:34 +0000 Subject: [PATCH 093/555] catch exceptions within the body of the domain error handler --- 
services/filestore/app.coffee | 40 ++++++++++++++++++----------------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index c17b277bf5..e96b0414e3 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -37,26 +37,28 @@ app.use (req, res, next) -> requestDomain.add req requestDomain.add res requestDomain.on "error", (err)-> - # request a shutdown to prevent memory leaks - appIsOk = false - setTimeout(-> + try + appIsOk = false + # request a shutdown to prevent memory leaks + beginShutdown() if !res.headerSent - res.send(500) - , 3000) - logger = require('logger-sharelatex') - req = - body:req.body - headers:req.headers - url:req.url - key: req.key - statusCode: req.statusCode - err = - message: err.message - stack: err.stack - name: err.name - type: err.type - arguments: err.arguments - logger.err err:err, req:req, res:res, "uncaught exception thrown on request" + res.send(500, "uncaught exception") + logger = require('logger-sharelatex') + req = + body:req.body + headers:req.headers + url:req.url + key: req.key + statusCode: req.statusCode + err = + message: err.message + stack: err.stack + name: err.name + type: err.type + arguments: err.arguments + logger.err err:err, req:req, res:res, "uncaught exception thrown on request" + catch exception + logger.err err: exception, "exception in request domain handler" requestDomain.run next app.get "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile From d5478a8ebb9649b5208ab44ac37d085026281bde Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 23 Dec 2014 16:55:34 +0000 Subject: [PATCH 094/555] make sure shutdown only happens once --- services/filestore/app.coffee | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index e96b0414e3..f2b4c405be 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -113,9 +113,10 @@ port = settings.internal.filestore.port or 3009 host = settings.internal.filestore.host or "localhost" beginShutdown = () -> - appIsOk = false - server.close() - logger.log "server will stop accepting connections" + if appIsOk + appIsOk = false + server.close() + logger.log "server will stop accepting connections" server.on "close", () -> logger.log "closed all connections" From e8e2338c0cafa212782329cffdf48c089063d411 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Mon, 5 Jan 2015 16:56:19 +0000 Subject: [PATCH 095/555] move connection:close middleware ahead of other routes --- services/filestore/app.coffee | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index f2b4c405be..9a1bfc7ef9 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -61,15 +61,15 @@ app.use (req, res, next) -> logger.err err: exception, "exception in request domain handler" requestDomain.run next -app.get "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile -app.post "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.insertFile - app.use (req, res, next) -> if not appIsOk # when shutting down, close any HTTP keep-alive connections res.set 'Connection', 'close' next() +app.get "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile +app.post "/project/:project_id/file/:file_id", keyBuilder.userFileKey, 
fileController.insertFile + app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.copyFile app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile From 02101868e1d9f46bede47b345a9efff3c386dc16 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 7 Jan 2015 10:26:30 +0000 Subject: [PATCH 096/555] put all shutdown logic in beginShutdown, force program to exit after 2 mins --- services/filestore/app.coffee | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 9a1bfc7ef9..2638787b83 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -103,8 +103,6 @@ app.get "/health_check", (req, res)-> else res.send(503) - - app.get '*', (req, res)-> res.send 404 @@ -115,12 +113,16 @@ host = settings.internal.filestore.host or "localhost" beginShutdown = () -> if appIsOk appIsOk = false - server.close() + # hard-terminate this process if graceful shutdown fails + killTimer = setTimeout () -> + process.exit 1 + , 120*1000 + killTimer.unref?() # prevent timer from keeping process alive + server.close () -> + logger.log "closed all connections" + Metrics.close() + process.disconnect?() logger.log "server will stop accepting connections" -server.on "close", () -> - logger.log "closed all connections" - process.exit 1 - server.listen port, host, -> logger.log("filestore store listening on #{host}:#{port}") From 191d0a49866946911cbcaa38544047241d2a476b Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 7 Jan 2015 10:28:47 +0000 Subject: [PATCH 097/555] handle SIGTERM in filestore --- services/filestore/app.coffee | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 2638787b83..d76d53a993 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -126,3 +126,7 @@ beginShutdown = () -> server.listen port, host, -> logger.log("filestore store listening on #{host}:#{port}") + +process.on 'SIGTERM', () -> + logger.log("filestore got SIGTERM, shutting down gracefully") + beginShutdown() From 79292b46022e28ca7721e59bfb79635d3a5cfd95 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 7 Jan 2015 10:29:05 +0000 Subject: [PATCH 098/555] fix log message --- services/filestore/app.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index d76d53a993..293f5da8e8 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -125,7 +125,7 @@ beginShutdown = () -> logger.log "server will stop accepting connections" server.listen port, host, -> - logger.log("filestore store listening on #{host}:#{port}") + logger.log("filestore listening on #{host}:#{port}") process.on 'SIGTERM', () -> logger.log("filestore got SIGTERM, shutting down gracefully") From e03ee962167f60fa156f9cfc0941ae2c68f49167 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 7 Jan 2015 10:33:55 +0000 Subject: [PATCH 099/555] added simple clustering master --- services/filestore/Gruntfile.coffee | 4 ++-- services/filestore/cluster.coffee | 9 +++++++++ services/filestore/package.json | 1 + 3 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 services/filestore/cluster.coffee diff --git a/services/filestore/Gruntfile.coffee b/services/filestore/Gruntfile.coffee index aa40cf7460..a506496578 100644 --- a/services/filestore/Gruntfile.coffee +++ 
b/services/filestore/Gruntfile.coffee @@ -14,7 +14,7 @@ module.exports = (grunt) -> app_server: expand: true, flatten: false, - src: ['app.coffee'], + src: ['app.coffee', 'cluster.coffee'], dest: './', ext: '.js' @@ -36,7 +36,7 @@ module.exports = (grunt) -> watch: server_coffee: - files: ['app/*.coffee','app/**/*.coffee', 'test/unit/coffee/**/*.coffee', 'test/unit/coffee/*.coffee', "app.coffee"] + files: ['app/*.coffee','app/**/*.coffee', 'test/unit/coffee/**/*.coffee', 'test/unit/coffee/*.coffee', "app.coffee", "cluster.coffee"] tasks: ["clean", 'coffee', 'mochaTest'] clean: ["app/js", "test/unit/js", "app.js"] diff --git a/services/filestore/cluster.coffee b/services/filestore/cluster.coffee new file mode 100644 index 0000000000..a0ca60a619 --- /dev/null +++ b/services/filestore/cluster.coffee @@ -0,0 +1,9 @@ +recluster = require "recluster" # https://github.com/doxout/recluster +path = require "path" + +cluster = recluster path.join(__dirname, 'app.js'), { + workers: 2, + backoff: 0, + readyWhen: "listening" +} +cluster.run() diff --git a/services/filestore/package.json b/services/filestore/package.json index be103715f7..3586041555 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -20,6 +20,7 @@ "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", "pngcrush": "0.0.3", + "recluster": "^0.3.7", "request": "2.14.0", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#master", "stream-buffers": "~0.2.5", From 4b543724661fab994b9305dbeac5cfa53b671f7b Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Fri, 16 Jan 2015 11:04:15 +0100 Subject: [PATCH 100/555] miscellaneous cleanup _ Remove some references to scribtex _ Fix some issue when templates are unavailable --- services/filestore/.gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/.gitignore b/services/filestore/.gitignore index 7d881c3ca7..365bc9160d 100644 --- a/services/filestore/.gitignore +++ b/services/filestore/.gitignore @@ -40,6 +40,7 @@ test/IntergrationTests/js/* data/*/* app.js +cluster.js app/js/* test/IntergrationTests/js/* test/UnitTests/js/* From eb40dbbe964e64064ff58796bb3f9bab7d1f3610 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 10 Feb 2015 13:19:04 +0000 Subject: [PATCH 101/555] Release version 0.1.2 --- services/filestore/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index bb3cae64e9..900f7fa6fc 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -1,6 +1,6 @@ { "name": "filestore-sharelatex", - "version": "0.1.0", + "version": "0.1.2", "description": "An API for CRUD operations on binary files stored in S3", "repository": { "type": "git", From 45689fd2b8f632f4a7353078663df26538333d4f Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 26 Feb 2015 11:32:05 +0000 Subject: [PATCH 102/555] Only call getFileStream callback once --- services/filestore/app/coffee/FSPersistorManager.coffee | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 4d6d79b1a7..c2f4564987 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -27,7 +27,10 @@ module.exports = return callback err @sendFile location, target, fsPath, callback - getFileStream: (location, name, callback = (err, res)->)-> + 
getFileStream: (location, name, _callback = (err, res)->) -> + callback = (args...) -> + _callback(args...) + _callback = () -> filteredName = filterName name logger.log location:location, name:filteredName, "getting file" sourceStream = fs.createReadStream "#{location}/#{filteredName}" @@ -38,6 +41,8 @@ module.exports = else callback err sourceStream.on 'readable', () -> + # This can be called multiple times, but the callback wrapper + # ensures the callback is only called once callback null, sourceStream From eb89337ada3568a5d559d9714abc4f18858d774a Mon Sep 17 00:00:00 2001 From: James Allen Date: Thu, 26 Feb 2015 11:32:21 +0000 Subject: [PATCH 103/555] Release version 0.1.3 --- services/filestore/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index 900f7fa6fc..e8f0bffe52 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -1,6 +1,6 @@ { "name": "filestore-sharelatex", - "version": "0.1.2", + "version": "0.1.3", "description": "An API for CRUD operations on binary files stored in S3", "repository": { "type": "git", From 33d8974d02598612638b1556e464e3764afb2dc3 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 12 Mar 2015 17:09:27 +0000 Subject: [PATCH 104/555] kill process group on timeout of convert commands --- .../filestore/app/coffee/FileConverter.coffee | 9 ++--- services/filestore/app/coffee/SafeExec.coffee | 40 +++++++++++++++++++ 2 files changed, 44 insertions(+), 5 deletions(-) create mode 100644 services/filestore/app/coffee/SafeExec.coffee diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index 0d6eb0d9f3..d791e4c2d0 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -1,7 +1,7 @@ _ = require("underscore") metrics = require("metrics-sharelatex") logger = require("logger-sharelatex") -exec = require('child_process').exec +safe_exec = require('./SafeExec') approvedFormats = ["png"] fourtySeconds = 40 * 1000 @@ -23,8 +23,7 @@ module.exports = return callback err width = "600x" args = "nice convert -define pdf:fit-page=#{width} -flatten -density 300 #{sourcePath} #{destPath}" - console.log args - exec args, childProcessOpts, (err, stdout, stderr)-> + safe_exec args, childProcessOpts, (err, stdout, stderr)-> timer.done() if err? logger.err err:err, stderr:stderr, sourcePath:sourcePath, requestedFormat:requestedFormat, destPath:destPath, "something went wrong converting file" @@ -38,7 +37,7 @@ module.exports = sourcePath = "#{sourcePath}[0]" width = "260x" args = "nice convert -flatten -background white -density 300 -define pdf:fit-page=#{width} #{sourcePath} -resize #{width} #{destPath}" - exec args, childProcessOpts, (err, stdout, stderr)-> + safe_exec args, childProcessOpts, (err, stdout, stderr)-> if err? logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to preview" else @@ -51,7 +50,7 @@ module.exports = sourcePath = "#{sourcePath}[0]" width = "548x" args = "nice convert -flatten -background white -density 300 -define pdf:fit-page=#{width} #{sourcePath} -resize #{width} #{destPath}" - exec args, childProcessOpts, (err, stdout, stderr)-> + safe_exec args, childProcessOpts, (err, stdout, stderr)-> if err? 
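# Aside on the double-callback guards above (patches 077 and 102): stream events
# such as 'error' and 'readable' can fire more than once, so both persistors wrap
# the exported callback so it runs at most once. A minimal sketch of that pattern,
# equivalent to underscore's _.once used in S3PersistorManager:
once = (fn) ->
  called = false
  (args...) ->
    return if called
    called = true
    fn(args...)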
logger.err err:err, stderr:stderr, sourcePath:sourcePath, destPath:destPath, "something went wrong converting file to preview" else diff --git a/services/filestore/app/coffee/SafeExec.coffee b/services/filestore/app/coffee/SafeExec.coffee new file mode 100644 index 0000000000..65da163538 --- /dev/null +++ b/services/filestore/app/coffee/SafeExec.coffee @@ -0,0 +1,40 @@ +_ = require("underscore") +logger = require("logger-sharelatex") +child_process = require('child_process') + +# execute a command in the same way as 'exec' but with a timeout that +# kills all child processes +# +# we spawn the command with 'detached:true' to make a new process +# group, then we can kill everything in that process group. + +module.exports = (command, options, callback = (err, stdout, stderr) ->) -> + [cmd, args...] = command.split(' ') + + child = child_process.spawn cmd, args, {detached:true} + stdout = "" + stderr = "" + + cleanup = _.once (err) -> + clearTimeout killTimer if killTimer? + callback err, stdout, stderr + + if options.timeout? + killTimer = setTimeout () -> + try + process.kill -child.pid, options.killSignal || "SIGTERM" + catch error + logger.log process: child.pid, kill_error: error, "error killing process" + , options.timeout + + child.on 'exit', (code, signal) -> + cleanup signal + + child.on 'error', (err) -> + cleanup err + + child.stdout.on 'data', (chunk) -> + stdout += chunk + + child.stderr.on 'data', (chunk) -> + stderr += chunk From 0e5abe1ff8d72d2b9482953d34650cdbe58bf42a Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 13 Mar 2015 09:31:43 +0000 Subject: [PATCH 105/555] update tests to use safe_exec --- .../unit/coffee/FileConverterTests.coffee | 23 +++++++++---------- 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/services/filestore/test/unit/coffee/FileConverterTests.coffee b/services/filestore/test/unit/coffee/FileConverterTests.coffee index a1305684d1..f8a8add22f 100644 --- a/services/filestore/test/unit/coffee/FileConverterTests.coffee +++ b/services/filestore/test/unit/coffee/FileConverterTests.coffee @@ -10,10 +10,9 @@ describe "FileConverter", -> beforeEach -> - @child_process = - exec : sinon.stub() + @safe_exec = sinon.stub() @converter = SandboxedModule.require modulePath, requires: - 'child_process': @child_process + "./SafeExec": @safe_exec "logger-sharelatex": log:-> err:-> @@ -25,43 +24,43 @@ describe "FileConverter", -> describe "convert", -> it "should convert the source to the requested format", (done)-> - @child_process.exec.callsArgWith(2) + @safe_exec.callsArgWith(2) @converter.convert @sourcePath, @format, (err)=> - args = @child_process.exec.args[0][0] + args = @safe_exec.args[0][0] args.indexOf(@sourcePath).should.not.equal -1 args.indexOf(@format).should.not.equal -1 done() it "should return the dest path", (done)-> - @child_process.exec.callsArgWith(2) + @safe_exec.callsArgWith(2) @converter.convert @sourcePath, @format, (err, destPath)=> destPath.should.equal "#{@sourcePath}.#{@format}" done() it "should return the error from convert", (done)-> - @child_process.exec.callsArgWith(2, @error) + @safe_exec.callsArgWith(2, @error) @converter.convert @sourcePath, @format, (err)=> err.should.equal @error done() it "should not accapt an non aproved format", (done)-> - @child_process.exec.callsArgWith(2) + @safe_exec.callsArgWith(2) @converter.convert @sourcePath, "ahhhhh", (err)=> expect(err).to.exist done() describe "thumbnail", -> it "should call converter resize with args", (done)-> - @child_process.exec.callsArgWith(2) + 
@safe_exec.callsArgWith(2) @converter.thumbnail @sourcePath, (err)=> - args = @child_process.exec.args[0][0] + args = @safe_exec.args[0][0] args.indexOf(@sourcePath).should.not.equal -1 done() describe "preview", -> it "should call converter resize with args", (done)-> - @child_process.exec.callsArgWith(2) + @safe_exec.callsArgWith(2) @converter.preview @sourcePath, (err)=> - args = @child_process.exec.args[0][0] + args = @safe_exec.args[0][0] args.indexOf(@sourcePath).should.not.equal -1 done() From 63ee4d1e7d73d2e6713c9f9a1ba80bb3620420b8 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 13 Mar 2015 10:10:21 +0000 Subject: [PATCH 106/555] use close event instead of exit to capture stdout/stderr correctly --- services/filestore/app/coffee/SafeExec.coffee | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/coffee/SafeExec.coffee b/services/filestore/app/coffee/SafeExec.coffee index 65da163538..0244f14783 100644 --- a/services/filestore/app/coffee/SafeExec.coffee +++ b/services/filestore/app/coffee/SafeExec.coffee @@ -27,8 +27,9 @@ module.exports = (command, options, callback = (err, stdout, stderr) ->) -> logger.log process: child.pid, kill_error: error, "error killing process" , options.timeout - child.on 'exit', (code, signal) -> - cleanup signal + child.on 'close', (code, signal) -> + err = if code then new Error("exit status #{code}") else signal + cleanup err child.on 'error', (err) -> cleanup err From 9aaef729ad774faf273c07009a87fb7741f651f4 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 13 Mar 2015 10:10:51 +0000 Subject: [PATCH 107/555] cleanup and comments --- services/filestore/app/coffee/FileConverter.coffee | 2 +- services/filestore/app/coffee/SafeExec.coffee | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index d791e4c2d0..bf38833017 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -1,7 +1,7 @@ _ = require("underscore") metrics = require("metrics-sharelatex") logger = require("logger-sharelatex") -safe_exec = require('./SafeExec') +safe_exec = require("./SafeExec") approvedFormats = ["png"] fourtySeconds = 40 * 1000 diff --git a/services/filestore/app/coffee/SafeExec.coffee b/services/filestore/app/coffee/SafeExec.coffee index 0244f14783..217aab4748 100644 --- a/services/filestore/app/coffee/SafeExec.coffee +++ b/services/filestore/app/coffee/SafeExec.coffee @@ -9,6 +9,7 @@ child_process = require('child_process') # group, then we can kill everything in that process group. module.exports = (command, options, callback = (err, stdout, stderr) ->) -> + # options are {timeout: number-of-milliseconds, killSignal: signal-name} [cmd, args...] = command.split(' ') child = child_process.spawn cmd, args, {detached:true} @@ -22,6 +23,7 @@ module.exports = (command, options, callback = (err, stdout, stderr) ->) -> if options.timeout? 
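# Usage sketch for this module (mirroring FileConverter above; the command and
# file paths are illustrative only, and the require path assumes the compiled
# app/js output): run an ImageMagick convert with a hard timeout, killing the
# whole detached process group if it overruns.
safeExec = require("./app/js/SafeExec")
opts = {timeout: 40 * 1000, killSignal: "SIGTERM"}
safeExec "nice convert -density 300 /tmp/input.pdf /tmp/output.png", opts, (err, stdout, stderr) ->
  console.log(if err? then "convert failed: #{err.message ? err}" else "converted ok")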
killTimer = setTimeout () -> try + # use negative process id to kill process group process.kill -child.pid, options.killSignal || "SIGTERM" catch error logger.log process: child.pid, kill_error: error, "error killing process" From a7b9376919af9b943ebd7c22048bdc13b3959a9e Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 13 Mar 2015 10:15:37 +0000 Subject: [PATCH 108/555] use SIGTERM instead of SIGKILL to allow process to shut down cleanly --- services/filestore/app/coffee/FileConverter.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index bf38833017..c142d3a5e0 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -7,7 +7,7 @@ approvedFormats = ["png"] fourtySeconds = 40 * 1000 childProcessOpts = - killSignal: "SIGKILL" + killSignal: "SIGTERM" timeout: fourtySeconds From 143d44e54bfb49a7b034c033e24c21596b70291c Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 13 Mar 2015 10:15:53 +0000 Subject: [PATCH 109/555] add tests for SafeExec module --- .../test/unit/coffee/SafeExec.coffee | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 services/filestore/test/unit/coffee/SafeExec.coffee diff --git a/services/filestore/test/unit/coffee/SafeExec.coffee b/services/filestore/test/unit/coffee/SafeExec.coffee new file mode 100644 index 0000000000..bdfcec3a70 --- /dev/null +++ b/services/filestore/test/unit/coffee/SafeExec.coffee @@ -0,0 +1,42 @@ +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/SafeExec.js" +SandboxedModule = require('sandboxed-module') + +describe "SafeExec", -> + + beforeEach -> + + @safe_exec = SandboxedModule.require modulePath, requires: + "logger-sharelatex": + log:-> + err:-> + @options = {timeout: 10*1000, killSignal: "SIGTERM" } + + describe "safe_exec", -> + + it "should execute a valid command", (done) -> + @safe_exec "/bin/echo hello", @options, (err, stdout, stderr) => + stdout.should.equal "hello\n" + should.not.exist(err) + done() + + it "should execute a command with non-zero exit status", (done) -> + @safe_exec "/bin/false", @options, (err, stdout, stderr) => + stdout.should.equal "" + stderr.should.equal "" + err.message.should.equal "exit status 1" + done() + + it "should handle an invalid command", (done) -> + @safe_exec "/bin/foobar", @options, (err, stdout, stderr) => + err.code.should.equal "ENOENT" + done() + + it "should handle a command that runs too long", (done) -> + @safe_exec "/bin/sleep 10", {timeout: 500, killSignal: "SIGTERM"}, (err, stdout, stderr) => + err.should.equal "SIGTERM" + done() From a370739722b7129b26f2c58d430c14217b3b6c8f Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 20 Mar 2015 14:20:13 +0000 Subject: [PATCH 110/555] Release version 0.1.4 --- services/filestore/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index e8f0bffe52..c2ba0e4952 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -1,6 +1,6 @@ { "name": "filestore-sharelatex", - "version": "0.1.3", + "version": "0.1.4", "description": "An API for CRUD operations on binary files stored in S3", "repository": { "type": "git", From 801b5653e442c0abf63cf8d51bbd02b103c3aa73 Mon Sep 17 00:00:00 2001 From: James 
Allen Date: Fri, 20 Mar 2015 14:27:25 +0000 Subject: [PATCH 111/555] Fix unit test when false is not at /bin/false --- services/filestore/test/unit/coffee/SafeExec.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/test/unit/coffee/SafeExec.coffee b/services/filestore/test/unit/coffee/SafeExec.coffee index bdfcec3a70..b63851aa57 100644 --- a/services/filestore/test/unit/coffee/SafeExec.coffee +++ b/services/filestore/test/unit/coffee/SafeExec.coffee @@ -25,7 +25,7 @@ describe "SafeExec", -> done() it "should execute a command with non-zero exit status", (done) -> - @safe_exec "/bin/false", @options, (err, stdout, stderr) => + @safe_exec "/usr/bin/env false", @options, (err, stdout, stderr) => stdout.should.equal "" stderr.should.equal "" err.message.should.equal "exit status 1" From 07e53bbb2fef5eb7e92a717e50490426583ca683 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 31 Mar 2015 10:35:33 +0100 Subject: [PATCH 112/555] handle error when listing bucket --- services/filestore/app/coffee/S3PersistorManager.coffee | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index ef1824491e..d5cb06074e 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -104,6 +104,9 @@ module.exports = secret: settings.filestore.s3.secret bucket: bucketName s3Client.list prefix:key, (err, data)-> + if err? + logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in aws" + return callback(err) keys = _.map data.Contents, (entry)-> return entry.Key s3Client.deleteMultiple keys, callback From b3432f016cdeb7fe0a1010385d8bd799e1610913 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 9 Apr 2015 17:14:47 +0100 Subject: [PATCH 113/555] allow proxying of template images though filestore --- services/filestore/app.coffee | 1 + services/filestore/app/coffee/FileConverter.coffee | 2 +- services/filestore/app/coffee/KeyBuilder.coffee | 4 ++-- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 293f5da8e8..f42cf4f1b4 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -74,6 +74,7 @@ app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileContro app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile app.get "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFile +app.get "/template/:template_id/v/:version/:format/:sub_type", keyBuilder.templateFileKey, fileController.getFile app.post "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.insertFile app.get "/heapdump", (req, res)-> diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index 0d6eb0d9f3..da10711f99 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -40,7 +40,7 @@ module.exports = args = "nice convert -flatten -background white -density 300 -define pdf:fit-page=#{width} #{sourcePath} -resize #{width} #{destPath}" exec args, childProcessOpts, (err, stdout, stderr)-> if err? 
- logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to preview" + logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to thumbnail" else logger.log sourcePath:sourcePath, destPath:destPath, "finished thumbnailing file" callback(err, destPath) diff --git a/services/filestore/app/coffee/KeyBuilder.coffee b/services/filestore/app/coffee/KeyBuilder.coffee index 71f6c05785..03a65a8337 100644 --- a/services/filestore/app/coffee/KeyBuilder.coffee +++ b/services/filestore/app/coffee/KeyBuilder.coffee @@ -24,8 +24,8 @@ module.exports = next() templateFileKey: (req, res, next)-> - {template_id, format, version} = req.params - req.key = "#{template_id}/v/#{version}/#{format}" + {template_id, format, version, sub_type} = req.params + req.key = "#{template_id}/v/#{version}/#{format}/#{sub_type}" req.bucket = settings.filestore.stores.template_files req.version = version opts = req.query From 7c2e4b00ed1de139c24fd6b9c49e02ad15e67d6e Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 13 Apr 2015 12:01:49 +0100 Subject: [PATCH 114/555] no file subtype can be an issue, added a check on it in the keybuilder --- services/filestore/app/coffee/KeyBuilder.coffee | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/KeyBuilder.coffee b/services/filestore/app/coffee/KeyBuilder.coffee index 03a65a8337..9b2a101e6b 100644 --- a/services/filestore/app/coffee/KeyBuilder.coffee +++ b/services/filestore/app/coffee/KeyBuilder.coffee @@ -25,7 +25,9 @@ module.exports = templateFileKey: (req, res, next)-> {template_id, format, version, sub_type} = req.params - req.key = "#{template_id}/v/#{version}/#{format}/#{sub_type}" + req.key = "#{template_id}/v/#{version}/#{format}" + if sub_type? 
+ req.key = "#{req.key}/#{sub_type}" req.bucket = settings.filestore.stores.template_files req.version = version opts = req.query From 33e654accf91dba9481ecffc00faa1cd7e3cd05e Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 22 Apr 2015 18:27:34 +0100 Subject: [PATCH 115/555] get the logging locally to work with bunyan --- services/filestore/Gruntfile.coffee | 9 +++++++-- services/filestore/package.json | 3 +++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/services/filestore/Gruntfile.coffee b/services/filestore/Gruntfile.coffee index c91476c201..ebf94e2fc9 100644 --- a/services/filestore/Gruntfile.coffee +++ b/services/filestore/Gruntfile.coffee @@ -52,7 +52,10 @@ module.exports = (grunt) -> options: ext:"*.coffee" - + execute: + app: + src: "app.js" + concurrent: dev: tasks: ['nodemon', 'watch'] @@ -79,13 +82,15 @@ module.exports = (grunt) -> grunt.loadNpmTasks 'grunt-concurrent' grunt.loadNpmTasks 'grunt-mocha-test' grunt.loadNpmTasks 'grunt-forever' + grunt.loadNpmTasks 'grunt-bunyan' + grunt.loadNpmTasks 'grunt-execute' grunt.registerTask "test:unit", ["coffee", "mochaTest:unit"] grunt.registerTask "test:acceptence", ["coffee", "mochaTest:acceptence"] grunt.registerTask "test:acceptance", ["test:acceptence"] grunt.registerTask "ci", "test:unit" - grunt.registerTask 'default', ['coffee', 'concurrent'] + grunt.registerTask 'default', ['coffee', 'bunyan','execute'] grunt.registerTask "compile", "coffee" grunt.registerTask "install", "compile" diff --git a/services/filestore/package.json b/services/filestore/package.json index c2ba0e4952..dd84289f30 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -8,8 +8,11 @@ }, "dependencies": { "async": "~0.2.10", + "bunyan": "^1.3.5", "coffee-script": "~1.7.1", "express": "~3.4.8", + "grunt-bunyan": "^0.5.0", + "grunt-execute": "^0.2.2", "grunt-mocha-test": "~0.8.2", "heapdump": "^0.3.2", "knox": "~0.9.1", From d5f98c9800945fd2adefb0b8d2564edd26abd331 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 30 Apr 2015 15:04:55 +0100 Subject: [PATCH 116/555] make startup message consistent --- services/filestore/app.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index f42cf4f1b4..aaacf704b2 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -126,7 +126,7 @@ beginShutdown = () -> logger.log "server will stop accepting connections" server.listen port, host, -> - logger.log("filestore listening on #{host}:#{port}") + logger.info "Filestore starting up, listening on #{host}:#{port}" process.on 'SIGTERM', () -> logger.log("filestore got SIGTERM, shutting down gracefully") From 7e9f8b885b90f396904657f738d9fe5b0c1b56bc Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 8 May 2015 15:15:48 +0100 Subject: [PATCH 117/555] add support for public files --- services/filestore/app.coffee | 8 ++++++++ services/filestore/app/coffee/KeyBuilder.coffee | 9 +++++++++ services/filestore/config/settings.defaults.coffee | 1 + 3 files changed, 18 insertions(+) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index aaacf704b2..11c323667d 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -77,6 +77,14 @@ app.get "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey app.get "/template/:template_id/v/:version/:format/:sub_type", keyBuilder.templateFileKey, fileController.getFile app.post 
"/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.insertFile + +app.get "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFile +app.post "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.insertFile + +app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.copyFile +app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile + + app.get "/heapdump", (req, res)-> require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename)-> res.send filename diff --git a/services/filestore/app/coffee/KeyBuilder.coffee b/services/filestore/app/coffee/KeyBuilder.coffee index 9b2a101e6b..0b5e41525f 100644 --- a/services/filestore/app/coffee/KeyBuilder.coffee +++ b/services/filestore/app/coffee/KeyBuilder.coffee @@ -23,6 +23,15 @@ module.exports = req.bucket = settings.filestore.stores.user_files next() + publicFileKey: (req, res, next)-> + {project_id, public_file_id} = req.params + if not settings.filestore.stores.public_files? + res.status(501).send("public files not available") + else + req.key = "#{project_id}/#{public_file_id}" + req.bucket = settings.filestore.stores.public_files + next() + templateFileKey: (req, res, next)-> {template_id, format, version, sub_type} = req.params req.key = "#{template_id}/v/#{version}/#{format}" diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index dccefea525..f84928598e 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -21,6 +21,7 @@ module.exports = # Must contain full path, e.g. "/var/lib/sharelatex/data". # This path must exist, not be tmpfs and be writable to by the user sharelatex is run as. 
user_files: Path.resolve(__dirname + "/../user_files") + public_files: Path.resolve(__dirname + "/../public_files") template_files: Path.resolve(__dirname + "/../template_files") # if you are using S3, then fill in your S3 details below # s3: From 8ebbacee5ac806f93d8a4ff50be13808221321dd Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 8 May 2015 15:19:45 +0100 Subject: [PATCH 118/555] move pipe after error handler setup --- services/filestore/app/coffee/LocalFileWriter.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/LocalFileWriter.coffee b/services/filestore/app/coffee/LocalFileWriter.coffee index 12d505b05b..2ecb3885df 100644 --- a/services/filestore/app/coffee/LocalFileWriter.coffee +++ b/services/filestore/app/coffee/LocalFileWriter.coffee @@ -14,7 +14,6 @@ module.exports = fsPath = @_getPath(key) logger.log fsPath:fsPath, "writing file locally" writeStream = fs.createWriteStream(fsPath) - stream.pipe writeStream writeStream.on "finish", -> timer.done() logger.log fsPath:fsPath, "finished writing file locally" @@ -25,6 +24,7 @@ module.exports = stream.on "error", (err)-> logger.log err:err, fsPath:fsPath, "problem writing file locally, with read stream" callback err + stream.pipe writeStream deleteFile: (fsPath, callback)-> logger.log fsPath:fsPath, "removing local temp file" From 9b844220929f338124914f18f3c02aa5e118f1ff Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 23 Jun 2015 13:41:14 +0100 Subject: [PATCH 119/555] added event loop monitor --- services/filestore/app.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 11c323667d..9c744e75ab 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -13,6 +13,7 @@ streamBuffers = require("stream-buffers") Metrics = require "metrics-sharelatex" Metrics.initialize("filestore") Metrics.open_sockets.monitor(logger) +Metrics.event_loop?.monitor(logger) app.configure -> app.use express.bodyParser() From 566e69c6cce7769b21845cc37ff63eb9ddc467e7 Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Tue, 21 Jul 2015 10:00:59 +0200 Subject: [PATCH 120/555] Make S3 Key/Secret pair optional. When using Instance Role in AWS, the credentials are unneeded. So make them optional to use. --- .../app/coffee/S3PersistorManager.coffee | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index d5cb06074e..c0fab76747 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -16,8 +16,8 @@ thirtySeconds = 30 * 1000 buildDefaultOptions = (bucketName, method, key)-> return { aws: - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret + key: settings.filestore.s3?.key + secret: settings.filestore.s3?.secret bucket: bucketName method: method timeout: thirtySeconds @@ -28,10 +28,10 @@ module.exports = sendFile: (bucketName, key, fsPath, callback)-> s3Client = knox.createClient - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret + key: settings.filestore.s3?.key + secret: settings.filestore.s3?.secret bucket: bucketName - putEventEmiter = s3Client.putFile fsPath, key, (err, res)-> + putEventEmitter = s3Client.putFile fsPath, key, (err, res)-> if err? 
logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath,"something went wrong uploading file to s3" return callback(err) @@ -44,8 +44,8 @@ module.exports = LocalFileWriter.deleteFile fsPath, (err)-> logger.log res:res, bucketName:bucketName, key:key, fsPath:fsPath,"file uploaded to s3" callback(err) - putEventEmiter.on "error", (err)-> - logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error emmited on put of file" + putEventEmitter.on "error", (err)-> + logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error received uploading file to s3" callback err sendStream: (bucketName, key, readStream, callback)-> @@ -62,8 +62,8 @@ module.exports = callback = _.once callback logger.log bucketName:bucketName, key:key, "getting file from s3" s3Client = knox.createClient - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret + key: settings.filestore.s3?.key + secret: settings.filestore.s3?.secret bucket: bucketName s3Stream = s3Client.get(key) s3Stream.end() @@ -76,12 +76,12 @@ module.exports = copyFile: (bucketName, sourceKey, destKey, callback)-> logger.log bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "copying file in s3" s3Client = knox.createClient - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret + key: settings.filestore.s3?.key + secret: settings.filestore.s3?.secret bucket: bucketName s3Client.copyFile sourceKey, destKey, (err)-> if err? - logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in aws" + logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in s3" callback(err) deleteFile: (bucketName, key, callback)-> @@ -89,7 +89,7 @@ module.exports = options = buildDefaultOptions(bucketName, "delete", key) request options, (err, res)-> if err? - logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong deleting file in aws" + logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong deleting file in s3" callback(err) deleteDirectory: (bucketName, key, _callback)-> @@ -100,12 +100,12 @@ module.exports = logger.log key: key, bucketName: bucketName, "deleting directory" s3Client = knox.createClient - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret + key: settings.filestore.s3?.key + secret: settings.filestore.s3?.secret bucket: bucketName s3Client.list prefix:key, (err, data)-> if err? - logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in aws" + logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in s3" return callback(err) keys = _.map data.Contents, (entry)-> return entry.Key @@ -116,13 +116,13 @@ module.exports = options = buildDefaultOptions(bucketName, "head", key) request options, (err, res)-> if err? - logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong checking file in aws" + logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong checking file in s3" return callback(err) if !res? 
logger.err err:err, res:res, bucketName:bucketName, key:key, "no response object returned when checking if file exists" err = new Error("no response from s3 #{bucketName} #{key}") return callback(err) exists = res.statusCode == 200 - logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exsists in s3" + logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exists in s3" callback(err, exists) From 3289451ecef2244cea81a2b3e9c3906d46849d00 Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Wed, 22 Jul 2015 16:41:45 +0200 Subject: [PATCH 121/555] Revert "Make S3 Key/Secret pair optional." This reverts commit 65122e603099a4f9ef5c5186b7eb8832340a993f. knox requires the key and secret arguments. --- .../app/coffee/S3PersistorManager.coffee | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index c0fab76747..d5cb06074e 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -16,8 +16,8 @@ thirtySeconds = 30 * 1000 buildDefaultOptions = (bucketName, method, key)-> return { aws: - key: settings.filestore.s3?.key - secret: settings.filestore.s3?.secret + key: settings.filestore.s3.key + secret: settings.filestore.s3.secret bucket: bucketName method: method timeout: thirtySeconds @@ -28,10 +28,10 @@ module.exports = sendFile: (bucketName, key, fsPath, callback)-> s3Client = knox.createClient - key: settings.filestore.s3?.key - secret: settings.filestore.s3?.secret + key: settings.filestore.s3.key + secret: settings.filestore.s3.secret bucket: bucketName - putEventEmitter = s3Client.putFile fsPath, key, (err, res)-> + putEventEmiter = s3Client.putFile fsPath, key, (err, res)-> if err? logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath,"something went wrong uploading file to s3" return callback(err) @@ -44,8 +44,8 @@ module.exports = LocalFileWriter.deleteFile fsPath, (err)-> logger.log res:res, bucketName:bucketName, key:key, fsPath:fsPath,"file uploaded to s3" callback(err) - putEventEmitter.on "error", (err)-> - logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error received uploading file to s3" + putEventEmiter.on "error", (err)-> + logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error emmited on put of file" callback err sendStream: (bucketName, key, readStream, callback)-> @@ -62,8 +62,8 @@ module.exports = callback = _.once callback logger.log bucketName:bucketName, key:key, "getting file from s3" s3Client = knox.createClient - key: settings.filestore.s3?.key - secret: settings.filestore.s3?.secret + key: settings.filestore.s3.key + secret: settings.filestore.s3.secret bucket: bucketName s3Stream = s3Client.get(key) s3Stream.end() @@ -76,12 +76,12 @@ module.exports = copyFile: (bucketName, sourceKey, destKey, callback)-> logger.log bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "copying file in s3" s3Client = knox.createClient - key: settings.filestore.s3?.key - secret: settings.filestore.s3?.secret + key: settings.filestore.s3.key + secret: settings.filestore.s3.secret bucket: bucketName s3Client.copyFile sourceKey, destKey, (err)-> if err? 
- logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in s3" + logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in aws" callback(err) deleteFile: (bucketName, key, callback)-> @@ -89,7 +89,7 @@ module.exports = options = buildDefaultOptions(bucketName, "delete", key) request options, (err, res)-> if err? - logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong deleting file in s3" + logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong deleting file in aws" callback(err) deleteDirectory: (bucketName, key, _callback)-> @@ -100,12 +100,12 @@ module.exports = logger.log key: key, bucketName: bucketName, "deleting directory" s3Client = knox.createClient - key: settings.filestore.s3?.key - secret: settings.filestore.s3?.secret + key: settings.filestore.s3.key + secret: settings.filestore.s3.secret bucket: bucketName s3Client.list prefix:key, (err, data)-> if err? - logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in s3" + logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in aws" return callback(err) keys = _.map data.Contents, (entry)-> return entry.Key @@ -116,13 +116,13 @@ module.exports = options = buildDefaultOptions(bucketName, "head", key) request options, (err, res)-> if err? - logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong checking file in s3" + logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong checking file in aws" return callback(err) if !res? logger.err err:err, res:res, bucketName:bucketName, key:key, "no response object returned when checking if file exists" err = new Error("no response from s3 #{bucketName} #{key}") return callback(err) exists = res.statusCode == 200 - logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exists in s3" + logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exsists in s3" callback(err, exists) From 41397821b15d2f371c9dca88de41f4bb3bcda171 Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Wed, 22 Jul 2015 16:42:45 +0200 Subject: [PATCH 122/555] Add a manager using the aws-sdk library The knox library does not support the AWS Instance Role. So use the official AWS SDK to connect to S3 --- .../app/coffee/AWSSDKPersistorManager.coffee | 74 +++++++++++++++++++ .../app/coffee/PersistorManager.coffee | 2 + services/filestore/package.json | 1 + 3 files changed, 77 insertions(+) create mode 100644 services/filestore/app/coffee/AWSSDKPersistorManager.coffee diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee new file mode 100644 index 0000000000..808b0b5219 --- /dev/null +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee @@ -0,0 +1,74 @@ +logger = require "logger-sharelatex" +aws = require "aws-sdk" +_ = require "underscore" +fs = require "fs" + +s3 = aws.S3() + +module.exports = + sendFile: (bucketName, key, fsPath, callback)-> + logger.log bucketName:bucketName, key, "send file data to s3" + stream = fs.createReadStream fsPath + s3.putObject Bucket: bucketName, Key: key, Body: stream, (err, data) -> + if err? 
+ logger.err err: err, Bucket: bucketName, Key: key, "error sending file data to s3" + callback err + + sendStream: (bucketName, key, stream, callback)-> + logger.log bucketName:bucketName, key, "send file stream to s3" + s3.putObject Bucket: bucketName, Key: key, Body: stream, (err, data) -> + if err? + logger.err err: err, Bucket: bucketName, Key: key, "error sending file stream to s3" + callback err + + getFileStream: (bucketName, key, callback = (err, res)->)-> + logger.log bucketName:bucketName, key, "get file stream from s3" + callback = _.once callback + stream = s3.getObject(Bucket:bucketName, Key: key).createReadStream() + stream.on 'response', (res) -> + callback null, res + stream.on 'error', (err) -> + logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" + callback err + + copyFile: (bucketName, sourceKey, destKey, callback)-> + logger.log bucketName:bucketName, sourceKey:sourceKey, destKey: destKey, "copying file in s3" + source = bucketName + '/' + sourceKey + s3.copyObject {Bucket: bucketName, Key: destKey, CopySource: source}, (err) -> + if err? + logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in s3" + callback err + + deleteFile: (bucketName, key, callback)-> + logger.log bucketName:bucketName, key:key, "delete file in s3" + s3.deleteObject {Bucket: bucketName, Key: key}, (err) -> + if err? + logger.err err:err, bucketName:bucketName, key:key, "something went wrong deleting file in s3" + callback err + + deleteDirectory: (bucketName, key, callback)-> + logger.log bucketName:bucketName, key:key, "delete directory in s3" + s3.listObjects {Bucket: bucketName, prefix: key}, (err, data) -> + if err? + logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in s3" + return callback err + keys = _.map data.Contents, (entry)-> + Key: entry.Key + s3.deleteMultiple + Bucket: bucketName + Delete: + Objects: keys + Quiet: true + , (err) -> + if err? + logger.err err:err, bucketName:bucketName, key:key, "something went wrong deleting directory in s3" + callback err + + checkIfFileExists:(bucketName, key, callback)-> + logger.log bucketName:bucketName, key:key, "check file existence in s3" + s3.headObject {Bucket: bucketName, Key: key}, (err, data) -> + if err? + logger.err err:err, bucketName:bucketName, key:key, "something went wrong checking head in s3" + return callback err + callback null, data.ETag? 
+ diff --git a/services/filestore/app/coffee/PersistorManager.coffee b/services/filestore/app/coffee/PersistorManager.coffee index 1dad923098..aa5c80599d 100644 --- a/services/filestore/app/coffee/PersistorManager.coffee +++ b/services/filestore/app/coffee/PersistorManager.coffee @@ -7,6 +7,8 @@ settings.filestore.backend ||= "s3" logger.log backend:settings.filestore.backend, "Loading backend" module.exports = switch settings.filestore.backend + when "aws-sdk" + require "./AWSSDKPersistorManager" when "s3" require("./S3PersistorManager") when "fs" diff --git a/services/filestore/package.json b/services/filestore/package.json index dd84289f30..441d4ad7e3 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -8,6 +8,7 @@ }, "dependencies": { "async": "~0.2.10", + "aws-sdk": "^2.1.39", "bunyan": "^1.3.5", "coffee-script": "~1.7.1", "express": "~3.4.8", From 7ef46a79a0cbd4823eb94c5fefad18bf79715154 Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Fri, 24 Jul 2015 09:06:52 +0200 Subject: [PATCH 123/555] Fix aws-sdk persistor. Also fix some typos. --- .../app/coffee/AWSSDKPersistorManager.coffee | 22 +++++++----- .../app/coffee/FileController.coffee | 2 +- .../filestore/app/coffee/FileHandler.coffee | 36 +++++++++---------- 3 files changed, 32 insertions(+), 28 deletions(-) diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee index 808b0b5219..311997ed03 100644 --- a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee @@ -7,7 +7,7 @@ s3 = aws.S3() module.exports = sendFile: (bucketName, key, fsPath, callback)-> - logger.log bucketName:bucketName, key, "send file data to s3" + logger.log bucketName:bucketName, key:key, "send file data to s3" stream = fs.createReadStream fsPath s3.putObject Bucket: bucketName, Key: key, Body: stream, (err, data) -> if err? @@ -15,18 +15,19 @@ module.exports = callback err sendStream: (bucketName, key, stream, callback)-> - logger.log bucketName:bucketName, key, "send file stream to s3" + logger.log bucketName:bucketName, key:key, "send file stream to s3" s3.putObject Bucket: bucketName, Key: key, Body: stream, (err, data) -> if err? logger.err err: err, Bucket: bucketName, Key: key, "error sending file stream to s3" callback err getFileStream: (bucketName, key, callback = (err, res)->)-> - logger.log bucketName:bucketName, key, "get file stream from s3" + logger.log bucketName:bucketName, key:key, "get file stream from s3" callback = _.once callback - stream = s3.getObject(Bucket:bucketName, Key: key).createReadStream() - stream.on 'response', (res) -> - callback null, res + request = s3.getObject(Bucket:bucketName, Key: key) + stream = request.createReadStream() + stream.on 'readable', () -> + callback null, stream stream.on 'error', (err) -> logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" callback err @@ -48,20 +49,23 @@ module.exports = deleteDirectory: (bucketName, key, callback)-> logger.log bucketName:bucketName, key:key, "delete directory in s3" - s3.listObjects {Bucket: bucketName, prefix: key}, (err, data) -> + s3.listObjects {Bucket: bucketName, Prefix: key}, (err, data) -> if err? 
logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in s3" return callback err + if data.Contents.length == 0 + logger.log bucketName:bucketName, key:key, "the directory is empty" + return callback() keys = _.map data.Contents, (entry)-> Key: entry.Key - s3.deleteMultiple + s3.deleteObjects Bucket: bucketName Delete: Objects: keys Quiet: true , (err) -> if err? - logger.err err:err, bucketName:bucketName, key:key, "something went wrong deleting directory in s3" + logger.err err:err, bucketName:bucketName, key:keys, "something went wrong deleting directory in s3" callback err checkIfFileExists:(bucketName, key, callback)-> diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 3b83e203fd..921faa6c6e 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -11,7 +11,7 @@ module.exports = metrics.inc "getFile" {key, bucket} = req {format, style} = req.query - logger.log key:key, bucket:bucket, format:format, style:style, "reciving request to get file" + logger.log key:key, bucket:bucket, format:format, style:style, "receiving request to get file" FileHandler.getFile bucket, key, {format:format,style:style}, (err, fileStream)-> if err? logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 51aec4bba6..ece26c6164 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -10,16 +10,16 @@ ImageOptimiser = require("./ImageOptimiser") module.exports = insertFile: (bucket, key, stream, callback)-> - convetedKey = KeyBuilder.getConvertedFolderKey(key) - PersistorManager.deleteDirectory bucket, convetedKey, (error) -> + convertedKey = KeyBuilder.getConvertedFolderKey key + PersistorManager.deleteDirectory bucket, convertedKey, (error) -> return callback(error) if error? 
PersistorManager.sendStream bucket, key, stream, callback deleteFile: (bucket, key, callback)-> - convetedKey = KeyBuilder.getConvertedFolderKey(key) + convertedKey = KeyBuilder.getConvertedFolderKey key async.parallel [ (done)-> PersistorManager.deleteFile bucket, key, done - (done)-> PersistorManager.deleteDirectory bucket, convetedKey, done + (done)-> PersistorManager.deleteDirectory bucket, convertedKey, done ], callback getFile: (bucket, key, opts = {}, callback)-> @@ -36,14 +36,14 @@ module.exports = callback err, fileStream _getConvertedFile: (bucket, key, opts, callback)-> - convetedKey = KeyBuilder.addCachingToKey(key, opts) - PersistorManager.checkIfFileExists bucket, convetedKey, (err, exists)=> + convertedKey = KeyBuilder.addCachingToKey key, opts + PersistorManager.checkIfFileExists bucket, convertedKey, (err, exists)=> if exists - PersistorManager.getFileStream bucket, convetedKey, callback + PersistorManager.getFileStream bucket, convertedKey, callback else - @_getConvertedFileAndCache bucket, key, convetedKey, opts, callback + @_getConvertedFileAndCache bucket, key, convertedKey, opts, callback - _getConvertedFileAndCache: (bucket, key, convetedKey, opts, callback)-> + _getConvertedFileAndCache: (bucket, key, convertedKey, opts, callback)-> self = @ convertedFsPath = "" async.series [ @@ -54,27 +54,27 @@ module.exports = (cb)-> ImageOptimiser.compressPng convertedFsPath, cb (cb)-> - PersistorManager.sendFile bucket, convetedKey, convertedFsPath, cb + PersistorManager.sendFile bucket, convertedKey, convertedFsPath, cb ], (err)-> if err? return callback(err) - PersistorManager.getFileStream bucket, convetedKey, callback + PersistorManager.getFileStream bucket, convertedKey, callback - _convertFile: (bucket, origonalKey, opts, callback)-> - @_writeS3FileToDisk bucket, origonalKey, (err, origonalFsPath)-> + _convertFile: (bucket, originalKey, opts, callback)-> + @_writeS3FileToDisk bucket, originalKey, (err, originalFsPath)-> done = (err, destPath)-> if err? - logger.err err:err, bucket:bucket, origonalKey:origonalKey, opts:opts, "error converting file" + logger.err err:err, bucket:bucket, originalKey:originalKey, opts:opts, "error converting file" return callback(err) - LocalFileWriter.deleteFile origonalFsPath, -> + LocalFileWriter.deleteFile originalFsPath, -> callback(err, destPath) if opts.format? 
- FileConverter.convert origonalFsPath, opts.format, done + FileConverter.convert originalFsPath, opts.format, done else if opts.style == "thumbnail" - FileConverter.thumbnail origonalFsPath, done + FileConverter.thumbnail originalFsPath, done else if opts.style == "preview" - FileConverter.preview origonalFsPath, done + FileConverter.preview originalFsPath, done else throw new Error("should have specified opts to convert file with #{JSON.stringify(opts)}") From f417b9b171ec7b48f183d2b827c7295665f0574a Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Fri, 24 Jul 2015 15:48:46 +0200 Subject: [PATCH 124/555] Fix the AWS-SDK Persistor --- services/filestore/app/coffee/AWSSDKPersistorManager.coffee | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee index 311997ed03..e1474fa009 100644 --- a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee @@ -3,20 +3,20 @@ aws = require "aws-sdk" _ = require "underscore" fs = require "fs" -s3 = aws.S3() +s3 = new aws.S3() module.exports = sendFile: (bucketName, key, fsPath, callback)-> logger.log bucketName:bucketName, key:key, "send file data to s3" stream = fs.createReadStream fsPath - s3.putObject Bucket: bucketName, Key: key, Body: stream, (err, data) -> + s3.upload Bucket: bucketName, Key: key, Body: stream, (err, data) -> if err? logger.err err: err, Bucket: bucketName, Key: key, "error sending file data to s3" callback err sendStream: (bucketName, key, stream, callback)-> logger.log bucketName:bucketName, key:key, "send file stream to s3" - s3.putObject Bucket: bucketName, Key: key, Body: stream, (err, data) -> + s3.upload Bucket: bucketName, Key: key, Body: stream, (err, data) -> if err? logger.err err: err, Bucket: bucketName, Key: key, "error sending file stream to s3" callback err From e1683b8e1c2b894f59c7643050aecd149e864a61 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 4 Aug 2015 11:26:51 +0100 Subject: [PATCH 125/555] added a /gc endpoint to debug filestore memory usage --- services/filestore/app.coffee | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 9c744e75ab..c06353518b 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -85,6 +85,17 @@ app.post "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.copyFile app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile +app.get "/gc", (req, res)-> + if global.gc? 
+ memory_before = process.memoryUsage() + global.gc() + memory_after = process.memoryUsage() + delta = {} + for k of memory_after + delta[k] = memory_after[k] - memory_before[k] + res.send { memory_before, memory_after, delta } + else + res.send(501) # need --expose-gc, send "not implemented" app.get "/heapdump", (req, res)-> require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename)-> From 2e08281b88c1b037e537db0c6ce1a91e6f190bc7 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 5 Aug 2015 16:52:01 +0100 Subject: [PATCH 126/555] replace the /gc endpoint with a automatic gc every 3 minutes --- services/filestore/app.coffee | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index c06353518b..95fdf146d3 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -85,18 +85,6 @@ app.post "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.copyFile app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile -app.get "/gc", (req, res)-> - if global.gc? - memory_before = process.memoryUsage() - global.gc() - memory_after = process.memoryUsage() - delta = {} - for k of memory_after - delta[k] = memory_after[k] - memory_before[k] - res.send { memory_before, memory_after, delta } - else - res.send(501) # need --expose-gc, send "not implemented" - app.get "/heapdump", (req, res)-> require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename)-> res.send filename @@ -151,3 +139,10 @@ server.listen port, host, -> process.on 'SIGTERM', () -> logger.log("filestore got SIGTERM, shutting down gracefully") beginShutdown() + +if global.gc? + gcTimer = setInterval () -> + global.gc() + logger.log process.memoryUsage(), "global.gc" + , 3 * oneMinute = 60 * 1000 + gcTimer.unref() From bea9ee6b19b4d6b1c15f6466a514607c16360ff8 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 18 Aug 2015 15:36:07 +0100 Subject: [PATCH 127/555] add gc to filestore --- services/filestore/app.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 95fdf146d3..5df7c76fc8 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -14,6 +14,7 @@ Metrics = require "metrics-sharelatex" Metrics.initialize("filestore") Metrics.open_sockets.monitor(logger) Metrics.event_loop?.monitor(logger) +Metrics.memory.monitor(logger) app.configure -> app.use express.bodyParser() From 57aedefdd38b0d40a2ba607c0f686a8fb93f18c6 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 27 Aug 2015 15:21:33 +0100 Subject: [PATCH 128/555] Begin work towards obeying 'Range' headers, thread the `opts` down to persistence layer. 
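For context, the `opts` object threaded through to the persistors here is the byte-range shape `{start: Number, end: Number}` that later patches in this series document on `getFileStream`. A minimal sketch of a caller, in which the bucket name and key are purely illustrative placeholders:

PersistorManager = require "./PersistorManager"
# placeholder bucket and key; requests bytes 0..8 inclusive (nine bytes)
PersistorManager.getFileStream "user-files-bucket", "project_id/file_id", {start: 0, end: 8}, (err, stream) ->
  return console.error err if err?
  stream.pipe process.stdout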
--- services/filestore/app/coffee/FSPersistorManager.coffee | 2 +- services/filestore/app/coffee/FileHandler.coffee | 2 +- services/filestore/app/coffee/S3PersistorManager.coffee | 4 ++-- services/filestore/test/unit/coffee/FileHandlerTests.coffee | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index c2f4564987..719dd1d098 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -27,7 +27,7 @@ module.exports = return callback err @sendFile location, target, fsPath, callback - getFileStream: (location, name, _callback = (err, res)->) -> + getFileStream: (location, name, opts, _callback = (err, res)->) -> callback = (args...) -> _callback(args...) _callback = () -> diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 51aec4bba6..16ac56e703 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -30,7 +30,7 @@ module.exports = @_getConvertedFile bucket, key, opts, callback _getStandardFile: (bucket, key, opts, callback)-> - PersistorManager.getFileStream bucket, key, (err, fileStream)-> + PersistorManager.getFileStream bucket, key, opts, (err, fileStream)-> if err? logger.err bucket:bucket, key:key, opts:opts, "error getting fileStream" callback err, fileStream diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index d5cb06074e..7e0cb3947e 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -57,8 +57,8 @@ module.exports = logger.err bucketName:bucketName, key:key, fsPath:fsPath, err:err, "something went wrong writing stream to disk" return callback(err) @sendFile bucketName, key, fsPath, callback - - getFileStream: (bucketName, key, callback = (err, res)->)-> + + getFileStream: (bucketName, key, opts, callback = (err, res)->)-> callback = _.once callback logger.log bucketName:bucketName, key:key, "getting file from s3" s3Client = knox.createClient diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index 2a2bf6e17d..5457defd16 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -104,7 +104,7 @@ describe "FileHandler", -> beforeEach -> @fileStream = {on:->} - @PersistorManager.getFileStream.callsArgWith(2, "err", @fileStream) + @PersistorManager.getFileStream.callsArgWith(3, "err", @fileStream) it "should get the stream", (done)-> @handler.getFile @bucket, @key, null, => From d88736e3b5a904b7509ef26a09f4edfb3016c61c Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 27 Aug 2015 16:12:11 +0100 Subject: [PATCH 129/555] Pass the `start` and `end` options down to `fs.createReadStream`, and test for same. 
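This relies on the standard Node `fs.createReadStream(path, options)` behaviour, where `start` and `end` are byte offsets and `end` is inclusive, so `{start: 0, end: 8}` yields nine bytes. A standalone sketch with a placeholder file path:

fs = require "fs"
# read bytes 0..8 inclusive of a hypothetical file
stream = fs.createReadStream "/tmp/example.txt", {start: 0, end: 8}
stream.on "data", (chunk) -> process.stdout.write chunk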
--- .../app/coffee/FSPersistorManager.coffee | 2 +- .../coffee/FSPersistorManagerTests.coffee | 21 +++++++++++++++---- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 719dd1d098..283d773e88 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -33,7 +33,7 @@ module.exports = _callback = () -> filteredName = filterName name logger.log location:location, name:filteredName, "getting file" - sourceStream = fs.createReadStream "#{location}/#{filteredName}" + sourceStream = fs.createReadStream "#{location}/#{filteredName}", opts sourceStream.on 'error', (err) -> logger.err err:err, location:location, name:name, "Error reading from file" if err.code = 'ENOENT' diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index 786f6be323..1668b0ff9d 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -69,14 +69,27 @@ describe "FSPersistorManagerTests", -> done() describe "getFileStream", -> + beforeEach -> + @opts = {} + it "should use correct file location", (done) -> - @Fs.createReadStream.returns( - on:-> - ) - @FSPersistorManager.getFileStream @location, @name1, (err,res)=> + @Fs.createReadStream.returns({on: ->}) + @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res) => @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal.true done() + describe "with start and end options", -> + + beforeEach -> + @opts = {start: 0, end: 8} + + it 'should pass the options to createReadStream', (done) -> + @Fs.createReadStream.returns({on: ->}) + @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> + @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}", @opts).should.equal true + done() + + describe "copyFile", -> beforeEach -> @ReadStream= From 76506ed91e99ef48e11b078b5640c88fc77d5118 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 28 Aug 2015 10:09:41 +0100 Subject: [PATCH 130/555] Pass start/end options down to the S3PersistorManager. Test that those options cause a 'Range' header to be attached to the request sent to S3. --- .../app/coffee/FSPersistorManager.coffee | 1 + .../app/coffee/S3PersistorManager.coffee | 7 ++++++- .../coffee/S3PersistorManagerTests.coffee | 21 +++++++++++++++++-- 3 files changed, 26 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 283d773e88..6568f81f74 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -27,6 +27,7 @@ module.exports = return callback err @sendFile location, target, fsPath, callback + # opts may be {start: Number, end: Number} getFileStream: (location, name, opts, _callback = (err, res)->) -> callback = (args...) -> _callback(args...) 
diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 7e0cb3947e..8d40113200 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -58,14 +58,19 @@ module.exports = return callback(err) @sendFile bucketName, key, fsPath, callback + # opts may be {start: Number, end: Number} getFileStream: (bucketName, key, opts, callback = (err, res)->)-> + opts = opts || {} + headers = {} + if opts.start? and opts.end? + headers['Range'] = "bytes=#{opts.start}-#{opts.end}" callback = _.once callback logger.log bucketName:bucketName, key:key, "getting file from s3" s3Client = knox.createClient key: settings.filestore.s3.key secret: settings.filestore.s3.secret bucket: bucketName - s3Stream = s3Client.get(key) + s3Stream = s3Client.get(key, headers) s3Stream.end() s3Stream.on 'response', (res) -> callback null, res diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index fe70f1008d..ca324ae212 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -42,17 +42,34 @@ describe "S3PersistorManagerTests", -> describe "getFileStream", -> beforeEach -> @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires - + @opts = {} it "should use correct key", (done)-> @stubbedKnoxClient.get.returns( on:-> end:-> ) - @S3PersistorManager.getFileStream @bucketName, @key, @fsPath, (err)=> + @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback @stubbedKnoxClient.get.calledWith(@key).should.equal true done() + describe "with start and end options", -> + beforeEach -> + @opts = + start: 0 + end: 8 + it "should pass headers to the knox.Client.get()", (done) -> + @stubbedKnoxClient.get.returns( + on:-> + end:-> + ) + @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback + @stubbedKnoxClient.get.calledWith(@key, {'Range': 'bytes=0-8'}).should.equal true + done() + + + + describe "sendFile", -> beforeEach -> From b905d9fba11c3469adac4f15b8755d0ed26351b7 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 28 Aug 2015 11:44:47 +0100 Subject: [PATCH 131/555] Add `range-parser` dependency. --- services/filestore/package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/package.json b/services/filestore/package.json index dd84289f30..3bc567e8a2 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -23,6 +23,7 @@ "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", "pngcrush": "0.0.3", + "range-parser": "^1.0.2", "recluster": "^0.3.7", "request": "2.14.0", "response": "0.14.0", From 97cdc243d3f9204c3e694ded02a359360342dfc0 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 28 Aug 2015 11:45:16 +0100 Subject: [PATCH 132/555] Parse out the range header and set `start` and `end` properties on options. 
--- .../app/coffee/FileController.coffee | 33 +++++++++++++++---- .../unit/coffee/FileControllerTests.coffee | 16 ++++++++- 2 files changed, 41 insertions(+), 8 deletions(-) diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 3b83e203fd..8517b3ca52 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -3,19 +3,41 @@ settings = require("settings-sharelatex") logger = require("logger-sharelatex") FileHandler = require("./FileHandler") metrics = require("metrics-sharelatex") +parseRange = require('range-parser') + oneDayInSeconds = 60 * 60 * 24 +maxSizeInBytes = 1024 * 1024 * 1024 # 1GB + + +get_range = (header) -> + parsed = parseRange(maxSizeInBytes, header) + range_type = parsed.type + range = parsed[0] + {start: range.start, end: range.end} module.exports = getFile: (req, res)-> - metrics.inc "getFile" {key, bucket} = req {format, style} = req.query + opts = { + key: key, + bucket: bucket, + format: format, + style: style, + start: null, + end: null + } + metrics.inc "getFile" + if req.headers.range? + range = get_range(req.headers.range) + opts.start = range.start + opts.end = range.end logger.log key:key, bucket:bucket, format:format, style:style, "reciving request to get file" - FileHandler.getFile bucket, key, {format:format,style:style}, (err, fileStream)-> + FileHandler.getFile bucket, key, opts, (err, fileStream)-> if err? logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" - if !res.finished and res?.send? + if !res.finished and res?.send? res.send 500 else if req.query.cacheWarm logger.log key:key, bucket:bucket, format:format, style:style, "request is only for cache warm so not sending stream" @@ -38,7 +60,7 @@ module.exports = oldFile_id = req.body.source.file_id logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "reciving request to copy file" PersistorManager.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)-> - if err? + if err? 
logger.log err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file" res.send 500 else @@ -54,6 +76,3 @@ module.exports = res.send 500 else res.send 204 - - - diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index ecf067976f..e27b8fb127 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -42,6 +42,7 @@ describe "FileController", -> params: project_id:@project_id file_id:@file_id + headers: {} @res = setHeader: -> @fileStream = {} @@ -70,6 +71,19 @@ describe "FileController", -> done() @controller.getFile @req, @res + describe "with a 'Range' header set", -> + + beforeEach -> + @req.headers.range = 'bytes=0-8' + + it "should pass 'start' and 'end' options to FileHandler", (done) -> + @FileHandler.getFile.callsArgWith(3, null, @fileStream) + @fileStream.pipe = (res)=> + expect(@FileHandler.getFile.lastCall.args[2].start).to.equal 0 + expect(@FileHandler.getFile.lastCall.args[2].end).to.equal 8 + done() + @controller.getFile @req, @res + describe "insertFile", -> it "should send bucket name key and res to PersistorManager", (done)-> @@ -101,7 +115,7 @@ describe "FileController", -> @res.send = (code)=> code.should.equal 500 done() - @controller.copyFile @req, @res + @controller.copyFile @req, @res describe "delete file", -> From 387e96ab67187414a3aaa86f6a3bc6f6dfb2336e Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 28 Aug 2015 12:02:50 +0100 Subject: [PATCH 133/555] Refactor to make _get_range testable. --- .../app/coffee/FileController.coffee | 21 +++++++++---------- .../unit/coffee/FileControllerTests.coffee | 19 +++++++++++++++++ 2 files changed, 29 insertions(+), 11 deletions(-) diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 8517b3ca52..22db596d94 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -8,14 +8,7 @@ parseRange = require('range-parser') oneDayInSeconds = 60 * 60 * 24 maxSizeInBytes = 1024 * 1024 * 1024 # 1GB - -get_range = (header) -> - parsed = parseRange(maxSizeInBytes, header) - range_type = parsed.type - range = parsed[0] - {start: range.start, end: range.end} - -module.exports = +module.exports = FileController = getFile: (req, res)-> {key, bucket} = req @@ -25,12 +18,10 @@ module.exports = bucket: bucket, format: format, style: style, - start: null, - end: null } metrics.inc "getFile" if req.headers.range? 
- range = get_range(req.headers.range) + range = FileController._get_range(req.headers.range) opts.start = range.start opts.end = range.end logger.log key:key, bucket:bucket, format:format, style:style, "reciving request to get file" @@ -76,3 +67,11 @@ module.exports = res.send 500 else res.send 204 + + _get_range: (header) -> + parsed = parseRange(maxSizeInBytes, header) + if parsed == -1 or parsed == -2 or parsed.type != 'bytes' + null + else + range = parsed[0] + {start: range.start, end: range.end} diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index e27b8fb127..1a2c3e81ea 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -133,3 +133,22 @@ describe "FileController", -> code.should.equal 500 done() @controller.deleteFile @req, @res + + describe "_get_range", -> + + it "should parse a valid Range header", (done) -> + result = @controller._get_range('bytes=0-200') + expect(result).to.not.equal null + expect(result.start).to.equal 0 + expect(result.end).to.equal 200 + done() + + it "should return null for an invalid Range header", (done) -> + result = @controller._get_range('wat') + expect(result).to.equal null + done() + + it "should return null for any type other than 'bytes'", (done) -> + result = @controller._get_range('carrots=0-200') + expect(result).to.equal null + done() From 8a631e70f732f7d4fc95d39fe719e93d39a59b02 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 28 Aug 2015 12:16:41 +0100 Subject: [PATCH 134/555] Refactor, and log when we have a 'range' header. --- services/filestore/app/coffee/FileController.coffee | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 22db596d94..9cb01b73ea 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -13,19 +13,20 @@ module.exports = FileController = getFile: (req, res)-> {key, bucket} = req {format, style} = req.query - opts = { + options = { key: key, bucket: bucket, format: format, style: style, } metrics.inc "getFile" + logger.log key:key, bucket:bucket, format:format, style: style, "reciving request to get file" if req.headers.range? range = FileController._get_range(req.headers.range) - opts.start = range.start - opts.end = range.end - logger.log key:key, bucket:bucket, format:format, style:style, "reciving request to get file" - FileHandler.getFile bucket, key, opts, (err, fileStream)-> + options.start = range.start + options.end = range.end + logger.log start: range.start, end: range.end, "getting range of bytes from file" + FileHandler.getFile bucket, key, options, (err, fileStream)-> if err? logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" if !res.finished and res?.send? From 64f70eddbd002dfb538b421660f7d8c184ba4403 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 28 Aug 2015 13:38:54 +0100 Subject: [PATCH 135/555] Add cluster.js to .gitignore. 
--- services/filestore/.gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/.gitignore b/services/filestore/.gitignore index 7d881c3ca7..6ab729d0ae 100644 --- a/services/filestore/.gitignore +++ b/services/filestore/.gitignore @@ -54,6 +54,7 @@ public/stylesheets/mainStyle.css public/minjs/ test/unit/js/ test/acceptence/js +cluster.js user_files/* template_files/* From ed980a21e19caa0088d3d78a5b58c681fc048d59 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 28 Aug 2015 13:50:40 +0100 Subject: [PATCH 136/555] Test the FileHandler layer. --- .../test/unit/coffee/FileHandlerTests.coffee | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index 5457defd16..74ca9975da 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -93,6 +93,13 @@ describe "FileHandler", -> @handler._getConvertedFile.called.should.equal false done() + it "should pass options to _getStandardFile", (done) -> + options = {start: 0, end: 8} + @handler.getFile @bucket, @key, options, => + expect(@handler._getStandardFile.lastCall.args[2].start).to.equal 0 + expect(@handler._getStandardFile.lastCall.args[2].end).to.equal 8 + done() + it "should call _getConvertedFile if a format is defined", (done)-> @handler.getFile @bucket, @key, format:"png", => @handler._getStandardFile.called.should.equal false @@ -117,6 +124,13 @@ describe "FileHandler", -> stream.should.equal @fileStream done() + it "should pass options to PersistorManager", (done) -> + @handler.getFile @bucket, @key, {start: 0, end: 8}, => + expect(@PersistorManager.getFileStream.lastCall.args[2].start).to.equal 0 + expect(@PersistorManager.getFileStream.lastCall.args[2].end).to.equal 8 + done() + + describe "_getConvertedFile", -> it "should getFileStream if it does exists", (done)-> @@ -178,7 +192,3 @@ describe "FileHandler", -> @FileConverter.convert.calledWith(@stubbedPath, @format).should.equal true @LocalFileWriter.deleteFile.calledWith(@stubbedPath).should.equal true done() - - - - From 959191ae5019095751eeab659b50aa8aa1a3cacc Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Fri, 28 Aug 2015 14:20:14 +0100 Subject: [PATCH 137/555] Add an acceptance test for using range header when getting a file. 
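The same behaviour can be exercised by hand against a running filestore using the `request` module the tests already depend on. A sketch against the public-files route added in PATCH 117; the host, port and ids below are placeholders, and the example assumes the `public_files` store is configured:

request = require "request"
options =
  uri: "http://localhost:3009/project/some_project_id/public/some_file_id"  # placeholder URL
  headers:
    "Range": "bytes=0-8"
request.get options, (err, response, body) ->
  console.log body  # only the first nine bytes of the stored file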
--- .../acceptence/coffee/SendingFileTest.coffee | 25 ++++++++++++++++--- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee index 11668f230e..6408df7477 100644 --- a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee @@ -32,7 +32,7 @@ describe "Filestore", -> it "should send a 200 for status endpoing", (done)-> - request "#{@filestoreUrl}/status", (err, response, body)-> + request "#{@filestoreUrl}/status", (err, response, body)-> response.statusCode.should.equal 200 body.indexOf("filestore").should.not.equal -1 body.indexOf("up").should.not.equal -1 @@ -56,6 +56,26 @@ describe "Filestore", -> body.should.equal @constantFileContent done() + it "should be able to get back the first 8 bytes of the file", (done) -> + @timeout(1000 * 10) + options = + uri: @fileUrl + headers: + 'Range': 'bytes=0-8' + request.get options, (err, response, body)=> + body.should.equal 'hello wor' + done() + + it "should be able to get back bytes 4 through 10 of the file", (done) -> + @timeout(1000 * 10) + options = + uri: @fileUrl + headers: + 'Range': 'bytes=4-10' + request.get options, (err, response, body)=> + body.should.equal 'o world' + done() + it "should be able to delete the file", (done)-> @timeout(1000 * 20) request.del @fileUrl, (err, response, body)=> @@ -84,6 +104,3 @@ describe "Filestore", -> request.get newFileUrl, (err, response, body)=> body.should.equal @constantFileContent done() - - - From 47fdf3c5c9eefa8f3082cc2f27678019b9922216 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Mon, 31 Aug 2015 16:47:16 +0100 Subject: [PATCH 138/555] Ensure that Filestore responds with 404 when a file does not exist. 
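The `NotFoundError` introduced below lets callers branch on the error type rather than inspecting message strings or HTML bodies. A condensed sketch of the intended pattern, mirroring the controller change in this patch:

Errors = require "./Errors"
sendError = (res, err) ->
  if err instanceof Errors.NotFoundError
    res.send 404  # the requested file genuinely does not exist
  else
    res.send 500  # anything else is treated as an internal error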
--- services/filestore/app/coffee/Errors.coffee | 9 +++++++++ .../filestore/app/coffee/FSPersistorManager.coffee | 6 +++--- services/filestore/app/coffee/FileController.coffee | 5 ++++- .../test/acceptence/coffee/SendingFileTest.coffee | 10 +++++++++- 4 files changed, 25 insertions(+), 5 deletions(-) create mode 100644 services/filestore/app/coffee/Errors.coffee diff --git a/services/filestore/app/coffee/Errors.coffee b/services/filestore/app/coffee/Errors.coffee new file mode 100644 index 0000000000..3bd9479abe --- /dev/null +++ b/services/filestore/app/coffee/Errors.coffee @@ -0,0 +1,9 @@ +NotFoundError = (message) -> + error = new Error(message) + error.name = "NotFoundError" + error.__proto__ = NotFoundError.prototype + return error +NotFoundError.prototype.__proto__ = Error.prototype + +module.exports = Errors = + NotFoundError: NotFoundError diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 6568f81f74..00ab6f43b4 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -1,12 +1,12 @@ logger = require("logger-sharelatex") fs = require("fs") LocalFileWriter = require("./LocalFileWriter") +Errors = require('./Errors') rimraf = require("rimraf") -response = require ("response") filterName = (key) -> return key.replace /\//g, "_" - + module.exports = sendFile: ( location, target, source, callback = (err)->) -> @@ -38,7 +38,7 @@ module.exports = sourceStream.on 'error', (err) -> logger.err err:err, location:location, name:name, "Error reading from file" if err.code = 'ENOENT' - callback null, response().html('NoSuchKey: file not found\n') + callback new Errors.NotFoundError(err.message), null else callback err sourceStream.on 'readable', () -> diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 9cb01b73ea..876227c029 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -4,6 +4,7 @@ logger = require("logger-sharelatex") FileHandler = require("./FileHandler") metrics = require("metrics-sharelatex") parseRange = require('range-parser') +Errors = require('./Errors') oneDayInSeconds = 60 * 60 * 24 maxSizeInBytes = 1024 * 1024 * 1024 # 1GB @@ -29,8 +30,10 @@ module.exports = FileController = FileHandler.getFile bucket, key, options, (err, fileStream)-> if err? logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" + if err instanceof Errors.NotFoundError + return res.send 404 if !res.finished and res?.send? 
- res.send 500 + return res.send 500 else if req.query.cacheWarm logger.log key:key, bucket:bucket, format:format, style:style, "request is only for cache warm so not sending stream" res.send 200 diff --git a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee index 6408df7477..7abdc22773 100644 --- a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee @@ -50,6 +50,14 @@ describe "Filestore", -> writeStream.on "end", done fs.createReadStream(@localFileReadPath).pipe writeStream + it "should return 404 for a non-existant id", (done) -> + @timeout(1000 * 20) + options = + uri: @fileUrl + '___this_is_clearly_wrong___' + request.get options, (err, response, body) => + response.statusCode.should.equal 404 + done() + it "should be able get the file back", (done)-> @timeout(1000 * 10) request.get @fileUrl, (err, response, body)=> @@ -81,7 +89,7 @@ describe "Filestore", -> request.del @fileUrl, (err, response, body)=> response.statusCode.should.equal 204 request.get @fileUrl, (err, response, body)=> - body.indexOf("NoSuchKey").should.not.equal -1 + response.statusCode.should.equal 404 done() it "should be able to copy files", (done)-> From 8bbf81f4bcf87f8f0eaaa1f521275255a0640224 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Tue, 1 Sep 2015 11:36:28 +0100 Subject: [PATCH 139/555] If the S3 service produces an error, handle it. --- .../app/coffee/S3PersistorManager.coffee | 8 +++- .../coffee/S3PersistorManagerTests.coffee | 45 +++++++++++++++++++ 2 files changed, 52 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 8d40113200..ede27b20be 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -9,6 +9,7 @@ fs = require("fs") knox = require("knox") path = require("path") LocalFileWriter = require("./LocalFileWriter") +Errors = require("./Errors") _ = require("underscore") thirtySeconds = 30 * 1000 @@ -73,6 +74,12 @@ module.exports = s3Stream = s3Client.get(key, headers) s3Stream.end() s3Stream.on 'response', (res) -> + if res.statusCode == 404 + logger.log bucketName:bucketName, key:key, "file not found in s3" + return callback new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}"), null + if res.stausCode != 200 + logger.log bucketName:bucketName, key:key, "error getting file from s3" + return callback new Error("Got non-200 response from S3: #{res.statusCode}"), null callback null, res s3Stream.on 'error', (err) -> logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" @@ -130,4 +137,3 @@ module.exports = exists = res.statusCode == 200 logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exsists in s3" callback(err, exists) - diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index ca324ae212..78fec6cea5 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -35,6 +35,8 @@ describe "S3PersistorManagerTests", -> "logger-sharelatex": log:-> err:-> + "./Errors": @Errors = + NotFoundError: sinon.stub() @key = "my/key" @bucketName = "my-bucket" @error = "my errror" @@ -67,8 +69,51 @@ 
describe "S3PersistorManagerTests", -> @stubbedKnoxClient.get.calledWith(@key, {'Range': 'bytes=0-8'}).should.equal true done() + describe "error conditions", -> + beforeEach -> + @fakeResponse = + statusCode: 500 + @stubbedKnoxClient.get.returns( + on: (key, callback) => + if key == 'response' + callback(@fakeResponse) + end: -> + ) + describe "when the file doesn't exist", -> + + beforeEach -> + @fakeResponse = + statusCode: 404 + + it "should produce a NotFoundError", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback + expect(stream).to.equal null + expect(err).to.not.equal null + expect(err instanceof @Errors.NotFoundError).to.equal true + done() + + it "should have bucket and key in the Error message", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback + error_message = @Errors.NotFoundError.lastCall.args[0] + expect(error_message).to.not.equal null + error_message.should.match(new RegExp(".*#{@bucketName}.*")) + error_message.should.match(new RegExp(".*#{@key}.*")) + done() + + describe "when the S3 service produces an error", -> + beforeEach -> + @fakeResponse = + statusCode: 500 + + it "should produce an error", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback + expect(stream).to.equal null + expect(err).to.not.equal null + expect(err instanceof Error).to.equal true + @Errors.NotFoundError.called.should.equal false + done() describe "sendFile", -> From edc06c82e531233ca1e977dc7c465d9161c9ac1f Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Tue, 1 Sep 2015 12:10:05 +0100 Subject: [PATCH 140/555] Correct usage of '==' rather than '=' when checking err.code. --- services/filestore/app/coffee/FSPersistorManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 00ab6f43b4..9b2b04dfe6 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -37,7 +37,7 @@ module.exports = sourceStream = fs.createReadStream "#{location}/#{filteredName}", opts sourceStream.on 'error', (err) -> logger.err err:err, location:location, name:name, "Error reading from file" - if err.code = 'ENOENT' + if err.code == 'ENOENT' callback new Errors.NotFoundError(err.message), null else callback err From 82af1be756f0aff4016fabc5db127c17aa329863 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Tue, 1 Sep 2015 12:10:30 +0100 Subject: [PATCH 141/555] Improve testing of error conditions in the FSPersistorManager. 
--- .../app/coffee/FSPersistorManager.coffee | 2 +- .../coffee/FSPersistorManagerTests.coffee | 40 ++++++++++++++++++- 2 files changed, 39 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 9b2b04dfe6..2ade1f3a5b 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -40,7 +40,7 @@ module.exports = if err.code == 'ENOENT' callback new Errors.NotFoundError(err.message), null else - callback err + callback err, null sourceStream.on 'readable', () -> # This can be called multiple times, but the callback wrapper # ensures the callback is only called once diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index 1668b0ff9d..75a4376b8c 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -29,6 +29,8 @@ describe "FSPersistorManagerTests", -> err:-> "response":response "rimraf":@Rimraf + "./Errors": @Errors = + NotFoundError: sinon.stub() @location = "/tmp" @name1 = "530f2407e7ef165704000007/530f838b46d9a9e859000008" @name1Filtered ="530f2407e7ef165704000007_530f838b46d9a9e859000008" @@ -89,6 +91,42 @@ describe "FSPersistorManagerTests", -> @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}", @opts).should.equal true done() + describe "error conditions", -> + + beforeEach -> + @fakeCode = 'ENOENT' + @Fs.createReadStream.returns( + on: (key, callback) => + err = new Error() + err.code = @fakeCode + callback(err, null) + ) + + describe "when the file does not exist", -> + + beforeEach -> + @fakeCode = 'ENOENT' + + it "should give a NotFoundError", (done) -> + @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> + expect(res).to.equal null + expect(err).to.not.equal null + expect(err instanceof @Errors.NotFoundError).to.equal true + done() + + describe "when some other error happens", -> + + beforeEach -> + @fakeCode = 'SOMETHINGHORRIBLE' + + it "should give an Error", (done) -> + @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> + expect(res).to.equal null + expect(err).to.not.equal null + expect(err instanceof Error).to.equal true + done() + + describe "copyFile", -> beforeEach -> @@ -170,5 +208,3 @@ describe "FSPersistorManagerTests", -> @FSPersistorManager.checkIfFileExists @location, @name1, (err,exists) => exists.should.be.false done() - - From 9b8f11f517c396df9a26abdbf65435a15fc37810 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Tue, 1 Sep 2015 12:25:10 +0100 Subject: [PATCH 142/555] Fix the check for a success response from S3. We need to handle both a 200 and 206 (partial content) response. 
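The 206 matters because getFileStream sends a `Range` header when `start`/`end` options are given, and S3 answers a satisfiable range request with `206 Partial Content` rather than `200 OK`, so a plain `!= 200` check turned every ranged download into an error. A rough sketch of that behaviour (assumes a knox client configured as in S3PersistorManager, with placeholder credentials; not part of this patch):

    # Rough sketch, not part of this patch: a ranged GET against S3 succeeds
    # with a 206 status, which the success check has to allow alongside 200.
    knox = require("knox")
    s3Client = knox.createClient
      key: "AWS_ACCESS_KEY"       # placeholder credentials
      secret: "AWS_SECRET_KEY"
      bucket: "my-bucket"
    s3Stream = s3Client.get("my/key", {'Range': 'bytes=0-8'})
    s3Stream.on 'response', (res) ->
      console.log res.statusCode  # expected: 206 Partial Content, not 200
    s3Stream.end()
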
--- services/filestore/app/coffee/S3PersistorManager.coffee | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index ede27b20be..15798185a3 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -77,8 +77,8 @@ module.exports = if res.statusCode == 404 logger.log bucketName:bucketName, key:key, "file not found in s3" return callback new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}"), null - if res.stausCode != 200 - logger.log bucketName:bucketName, key:key, "error getting file from s3" + if res.statusCode not in [200, 206] + logger.log bucketName:bucketName, key:key, "error getting file from s3: #{res.statusCode}" return callback new Error("Got non-200 response from S3: #{res.statusCode}"), null callback null, res s3Stream.on 'error', (err) -> From da5a538095627906978f31bb8467cae777a09da4 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Tue, 8 Sep 2015 09:23:08 +0100 Subject: [PATCH 143/555] Fix calls to `PersistorManager.getFileStream` which were missed in initial refactor. This should fix a breakage in the template publishing workflow. Ref: b70b837b7ba29b0631f3105ec1dd5f6cdebbd195 --- services/filestore/app/coffee/FileHandler.coffee | 11 +++++------ .../test/unit/coffee/FileHandlerTests.coffee | 6 +++--- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 16ac56e703..8803f55e22 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -39,7 +39,7 @@ module.exports = convetedKey = KeyBuilder.addCachingToKey(key, opts) PersistorManager.checkIfFileExists bucket, convetedKey, (err, exists)=> if exists - PersistorManager.getFileStream bucket, convetedKey, callback + PersistorManager.getFileStream bucket, convetedKey, opts, callback else @_getConvertedFileAndCache bucket, key, convetedKey, opts, callback @@ -58,10 +58,10 @@ module.exports = ], (err)-> if err? return callback(err) - PersistorManager.getFileStream bucket, convetedKey, callback + PersistorManager.getFileStream bucket, convetedKey, opts, callback _convertFile: (bucket, origonalKey, opts, callback)-> - @_writeS3FileToDisk bucket, origonalKey, (err, origonalFsPath)-> + @_writeS3FileToDisk bucket, origonalKey, opts, (err, origonalFsPath)-> done = (err, destPath)-> if err? 
logger.err err:err, bucket:bucket, origonalKey:origonalKey, opts:opts, "error converting file" @@ -79,7 +79,6 @@ module.exports = throw new Error("should have specified opts to convert file with #{JSON.stringify(opts)}") - _writeS3FileToDisk: (bucket, key, callback)-> - PersistorManager.getFileStream bucket, key, (err, fileStream)-> + _writeS3FileToDisk: (bucket, key, opts, callback)-> + PersistorManager.getFileStream bucket, key, opts, (err, fileStream)-> LocalFileWriter.writeStream fileStream, key, callback - diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index 74ca9975da..e67a8d38b7 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -135,7 +135,7 @@ describe "FileHandler", -> it "should getFileStream if it does exists", (done)-> @PersistorManager.checkIfFileExists.callsArgWith(2, null, true) - @PersistorManager.getFileStream.callsArgWith(2) + @PersistorManager.getFileStream.callsArgWith(3) @handler._getConvertedFile @bucket, @key, {}, => @PersistorManager.getFileStream.calledWith(@bucket).should.equal true done() @@ -152,7 +152,7 @@ describe "FileHandler", -> it "should _convertFile ", (done)-> @stubbedStream = {"something":"here"} @PersistorManager.sendFile = sinon.stub().callsArgWith(3) - @PersistorManager.getFileStream = sinon.stub().callsArgWith(2, null, @stubbedStream) + @PersistorManager.getFileStream = sinon.stub().callsArgWith(3, null, @stubbedStream) @convetedKey = @key+"converted" @handler._convertFile = sinon.stub().callsArgWith(3, null, @stubbedPath) @ImageOptimiser.compressPng = sinon.stub().callsArgWith(1) @@ -169,7 +169,7 @@ describe "FileHandler", -> @FileConverter.convert.callsArgWith(2, null, @formattedStubbedPath) @FileConverter.thumbnail.callsArgWith(1, null, @formattedStubbedPath) @FileConverter.preview.callsArgWith(1, null, @formattedStubbedPath) - @handler._writeS3FileToDisk = sinon.stub().callsArgWith(2, null, @stubbedPath) + @handler._writeS3FileToDisk = sinon.stub().callsArgWith(3, null, @stubbedPath) @LocalFileWriter.deleteFile.callsArgWith(1) it "should call thumbnail on the writer path if style was thumbnail was specified", (done)-> From 297ad78b1eb01bcf6cff0ecb95808d1c9dc3a47f Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Tue, 8 Sep 2015 10:32:41 +0100 Subject: [PATCH 144/555] Add an acceptance test to check the preview endpoint doesn't time out. This endpoint needs a lot more testing. 
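The failure mode being guarded against is a request that never completes: if any step of the convert-and-cache pipeline drops its error instead of passing it back, no response is ever written and the request only ends when the client gives up. Roughly, with hypothetical names (illustrative only):

    # Illustrative only, hypothetical names: a swallowed error means res.send
    # is never reached, so the client hangs until its own timeout fires.
    convertToPreview = (key, callback) ->
      callback(new Error("conversion failed"))   # stand-in for a failing convert step

    getPreview = (req, res) ->
      convertToPreview req.params.key, (err, path) ->
        return if err?                           # bug: error dropped, nothing is sent
        res.send 200
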
--- .../test/acceptence/coffee/SendingFileTest.coffee | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee index 7abdc22773..74064f9693 100644 --- a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee @@ -112,3 +112,14 @@ describe "Filestore", -> request.get newFileUrl, (err, response, body)=> body.should.equal @constantFileContent done() + + describe "getting the preview image", -> + + beforeEach -> + @fileUrl = @fileUrl + '?style=preview&cacheWarm=true' + + it "should not time out", (done) -> + @timeout(1000 * 20) + request.get @fileUrl, (err, response, body) => + expect(response).to.not.equal null + done() From b8042ae771a8f9256adf83cc92128c0d370232bc Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Tue, 8 Sep 2015 11:43:19 +0100 Subject: [PATCH 145/555] Add an acceptance test that uses a pdf file. --- .../acceptence/coffee/SendingFileTest.coffee | 29 +++++++++++++++++- services/filestore/test/fixtures/test.pdf | Bin 0 -> 35938 bytes 2 files changed, 28 insertions(+), 1 deletion(-) create mode 100644 services/filestore/test/fixtures/test.pdf diff --git a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee index 74064f9693..0b946312d1 100644 --- a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee @@ -113,13 +113,40 @@ describe "Filestore", -> body.should.equal @constantFileContent done() + describe "with a pdf file", -> + + beforeEach (done)-> + @timeout(1000 * 10) + @file_id = Math.random() + @fileUrl = "#{@filestoreUrl}/project/acceptence_tests/file/#{@file_id}" + @localFileReadPath = __dirname + '/../../fixtures/test.pdf' + + writeStream = request.post(@fileUrl) + + writeStream.on "end", done + fs.createReadStream(@localFileReadPath).pipe writeStream + + it "should be able get the file back", (done)-> + @timeout(1000 * 10) + request.get @fileUrl, (err, response, body)=> + expect(body.substring(0, 8)).to.equal '%PDF-1.5' + done() + describe "getting the preview image", -> beforeEach -> - @fileUrl = @fileUrl + '?style=preview&cacheWarm=true' + @fileUrl = @fileUrl + '?style=preview' it "should not time out", (done) -> @timeout(1000 * 20) request.get @fileUrl, (err, response, body) => expect(response).to.not.equal null done() + + it "should respond with image data", (done) -> + # note: this test relies of the imagemagick conversion working + @timeout(1000 * 20) + request.get @fileUrl, (err, response, body) => + expect(response.statusCode).to.equal 200 + expect(new Buffer(body.substring(0, 8)).toString('hex')).to.equal 'efbfbd504e470d0a1a0a' + done() diff --git a/services/filestore/test/fixtures/test.pdf b/services/filestore/test/fixtures/test.pdf new file mode 100644 index 0000000000000000000000000000000000000000..b021cc12b36b2d80cf274a12d14e2eb81e26298b GIT binary patch literal 35938 zcma&NQ*bU|)3q7fwr$(ov6CHZ$9`hlwr$(CZQHh!|DBq7Yo=GGVtes2%M2r&F zhE678CdPLEOke~AU>uzQCWbaJZX4_~{qloMa1eW*xdWp|NT|HwnL3pIZ1Ra?@D5mX zby9`mHtxUev^qwUEj@yr8~l7qAQ$n};$^vg@yyad@j+SG7tnfg&E6a~dR4GF)B77S zj@q6qpS6Zr8Qzt%FwjVbQsEj@rc<(ef34mfPaYlG2rvIKxy|D+jWkv3Z_F9YaU79lrrDM=7gq3OU7A=p?rMKU5_ z@#@s)%I6UAwne;TXwXvB?(_P;y>`{Fx{WJS-Y-5c0-m{#SB*Dytd}@<)Iv%eOloKg z(F$?;LNJO8>TKv}AYfoY@8DoTckAo$`?gSP`Yv8B##kHZF(T66k@-O({@XP+69%nT 
[... GIT binary patch data for services/filestore/test/fixtures/test.pdf omitted ...]
z7Np-qR^3}+AZ4FTJVvRMN*E@`XpA{%x#rXmwGE)BJp&^?gV!5G_U@xMujiRWiyrsB zGr}i>#JgRl>VxwzCgoGrAHYx4*Wv7UQwbiHaYJH@r-m6IwDnDT>w3CK`pI;M?@upL zTf?Uvl+XGbNuh4$n$wLi57csB=iI2hdz*>KFu=*Z3A{zW-m%2!E;e+nzUqLlJh+Bx zg4VcsKYQFeA<`pOV`z`xszeI2g|1r28efnnX4fPs%M^XCYC87g3rP{E>dt#|{=QH88}g(SpA123yi0Sh7_=Uf`6Ry8 zJa^;urY_pIzG+4&No9Hzz3`3A@RII2n0^|ov?7{jx-0Oa)MI3(#98CE>7iJJMYJb4 zv$u388fFt*6lf85Dw}GiKa474xyQn0!0{HNd;(n=wnf@!)V3BUUmTrzJ=dzT71rig zO&LQZ8g>iFIa=;L$h8-JV3x2syjLtca`z3JuY;g_exlBO7U=ZhhpT4)1cJ4PrSB$B zrLzlAI0MaC1w*Vtrs-Vvr{3$OanXwHZo~C?Rg{q4MUT!c|Mg0WZBRHhCfkEW;|n#u ze__L+M9^dcUp{w^rMmcYT*{8LP*hAQ)SZ*_n&h4>Rq!R6UwKPdPVh|a2S?!_AOmCk z;qv)%y~LB!2QovVE-GR_2v^z)OM>$Ssc9z!G9@^DWH;C4ykB5M;c7LQ*#%~GhnQ>`QdPkp|A0x`7 zW|a+5jZ{*JdSGLLAJToCUWyAJJ7hfKjIQAxqmh$6+1++3>ABg(rr>pY65wEWk2hqHgHJA8B^(M(EsvyJ6Pf7mntiu~e*!gM31bq(vHB&uip;OogAZ}qYCBVW-iL1M z3id63{Fud+6X}a0Odl>MbcuhHQ`i8~pl+*Tt?wh%aSpptXRX@@In*!J`aN&kY2U0G ztC|O6JqHUCdpgEU4L*q#4nMriow}8`HSE2}-{7#xy}ek}I^TTm;)+hBMM;kP1{cOl zzx-{r$kN55_*Tb*%9oK}+v=O`4~Msw-t6?Qqh08Wk+B%DqBN)4Wlcgh7xDPyyCpv_ zI>esl{igevR*8@6ivg{^;p#mR=^1%jK>$&^sQ1p%Ni zt{}LrPdJZNylsKR$N?fPL2aF?rC16a08& zH>7w5kSY75WH5RB_XmZ(J=&{t2JeT}1li_kf7U08#MhVJQ6rMW*$t(?BqvEd1 zio*bjSJr~Rl`d?5wUj7Lu~7+HuX#95?@)W3y0g%f7OFRenGw1mEj<3;HDfSILCnw~ SUX{=oO@uZ`QPIeX0Qwig6~gQQ literal 0 HcmV?d00001 From c6a81c94eb11150599cd6c1591b65267e4e20e11 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Tue, 8 Sep 2015 11:47:43 +0100 Subject: [PATCH 146/555] Test the `cacheWarm` option for preview. --- .../acceptence/coffee/SendingFileTest.coffee | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee index 0b946312d1..28cd4f3f7e 100644 --- a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee @@ -150,3 +150,22 @@ describe "Filestore", -> expect(response.statusCode).to.equal 200 expect(new Buffer(body.substring(0, 8)).toString('hex')).to.equal 'efbfbd504e470d0a1a0a' done() + + describe "warming the cache", -> + + beforeEach -> + @fileUrl = @fileUrl + '?style=preview&cacheWarm=true' + + it "should not time out", (done) -> + @timeout(1000 * 20) + request.get @fileUrl, (err, response, body) => + expect(response).to.not.equal null + done() + + it "should respond with only an 'OK'", (done) -> + # note: this test relies of the imagemagick conversion working + @timeout(1000 * 20) + request.get @fileUrl, (err, response, body) => + expect(response.statusCode).to.equal 200 + body.should.equal 'OK' + done() From a6fff2d4e4c1b5605e90f4f0c35a847933e3779e Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Tue, 8 Sep 2015 11:50:43 +0100 Subject: [PATCH 147/555] Check for a large response body, rather than exact match on the first few bytes. 
--- .../filestore/test/acceptence/coffee/SendingFileTest.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee index 28cd4f3f7e..2731e25565 100644 --- a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptence/coffee/SendingFileTest.coffee @@ -148,7 +148,7 @@ describe "Filestore", -> @timeout(1000 * 20) request.get @fileUrl, (err, response, body) => expect(response.statusCode).to.equal 200 - expect(new Buffer(body.substring(0, 8)).toString('hex')).to.equal 'efbfbd504e470d0a1a0a' + expect(body.length).to.be.greaterThan 400 done() describe "warming the cache", -> From fcfc595c352a02896d7cd5b1003d721a6b618bbe Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 10 Sep 2015 09:12:49 +0100 Subject: [PATCH 148/555] Handle potential error before calling `LocalFileWriter.writeStream`. --- services/filestore/app/coffee/FileHandler.coffee | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 8803f55e22..e49c172c92 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -81,4 +81,6 @@ module.exports = _writeS3FileToDisk: (bucket, key, opts, callback)-> PersistorManager.getFileStream bucket, key, opts, (err, fileStream)-> + if err? + return callback(err) LocalFileWriter.writeStream fileStream, key, callback From 0d450b4e33d67c39cfb688f0b15b2ce2e0dde1a8 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 10 Sep 2015 11:10:30 +0100 Subject: [PATCH 149/555] Tighten up error handling. --- services/filestore/app/coffee/FileController.coffee | 3 +++ services/filestore/app/coffee/FileHandler.coffee | 6 +++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 876227c029..2b10419fb2 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -46,6 +46,9 @@ module.exports = FileController = {key, bucket} = req logger.log key:key, bucket:bucket, "reciving request to insert file" FileHandler.insertFile bucket, key, req, (err)-> + if err? + logger.log err: err, key: key, bucket: bucket, "error inserting file" + res.send 500 res.send 200 copyFile: (req, res)-> diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index e49c172c92..698e311ca2 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -38,6 +38,8 @@ module.exports = _getConvertedFile: (bucket, key, opts, callback)-> convetedKey = KeyBuilder.addCachingToKey(key, opts) PersistorManager.checkIfFileExists bucket, convetedKey, (err, exists)=> + if err? + return callback(err) if exists PersistorManager.getFileStream bucket, convetedKey, opts, callback else @@ -62,6 +64,8 @@ module.exports = _convertFile: (bucket, origonalKey, opts, callback)-> @_writeS3FileToDisk bucket, origonalKey, opts, (err, origonalFsPath)-> + if err? + return callback(err) done = (err, destPath)-> if err? 
logger.err err:err, bucket:bucket, origonalKey:origonalKey, opts:opts, "error converting file" @@ -76,7 +80,7 @@ module.exports = else if opts.style == "preview" FileConverter.preview origonalFsPath, done else - throw new Error("should have specified opts to convert file with #{JSON.stringify(opts)}") + return callback(new Error("should have specified opts to convert file with #{JSON.stringify(opts)}")) _writeS3FileToDisk: (bucket, key, opts, callback)-> From cf28ee4f9b4fa9bc62f10a36b5d05a7e262cbd8c Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 17 Sep 2015 10:32:07 +0100 Subject: [PATCH 150/555] locked down packages --- services/filestore/package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index 3bc567e8a2..03443724e8 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -16,10 +16,10 @@ "grunt-mocha-test": "~0.8.2", "heapdump": "^0.3.2", "knox": "~0.9.1", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#master", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.1.0", "longjohn": "~0.2.2", "lynx": "0.0.11", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#master", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.3.0", "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", "pngcrush": "0.0.3", @@ -28,7 +28,7 @@ "request": "2.14.0", "response": "0.14.0", "rimraf": "2.2.8", - "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#master", + "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", "stream-buffers": "~0.2.5", "underscore": "~1.5.2" }, From 9b40d34b2a8d33459c24656b07496aa031c891e5 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 7 Oct 2015 18:39:24 +0100 Subject: [PATCH 151/555] Acceptance tests no longer run on Travis --- services/filestore/.travis.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/services/filestore/.travis.yml b/services/filestore/.travis.yml index 6adc08643a..29f5884d60 100644 --- a/services/filestore/.travis.yml +++ b/services/filestore/.travis.yml @@ -10,12 +10,8 @@ install: - npm install - grunt install -before_script: - - grunt forever:app:start - script: - grunt test:unit - - grunt test:acceptance services: - redis-server From f723b1699bd894b47476c162e6d01f4f9eecb988 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 7 Oct 2015 18:39:47 +0100 Subject: [PATCH 152/555] Mongo/redis no longer needed without acceptance tests --- services/filestore/.travis.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/services/filestore/.travis.yml b/services/filestore/.travis.yml index 29f5884d60..c9b4f1cf1a 100644 --- a/services/filestore/.travis.yml +++ b/services/filestore/.travis.yml @@ -13,6 +13,3 @@ install: script: - grunt test:unit -services: - - redis-server - - mongodb From 63aef6b832cf080ca8cc9d662eeff94f9418a60b Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Thu, 5 Nov 2015 12:11:02 +0100 Subject: [PATCH 153/555] Fix postmerge issue --- services/filestore/app/coffee/FileHandler.coffee | 7 ------- 1 file changed, 7 deletions(-) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index b11036465a..a0e80dde71 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -60,19 +60,12 @@ 
module.exports = ], (err)-> if err? return callback(err) -<<<<<<< HEAD PersistorManager.getFileStream bucket, convertedKey, callback _convertFile: (bucket, originalKey, opts, callback)-> @_writeS3FileToDisk bucket, originalKey, (err, originalFsPath)-> -======= - PersistorManager.getFileStream bucket, convetedKey, opts, callback - - _convertFile: (bucket, origonalKey, opts, callback)-> - @_writeS3FileToDisk bucket, origonalKey, opts, (err, origonalFsPath)-> if err? return callback(err) ->>>>>>> sharelatex/master done = (err, destPath)-> if err? logger.err err:err, bucket:bucket, originalKey:originalKey, opts:opts, "error converting file" From 26523b011a58288ef768d4536995b2b3a2797d92 Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Mon, 16 Nov 2015 09:59:27 +0100 Subject: [PATCH 154/555] small fixes --- services/filestore/app/coffee/FSPersistorManager.coffee | 5 ++--- services/filestore/app/coffee/FileHandler.coffee | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 2ade1f3a5b..d1b72806cf 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -3,6 +3,7 @@ fs = require("fs") LocalFileWriter = require("./LocalFileWriter") Errors = require('./Errors') rimraf = require("rimraf") +_ = require "underscore" filterName = (key) -> return key.replace /\//g, "_" @@ -29,9 +30,7 @@ module.exports = # opts may be {start: Number, end: Number} getFileStream: (location, name, opts, _callback = (err, res)->) -> - callback = (args...) -> - _callback(args...) - _callback = () -> + callback = _.once _callback filteredName = filterName name logger.log location:location, name:filteredName, "getting file" sourceStream = fs.createReadStream "#{location}/#{filteredName}", opts diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index a0e80dde71..917df653db 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -63,7 +63,7 @@ module.exports = PersistorManager.getFileStream bucket, convertedKey, callback _convertFile: (bucket, originalKey, opts, callback)-> - @_writeS3FileToDisk bucket, originalKey, (err, originalFsPath)-> + @_writeS3FileToDisk bucket, originalKey, opts, (err, originalFsPath)-> if err? 
return callback(err) done = (err, destPath)-> From 476db58c3f77f6c2f9f179ba2ffed940f573c051 Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Tue, 17 Nov 2015 12:52:25 +0100 Subject: [PATCH 155/555] Add the opts parameter to getFileStream and implement Range requests --- .../filestore/app/coffee/AWSSDKPersistorManager.coffee | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee index e1474fa009..13ba460ae3 100644 --- a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee @@ -21,10 +21,15 @@ module.exports = logger.err err: err, Bucket: bucketName, Key: key, "error sending file stream to s3" callback err - getFileStream: (bucketName, key, callback = (err, res)->)-> + getFileStream: (bucketName, key, opts, callback = (err, res)->)-> logger.log bucketName:bucketName, key:key, "get file stream from s3" callback = _.once callback - request = s3.getObject(Bucket:bucketName, Key: key) + params = + Bucket:bucketName + Key: key + if opts.start? and opts.end? + params['Range'] = "bytes=#{opts.start}-#{opts.end}" + request = s3.getObject params stream = request.createReadStream() stream.on 'readable', () -> callback null, stream From bfd41fdaf9c6996bc8b281a70be22da4403f4c63 Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Fri, 20 Nov 2015 12:02:22 +0100 Subject: [PATCH 156/555] Add aws-sdk unit test. Fix Aws-Sdk persistor to return a correct error on file not found. Fix FileHandler after some change were lost on a previous merge --- .../app/coffee/AWSSDKPersistorManager.coffee | 3 + .../filestore/app/coffee/FileHandler.coffee | 9 +- .../coffee/AWSSDKPersistorManagerTests.coffee | 249 ++++++++++++++++++ 3 files changed, 256 insertions(+), 5 deletions(-) create mode 100644 services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee index 13ba460ae3..b1465c126d 100644 --- a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee @@ -2,6 +2,7 @@ logger = require "logger-sharelatex" aws = require "aws-sdk" _ = require "underscore" fs = require "fs" +Errors = require("./Errors") s3 = new aws.S3() @@ -35,6 +36,8 @@ module.exports = callback null, stream stream.on 'error', (err) -> logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" + if err.code == 'NoSuchKey' + return callback new Errors.NotFoundError "File not found in S3: #{bucketName}:#{key}" callback err copyFile: (bucketName, sourceKey, destKey, callback)-> diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 917df653db..a7f9ed84b7 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -41,16 +41,15 @@ module.exports = if err? 
return callback err if exists - PersistorManager.getFileStream bucket, convertedKey, callback + PersistorManager.getFileStream bucket, convertedKey, null, callback else @_getConvertedFileAndCache bucket, key, convertedKey, opts, callback _getConvertedFileAndCache: (bucket, key, convertedKey, opts, callback)-> - self = @ convertedFsPath = "" async.series [ - (cb)-> - self._convertFile bucket, key, opts, (err, fileSystemPath)-> + (cb) => + @_convertFile bucket, key, opts, (err, fileSystemPath) -> convertedFsPath = fileSystemPath cb err (cb)-> @@ -60,7 +59,7 @@ module.exports = ], (err)-> if err? return callback(err) - PersistorManager.getFileStream bucket, convertedKey, callback + PersistorManager.getFileStream bucket, convertedKey, opts, callback _convertFile: (bucket, originalKey, opts, callback)-> @_writeS3FileToDisk bucket, originalKey, opts, (err, originalFsPath)-> diff --git a/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee new file mode 100644 index 0000000000..a3ec3db5ba --- /dev/null +++ b/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee @@ -0,0 +1,249 @@ +sinon = require 'sinon' +chai = require 'chai' + +should = chai.should() +expect = chai.expect + +modulePath = "../../../app/js/AWSSDKPersistorManager.js" +SandboxedModule = require 'sandboxed-module' + +describe "AWSSDKPersistorManager", -> + beforeEach -> + @settings = + filestore: + backend: "aws-sdk" + @s3 = + upload: sinon.stub() + getObject: sinon.stub() + copyObject: sinon.stub() + deleteObject: sinon.stub() + listObjects: sinon.stub() + deleteObjects: sinon.stub() + headObject: sinon.stub() + @awssdk = + S3: sinon.stub().returns @s3 + + @requires = + "aws-sdk": @awssdk + "settings-sharelatex": @settings + "logger-sharelatex": + log:-> + err:-> + "fs": @fs = + createReadStream: sinon.stub() + "./Errors": @Errors = + NotFoundError: sinon.stub() + @key = "my/key" + @bucketName = "my-bucket" + @error = "my error" + @AWSSDKPersistorManager = SandboxedModule.require modulePath, requires: @requires + + describe "sendFile", -> + beforeEach -> + @stream = {} + @fsPath = "/usr/local/some/file" + @fs.createReadStream.returns @stream + + it "should put the file with s3.upload", (done) -> + @s3.upload.callsArgWith 1 + @AWSSDKPersistorManager.sendFile @bucketName, @key, @fsPath, (err) => + expect(err).to.not.be.ok + expect(@s3.upload.calledOnce, "called only once").to.be.true + expect((@s3.upload.calledWith Bucket: @bucketName, Key: @key, Body: @stream) + , "called with correct arguments").to.be.true + done() + + it "should dispatch the error from s3.upload", (done) -> + @s3.upload.callsArgWith 1, @error + @AWSSDKPersistorManager.sendFile @bucketName, @key, @fsPath, (err) => + expect(err).to.equal @error + done() + + + describe "sendStream", -> + beforeEach -> + @stream = {} + + it "should put the file with s3.upload", (done) -> + @s3.upload.callsArgWith 1 + @AWSSDKPersistorManager.sendStream @bucketName, @key, @stream, (err) => + expect(err).to.not.be.ok + expect(@s3.upload.calledOnce, "called only once").to.be.true + expect((@s3.upload.calledWith Bucket: @bucketName, Key: @key, Body: @stream), + "called with correct arguments").to.be.true + done() + + it "should dispatch the error from s3.upload", (done) -> + @s3.upload.callsArgWith 1, @error + @AWSSDKPersistorManager.sendStream @bucketName, @key, @stream, (err) => + expect(err).to.equal @error + done() + + describe "getFileStream", -> + beforeEach -> + @opts = {} + 
@stream = {} + @read_stream = + on: @read_stream_on = sinon.stub() + @object = + createReadStream: sinon.stub().returns @read_stream + @s3.getObject.returns @object + + it "should return a stream from s3.getObject", (done) -> + @read_stream_on.withArgs('readable').callsArgWith 1 + + @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => + expect(@read_stream_on.calledTwice) + expect(err).to.not.be.ok + expect(stream, "returned the stream").to.equal @read_stream + expect((@s3.getObject.calledWith Bucket: @bucketName, Key: @key), + "called with correct arguments").to.be.true + done() + + describe "with start and end options", -> + beforeEach -> + @opts = + start: 0 + end: 8 + it "should pass headers to the s3.GetObject", (done) -> + @read_stream_on.withArgs('readable').callsArgWith 1 + @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => + expect((@s3.getObject.calledWith Bucket: @bucketName, Key: @key, Range: 'bytes=0-8'), + "called with correct arguments").to.be.true + done() + + describe "error conditions", -> + describe "when the file doesn't exist", -> + beforeEach -> + @error = new Error() + @error.code = 'NoSuchKey' + it "should produce a NotFoundError", (done) -> + @read_stream_on.withArgs('error').callsArgWith 1, @error + @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => + expect(stream).to.not.be.ok + expect(err).to.be.ok + expect(err instanceof @Errors.NotFoundError, "error is a correct instance").to.equal true + done() + + describe "when there is some other error", -> + beforeEach -> + @error = new Error() + it "should dispatch the error from s3 object stream", (done) -> + @read_stream_on.withArgs('error').callsArgWith 1, @error + @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => + expect(stream).to.not.be.ok + expect(err).to.be.ok + expect(err).to.equal @error + done() + + describe "copyFile", -> + beforeEach -> + @destKey = "some/key" + @stream = {} + + it "should copy the file with s3.copyObject", (done) -> + @s3.copyObject.callsArgWith 1 + @AWSSDKPersistorManager.copyFile @bucketName, @key, @destKey, (err) => + expect(err).to.not.be.ok + expect(@s3.copyObject.calledOnce, "called only once").to.be.true + expect((@s3.copyObject.calledWith Bucket: @bucketName, Key: @destKey, CopySource: @bucketName + '/' + @key), + "called with correct arguments").to.be.true + done() + + it "should dispatch the error from s3.copyObject", (done) -> + @s3.copyObject.callsArgWith 1, @error + @AWSSDKPersistorManager.copyFile @bucketName, @key, @destKey, (err) => + expect(err).to.equal @error + done() + + describe "deleteFile", -> + it "should delete the file with s3.deleteObject", (done) -> + @s3.deleteObject.callsArgWith 1 + @AWSSDKPersistorManager.deleteFile @bucketName, @key, (err) => + expect(err).to.not.be.ok + expect(@s3.deleteObject.calledOnce, "called only once").to.be.true + expect((@s3.deleteObject.calledWith Bucket: @bucketName, Key: @key), + "called with correct arguments").to.be.true + done() + + it "should dispatch the error from s3.deleteObject", (done) -> + @s3.deleteObject.callsArgWith 1, @error + @AWSSDKPersistorManager.deleteFile @bucketName, @key, (err) => + expect(err).to.equal @error + done() + + describe "deleteDirectory", -> + + it "should list the directory content using s3.listObjects", (done) -> + @s3.listObjects.callsArgWith 1, null, Contents: [] + @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => + expect(err).to.not.be.ok + 
expect(@s3.listObjects.calledOnce, "called only once").to.be.true + expect((@s3.listObjects.calledWith Bucket: @bucketName, Prefix: @key), + "called with correct arguments").to.be.true + done() + + it "should dispatch the error from s3.listObjects", (done) -> + @s3.listObjects.callsArgWith 1, @error + @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => + expect(err).to.equal @error + done() + + describe "with directory content", -> + beforeEach -> + @fileList = [ + Key: 'foo' + , Key: 'bar' + , Key: 'baz' + ] + + it "should forward the file keys to s3.deleteObjects", (done) -> + @s3.listObjects.callsArgWith 1, null, Contents: @fileList + @s3.deleteObjects.callsArgWith 1 + @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => + expect(err).to.not.be.ok + expect(@s3.deleteObjects.calledOnce, "called only once").to.be.true + expect((@s3.deleteObjects.calledWith + Bucket: @bucketName + Delete: + Quiet: true + Objects: @fileList), + "called with correct arguments").to.be.true + done() + + it "should dispatch the error from s3.deleteObjects", (done) -> + @s3.listObjects.callsArgWith 1, null, Contents: @fileList + @s3.deleteObjects.callsArgWith 1, @error + @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => + expect(err).to.equal @error + done() + + + describe "checkIfFileExists", -> + + it "should check for the file with s3.headObject", (done) -> + @s3.headObject.callsArgWith 1, null, {} + @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => + expect(err).to.not.be.ok + expect(@s3.headObject.calledOnce, "called only once").to.be.true + expect((@s3.headObject.calledWith Bucket: @bucketName, Key: @key), + "called with correct arguments").to.be.true + done() + + it "should return false on an inexistant file", (done) -> + @s3.headObject.callsArgWith 1, null, {} + @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => + expect(exists).to.be.false + done() + + it "should return true on an existing file", (done) -> + @s3.headObject.callsArgWith 1, null, ETag: "etag" + @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => + expect(exists).to.be.true + done() + + it "should dispatch the error from s3.headObject", (done) -> + @s3.headObject.callsArgWith 1, @error + @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => + expect(err).to.equal @error + done() From 0219900933fa5b5a33bac7866b152760a10259e3 Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Fri, 20 Nov 2015 12:07:04 +0100 Subject: [PATCH 157/555] Forward the options argument to getFileStream --- services/filestore/app/coffee/FileHandler.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index a7f9ed84b7..f22285edae 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -41,7 +41,7 @@ module.exports = if err? 
return callback err if exists - PersistorManager.getFileStream bucket, convertedKey, null, callback + PersistorManager.getFileStream bucket, convertedKey, opts, callback else @_getConvertedFileAndCache bucket, key, convertedKey, opts, callback From b6486cb825676e1c0699dd32206b265de325dc87 Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Fri, 20 Nov 2015 14:05:54 +0100 Subject: [PATCH 158/555] Cleanup the dependencies --- services/filestore/package.json | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index 8e2dfd084b..184899250e 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -9,17 +9,11 @@ "dependencies": { "async": "~0.2.10", "aws-sdk": "^2.1.39", - "bunyan": "^1.3.5", "coffee-script": "~1.7.1", "express": "~3.4.8", - "grunt-bunyan": "^0.5.0", - "grunt-execute": "^0.2.2", - "grunt-mocha-test": "~0.8.2", "heapdump": "^0.3.2", "knox": "~0.9.1", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.1.0", - "longjohn": "~0.2.2", - "lynx": "0.0.11", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.3.0", "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", @@ -37,7 +31,11 @@ "sinon": "", "chai": "", "sandboxed-module": "", + "bunyan": "^1.3.5", "grunt": "0.4.1", + "grunt-bunyan": "^0.5.0", + "grunt-execute": "^0.2.2", + "grunt-mocha-test": "~0.8.2", "grunt-contrib-requirejs": "0.4.1", "grunt-contrib-coffee": "0.7.0", "grunt-contrib-watch": "0.5.3", From 3b6270236c4fd67495b6308ebb87b468088e67af Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Fri, 20 Nov 2015 14:38:23 +0100 Subject: [PATCH 159/555] Replace indentation from 2 space to tabs. --- .../app/coffee/AWSSDKPersistorManager.coffee | 142 +++--- .../coffee/AWSSDKPersistorManagerTests.coffee | 416 +++++++++--------- 2 files changed, 279 insertions(+), 279 deletions(-) diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee index b1465c126d..50e15cfa27 100644 --- a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee @@ -2,85 +2,85 @@ logger = require "logger-sharelatex" aws = require "aws-sdk" _ = require "underscore" fs = require "fs" -Errors = require("./Errors") +Errors = require "./Errors" s3 = new aws.S3() module.exports = - sendFile: (bucketName, key, fsPath, callback)-> - logger.log bucketName:bucketName, key:key, "send file data to s3" - stream = fs.createReadStream fsPath - s3.upload Bucket: bucketName, Key: key, Body: stream, (err, data) -> - if err? - logger.err err: err, Bucket: bucketName, Key: key, "error sending file data to s3" - callback err + sendFile: (bucketName, key, fsPath, callback)-> + logger.log bucketName:bucketName, key:key, "send file data to s3" + stream = fs.createReadStream fsPath + s3.upload Bucket: bucketName, Key: key, Body: stream, (err, data) -> + if err? + logger.err err: err, Bucket: bucketName, Key: key, "error sending file data to s3" + callback err - sendStream: (bucketName, key, stream, callback)-> - logger.log bucketName:bucketName, key:key, "send file stream to s3" - s3.upload Bucket: bucketName, Key: key, Body: stream, (err, data) -> - if err? 
- logger.err err: err, Bucket: bucketName, Key: key, "error sending file stream to s3" - callback err + sendStream: (bucketName, key, stream, callback)-> + logger.log bucketName:bucketName, key:key, "send file stream to s3" + s3.upload Bucket: bucketName, Key: key, Body: stream, (err, data) -> + if err? + logger.err err: err, Bucket: bucketName, Key: key, "error sending file stream to s3" + callback err - getFileStream: (bucketName, key, opts, callback = (err, res)->)-> - logger.log bucketName:bucketName, key:key, "get file stream from s3" - callback = _.once callback - params = - Bucket:bucketName - Key: key - if opts.start? and opts.end? - params['Range'] = "bytes=#{opts.start}-#{opts.end}" - request = s3.getObject params - stream = request.createReadStream() - stream.on 'readable', () -> - callback null, stream - stream.on 'error', (err) -> - logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" - if err.code == 'NoSuchKey' - return callback new Errors.NotFoundError "File not found in S3: #{bucketName}:#{key}" - callback err + getFileStream: (bucketName, key, opts, callback = (err, res)->)-> + logger.log bucketName:bucketName, key:key, "get file stream from s3" + callback = _.once callback + params = + Bucket:bucketName + Key: key + if opts.start? and opts.end? + params['Range'] = "bytes=#{opts.start}-#{opts.end}" + request = s3.getObject params + stream = request.createReadStream() + stream.on 'readable', () -> + callback null, stream + stream.on 'error', (err) -> + logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" + if err.code == 'NoSuchKey' + return callback new Errors.NotFoundError "File not found in S3: #{bucketName}:#{key}" + callback err - copyFile: (bucketName, sourceKey, destKey, callback)-> - logger.log bucketName:bucketName, sourceKey:sourceKey, destKey: destKey, "copying file in s3" - source = bucketName + '/' + sourceKey - s3.copyObject {Bucket: bucketName, Key: destKey, CopySource: source}, (err) -> - if err? - logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in s3" - callback err + copyFile: (bucketName, sourceKey, destKey, callback)-> + logger.log bucketName:bucketName, sourceKey:sourceKey, destKey: destKey, "copying file in s3" + source = bucketName + '/' + sourceKey + s3.copyObject {Bucket: bucketName, Key: destKey, CopySource: source}, (err) -> + if err? + logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in s3" + callback err - deleteFile: (bucketName, key, callback)-> - logger.log bucketName:bucketName, key:key, "delete file in s3" - s3.deleteObject {Bucket: bucketName, Key: key}, (err) -> - if err? - logger.err err:err, bucketName:bucketName, key:key, "something went wrong deleting file in s3" - callback err + deleteFile: (bucketName, key, callback)-> + logger.log bucketName:bucketName, key:key, "delete file in s3" + s3.deleteObject {Bucket: bucketName, Key: key}, (err) -> + if err? + logger.err err:err, bucketName:bucketName, key:key, "something went wrong deleting file in s3" + callback err - deleteDirectory: (bucketName, key, callback)-> - logger.log bucketName:bucketName, key:key, "delete directory in s3" - s3.listObjects {Bucket: bucketName, Prefix: key}, (err, data) -> - if err? 
- logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in s3" - return callback err - if data.Contents.length == 0 - logger.log bucketName:bucketName, key:key, "the directory is empty" - return callback() - keys = _.map data.Contents, (entry)-> - Key: entry.Key - s3.deleteObjects - Bucket: bucketName - Delete: - Objects: keys - Quiet: true - , (err) -> - if err? - logger.err err:err, bucketName:bucketName, key:keys, "something went wrong deleting directory in s3" - callback err + deleteDirectory: (bucketName, key, callback)-> + logger.log bucketName:bucketName, key:key, "delete directory in s3" + s3.listObjects {Bucket: bucketName, Prefix: key}, (err, data) -> + if err? + logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in s3" + return callback err + if data.Contents.length == 0 + logger.log bucketName:bucketName, key:key, "the directory is empty" + return callback() + keys = _.map data.Contents, (entry)-> + Key: entry.Key + s3.deleteObjects + Bucket: bucketName + Delete: + Objects: keys + Quiet: true + , (err) -> + if err? + logger.err err:err, bucketName:bucketName, key:keys, "something went wrong deleting directory in s3" + callback err - checkIfFileExists:(bucketName, key, callback)-> - logger.log bucketName:bucketName, key:key, "check file existence in s3" - s3.headObject {Bucket: bucketName, Key: key}, (err, data) -> - if err? - logger.err err:err, bucketName:bucketName, key:key, "something went wrong checking head in s3" - return callback err - callback null, data.ETag? + checkIfFileExists:(bucketName, key, callback)-> + logger.log bucketName:bucketName, key:key, "check file existence in s3" + s3.headObject {Bucket: bucketName, Key: key}, (err, data) -> + if err? + logger.err err:err, bucketName:bucketName, key:key, "something went wrong checking head in s3" + return callback err + callback null, data.ETag? 
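For context, a minimal usage sketch of the persistor API shown above (illustrative only, not taken from the patches); the bucket name and object key are placeholder values, and it assumes the compiled module is required next to the Errors module it references:

# Hypothetical usage sketch; "user_files" and the key are placeholders.
AWSSDKPersistorManager = require "./AWSSDKPersistorManager"
Errors = require "./Errors"

# Ask for the first 100 bytes; opts.start/opts.end map to the S3 Range header above.
opts = { start: 0, end: 99 }
AWSSDKPersistorManager.getFileStream "user_files", "project_id/file_id", opts, (err, stream) ->
	if err instanceof Errors.NotFoundError
		console.log "object does not exist in the bucket"
	else if err?
		console.log "error getting file stream", err
	else
		stream.pipe process.stdout
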
diff --git a/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee index a3ec3db5ba..0ca8c65ffc 100644 --- a/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee @@ -8,242 +8,242 @@ modulePath = "../../../app/js/AWSSDKPersistorManager.js" SandboxedModule = require 'sandboxed-module' describe "AWSSDKPersistorManager", -> - beforeEach -> - @settings = - filestore: - backend: "aws-sdk" - @s3 = - upload: sinon.stub() - getObject: sinon.stub() - copyObject: sinon.stub() - deleteObject: sinon.stub() - listObjects: sinon.stub() - deleteObjects: sinon.stub() - headObject: sinon.stub() - @awssdk = - S3: sinon.stub().returns @s3 + beforeEach -> + @settings = + filestore: + backend: "aws-sdk" + @s3 = + upload: sinon.stub() + getObject: sinon.stub() + copyObject: sinon.stub() + deleteObject: sinon.stub() + listObjects: sinon.stub() + deleteObjects: sinon.stub() + headObject: sinon.stub() + @awssdk = + S3: sinon.stub().returns @s3 - @requires = - "aws-sdk": @awssdk - "settings-sharelatex": @settings - "logger-sharelatex": - log:-> - err:-> - "fs": @fs = - createReadStream: sinon.stub() - "./Errors": @Errors = - NotFoundError: sinon.stub() - @key = "my/key" - @bucketName = "my-bucket" - @error = "my error" - @AWSSDKPersistorManager = SandboxedModule.require modulePath, requires: @requires + @requires = + "aws-sdk": @awssdk + "settings-sharelatex": @settings + "logger-sharelatex": + log:-> + err:-> + "fs": @fs = + createReadStream: sinon.stub() + "./Errors": @Errors = + NotFoundError: sinon.stub() + @key = "my/key" + @bucketName = "my-bucket" + @error = "my error" + @AWSSDKPersistorManager = SandboxedModule.require modulePath, requires: @requires - describe "sendFile", -> - beforeEach -> - @stream = {} - @fsPath = "/usr/local/some/file" - @fs.createReadStream.returns @stream + describe "sendFile", -> + beforeEach -> + @stream = {} + @fsPath = "/usr/local/some/file" + @fs.createReadStream.returns @stream - it "should put the file with s3.upload", (done) -> - @s3.upload.callsArgWith 1 - @AWSSDKPersistorManager.sendFile @bucketName, @key, @fsPath, (err) => - expect(err).to.not.be.ok - expect(@s3.upload.calledOnce, "called only once").to.be.true - expect((@s3.upload.calledWith Bucket: @bucketName, Key: @key, Body: @stream) - , "called with correct arguments").to.be.true - done() + it "should put the file with s3.upload", (done) -> + @s3.upload.callsArgWith 1 + @AWSSDKPersistorManager.sendFile @bucketName, @key, @fsPath, (err) => + expect(err).to.not.be.ok + expect(@s3.upload.calledOnce, "called only once").to.be.true + expect((@s3.upload.calledWith Bucket: @bucketName, Key: @key, Body: @stream) + , "called with correct arguments").to.be.true + done() - it "should dispatch the error from s3.upload", (done) -> - @s3.upload.callsArgWith 1, @error - @AWSSDKPersistorManager.sendFile @bucketName, @key, @fsPath, (err) => - expect(err).to.equal @error - done() + it "should dispatch the error from s3.upload", (done) -> + @s3.upload.callsArgWith 1, @error + @AWSSDKPersistorManager.sendFile @bucketName, @key, @fsPath, (err) => + expect(err).to.equal @error + done() - describe "sendStream", -> - beforeEach -> - @stream = {} + describe "sendStream", -> + beforeEach -> + @stream = {} - it "should put the file with s3.upload", (done) -> - @s3.upload.callsArgWith 1 - @AWSSDKPersistorManager.sendStream @bucketName, @key, @stream, (err) => - 
expect(err).to.not.be.ok - expect(@s3.upload.calledOnce, "called only once").to.be.true - expect((@s3.upload.calledWith Bucket: @bucketName, Key: @key, Body: @stream), - "called with correct arguments").to.be.true - done() + it "should put the file with s3.upload", (done) -> + @s3.upload.callsArgWith 1 + @AWSSDKPersistorManager.sendStream @bucketName, @key, @stream, (err) => + expect(err).to.not.be.ok + expect(@s3.upload.calledOnce, "called only once").to.be.true + expect((@s3.upload.calledWith Bucket: @bucketName, Key: @key, Body: @stream), + "called with correct arguments").to.be.true + done() - it "should dispatch the error from s3.upload", (done) -> - @s3.upload.callsArgWith 1, @error - @AWSSDKPersistorManager.sendStream @bucketName, @key, @stream, (err) => - expect(err).to.equal @error - done() + it "should dispatch the error from s3.upload", (done) -> + @s3.upload.callsArgWith 1, @error + @AWSSDKPersistorManager.sendStream @bucketName, @key, @stream, (err) => + expect(err).to.equal @error + done() - describe "getFileStream", -> - beforeEach -> - @opts = {} - @stream = {} - @read_stream = - on: @read_stream_on = sinon.stub() - @object = - createReadStream: sinon.stub().returns @read_stream - @s3.getObject.returns @object + describe "getFileStream", -> + beforeEach -> + @opts = {} + @stream = {} + @read_stream = + on: @read_stream_on = sinon.stub() + @object = + createReadStream: sinon.stub().returns @read_stream + @s3.getObject.returns @object - it "should return a stream from s3.getObject", (done) -> - @read_stream_on.withArgs('readable').callsArgWith 1 + it "should return a stream from s3.getObject", (done) -> + @read_stream_on.withArgs('readable').callsArgWith 1 - @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => - expect(@read_stream_on.calledTwice) - expect(err).to.not.be.ok - expect(stream, "returned the stream").to.equal @read_stream - expect((@s3.getObject.calledWith Bucket: @bucketName, Key: @key), - "called with correct arguments").to.be.true - done() + @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => + expect(@read_stream_on.calledTwice) + expect(err).to.not.be.ok + expect(stream, "returned the stream").to.equal @read_stream + expect((@s3.getObject.calledWith Bucket: @bucketName, Key: @key), + "called with correct arguments").to.be.true + done() - describe "with start and end options", -> - beforeEach -> - @opts = - start: 0 - end: 8 - it "should pass headers to the s3.GetObject", (done) -> - @read_stream_on.withArgs('readable').callsArgWith 1 - @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => - expect((@s3.getObject.calledWith Bucket: @bucketName, Key: @key, Range: 'bytes=0-8'), - "called with correct arguments").to.be.true - done() + describe "with start and end options", -> + beforeEach -> + @opts = + start: 0 + end: 8 + it "should pass headers to the s3.GetObject", (done) -> + @read_stream_on.withArgs('readable').callsArgWith 1 + @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => + expect((@s3.getObject.calledWith Bucket: @bucketName, Key: @key, Range: 'bytes=0-8'), + "called with correct arguments").to.be.true + done() - describe "error conditions", -> - describe "when the file doesn't exist", -> - beforeEach -> - @error = new Error() - @error.code = 'NoSuchKey' - it "should produce a NotFoundError", (done) -> - @read_stream_on.withArgs('error').callsArgWith 1, @error - @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) 
=> - expect(stream).to.not.be.ok - expect(err).to.be.ok - expect(err instanceof @Errors.NotFoundError, "error is a correct instance").to.equal true - done() + describe "error conditions", -> + describe "when the file doesn't exist", -> + beforeEach -> + @error = new Error() + @error.code = 'NoSuchKey' + it "should produce a NotFoundError", (done) -> + @read_stream_on.withArgs('error').callsArgWith 1, @error + @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => + expect(stream).to.not.be.ok + expect(err).to.be.ok + expect(err instanceof @Errors.NotFoundError, "error is a correct instance").to.equal true + done() - describe "when there is some other error", -> - beforeEach -> - @error = new Error() - it "should dispatch the error from s3 object stream", (done) -> - @read_stream_on.withArgs('error').callsArgWith 1, @error - @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => - expect(stream).to.not.be.ok - expect(err).to.be.ok - expect(err).to.equal @error - done() + describe "when there is some other error", -> + beforeEach -> + @error = new Error() + it "should dispatch the error from s3 object stream", (done) -> + @read_stream_on.withArgs('error').callsArgWith 1, @error + @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => + expect(stream).to.not.be.ok + expect(err).to.be.ok + expect(err).to.equal @error + done() - describe "copyFile", -> - beforeEach -> - @destKey = "some/key" - @stream = {} + describe "copyFile", -> + beforeEach -> + @destKey = "some/key" + @stream = {} - it "should copy the file with s3.copyObject", (done) -> - @s3.copyObject.callsArgWith 1 - @AWSSDKPersistorManager.copyFile @bucketName, @key, @destKey, (err) => - expect(err).to.not.be.ok - expect(@s3.copyObject.calledOnce, "called only once").to.be.true - expect((@s3.copyObject.calledWith Bucket: @bucketName, Key: @destKey, CopySource: @bucketName + '/' + @key), - "called with correct arguments").to.be.true - done() + it "should copy the file with s3.copyObject", (done) -> + @s3.copyObject.callsArgWith 1 + @AWSSDKPersistorManager.copyFile @bucketName, @key, @destKey, (err) => + expect(err).to.not.be.ok + expect(@s3.copyObject.calledOnce, "called only once").to.be.true + expect((@s3.copyObject.calledWith Bucket: @bucketName, Key: @destKey, CopySource: @bucketName + '/' + @key), + "called with correct arguments").to.be.true + done() - it "should dispatch the error from s3.copyObject", (done) -> - @s3.copyObject.callsArgWith 1, @error - @AWSSDKPersistorManager.copyFile @bucketName, @key, @destKey, (err) => - expect(err).to.equal @error - done() + it "should dispatch the error from s3.copyObject", (done) -> + @s3.copyObject.callsArgWith 1, @error + @AWSSDKPersistorManager.copyFile @bucketName, @key, @destKey, (err) => + expect(err).to.equal @error + done() - describe "deleteFile", -> - it "should delete the file with s3.deleteObject", (done) -> - @s3.deleteObject.callsArgWith 1 - @AWSSDKPersistorManager.deleteFile @bucketName, @key, (err) => - expect(err).to.not.be.ok - expect(@s3.deleteObject.calledOnce, "called only once").to.be.true - expect((@s3.deleteObject.calledWith Bucket: @bucketName, Key: @key), - "called with correct arguments").to.be.true - done() + describe "deleteFile", -> + it "should delete the file with s3.deleteObject", (done) -> + @s3.deleteObject.callsArgWith 1 + @AWSSDKPersistorManager.deleteFile @bucketName, @key, (err) => + expect(err).to.not.be.ok + expect(@s3.deleteObject.calledOnce, "called only 
once").to.be.true + expect((@s3.deleteObject.calledWith Bucket: @bucketName, Key: @key), + "called with correct arguments").to.be.true + done() - it "should dispatch the error from s3.deleteObject", (done) -> - @s3.deleteObject.callsArgWith 1, @error - @AWSSDKPersistorManager.deleteFile @bucketName, @key, (err) => - expect(err).to.equal @error - done() + it "should dispatch the error from s3.deleteObject", (done) -> + @s3.deleteObject.callsArgWith 1, @error + @AWSSDKPersistorManager.deleteFile @bucketName, @key, (err) => + expect(err).to.equal @error + done() - describe "deleteDirectory", -> + describe "deleteDirectory", -> - it "should list the directory content using s3.listObjects", (done) -> - @s3.listObjects.callsArgWith 1, null, Contents: [] - @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => - expect(err).to.not.be.ok - expect(@s3.listObjects.calledOnce, "called only once").to.be.true - expect((@s3.listObjects.calledWith Bucket: @bucketName, Prefix: @key), - "called with correct arguments").to.be.true - done() + it "should list the directory content using s3.listObjects", (done) -> + @s3.listObjects.callsArgWith 1, null, Contents: [] + @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => + expect(err).to.not.be.ok + expect(@s3.listObjects.calledOnce, "called only once").to.be.true + expect((@s3.listObjects.calledWith Bucket: @bucketName, Prefix: @key), + "called with correct arguments").to.be.true + done() - it "should dispatch the error from s3.listObjects", (done) -> - @s3.listObjects.callsArgWith 1, @error - @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => - expect(err).to.equal @error - done() + it "should dispatch the error from s3.listObjects", (done) -> + @s3.listObjects.callsArgWith 1, @error + @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => + expect(err).to.equal @error + done() - describe "with directory content", -> - beforeEach -> - @fileList = [ - Key: 'foo' - , Key: 'bar' - , Key: 'baz' - ] + describe "with directory content", -> + beforeEach -> + @fileList = [ + Key: 'foo' + , Key: 'bar' + , Key: 'baz' + ] - it "should forward the file keys to s3.deleteObjects", (done) -> - @s3.listObjects.callsArgWith 1, null, Contents: @fileList - @s3.deleteObjects.callsArgWith 1 - @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => - expect(err).to.not.be.ok - expect(@s3.deleteObjects.calledOnce, "called only once").to.be.true - expect((@s3.deleteObjects.calledWith - Bucket: @bucketName - Delete: - Quiet: true - Objects: @fileList), - "called with correct arguments").to.be.true - done() + it "should forward the file keys to s3.deleteObjects", (done) -> + @s3.listObjects.callsArgWith 1, null, Contents: @fileList + @s3.deleteObjects.callsArgWith 1 + @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => + expect(err).to.not.be.ok + expect(@s3.deleteObjects.calledOnce, "called only once").to.be.true + expect((@s3.deleteObjects.calledWith + Bucket: @bucketName + Delete: + Quiet: true + Objects: @fileList), + "called with correct arguments").to.be.true + done() - it "should dispatch the error from s3.deleteObjects", (done) -> - @s3.listObjects.callsArgWith 1, null, Contents: @fileList - @s3.deleteObjects.callsArgWith 1, @error - @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => - expect(err).to.equal @error - done() + it "should dispatch the error from s3.deleteObjects", (done) -> + @s3.listObjects.callsArgWith 1, null, Contents: @fileList + 
@s3.deleteObjects.callsArgWith 1, @error + @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => + expect(err).to.equal @error + done() - describe "checkIfFileExists", -> + describe "checkIfFileExists", -> - it "should check for the file with s3.headObject", (done) -> - @s3.headObject.callsArgWith 1, null, {} - @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => - expect(err).to.not.be.ok - expect(@s3.headObject.calledOnce, "called only once").to.be.true - expect((@s3.headObject.calledWith Bucket: @bucketName, Key: @key), - "called with correct arguments").to.be.true - done() + it "should check for the file with s3.headObject", (done) -> + @s3.headObject.callsArgWith 1, null, {} + @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => + expect(err).to.not.be.ok + expect(@s3.headObject.calledOnce, "called only once").to.be.true + expect((@s3.headObject.calledWith Bucket: @bucketName, Key: @key), + "called with correct arguments").to.be.true + done() - it "should return false on an inexistant file", (done) -> - @s3.headObject.callsArgWith 1, null, {} - @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => - expect(exists).to.be.false - done() + it "should return false on an inexistant file", (done) -> + @s3.headObject.callsArgWith 1, null, {} + @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => + expect(exists).to.be.false + done() - it "should return true on an existing file", (done) -> - @s3.headObject.callsArgWith 1, null, ETag: "etag" - @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => - expect(exists).to.be.true - done() + it "should return true on an existing file", (done) -> + @s3.headObject.callsArgWith 1, null, ETag: "etag" + @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => + expect(exists).to.be.true + done() - it "should dispatch the error from s3.headObject", (done) -> - @s3.headObject.callsArgWith 1, @error - @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => - expect(err).to.equal @error - done() + it "should dispatch the error from s3.headObject", (done) -> + @s3.headObject.callsArgWith 1, @error + @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => + expect(err).to.equal @error + done() From 81a72033155df267646f657bbcbbff055be242c3 Mon Sep 17 00:00:00 2001 From: Xavier Trochu Date: Wed, 25 Nov 2015 15:40:35 +0100 Subject: [PATCH 160/555] Fix the checkIfFileExists API. --- services/filestore/app/coffee/AWSSDKPersistorManager.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee index 50e15cfa27..d0101b93a4 100644 --- a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee @@ -80,6 +80,7 @@ module.exports = logger.log bucketName:bucketName, key:key, "check file existence in s3" s3.headObject {Bucket: bucketName, Key: key}, (err, data) -> if err? + return (callback null, false) if err.code == 'NotFound' logger.err err:err, bucketName:bucketName, key:key, "something went wrong checking head in s3" return callback err callback null, data.ETag? 
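With the NotFound handling added above, a missing object is reported through the exists flag rather than as an error. A rough sketch of the intended behaviour (illustrative only; bucket and key are placeholders):

AWSSDKPersistorManager = require "./AWSSDKPersistorManager"

AWSSDKPersistorManager.checkIfFileExists "user_files", "some/missing/key", (err, exists) ->
	# S3's NotFound error is swallowed: err is null and exists is false.
	console.log err, exists
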
From bc6c8d6afc385d816ac20106321d090abacad2a0 Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 25 Nov 2015 16:07:51 +0000 Subject: [PATCH 161/555] Don't send multiple status codes on insertFile error --- services/filestore/app/coffee/FileController.coffee | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 2b10419fb2..fed469e700 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -49,7 +49,8 @@ module.exports = FileController = if err? logger.log err: err, key: key, bucket: bucket, "error inserting file" res.send 500 - res.send 200 + else + res.send 200 copyFile: (req, res)-> metrics.inc "copyFile" From 8ddfbb99de1609a3e57f2dc4b45b2f64015c540f Mon Sep 17 00:00:00 2001 From: James Allen Date: Wed, 25 Nov 2015 17:01:19 +0000 Subject: [PATCH 162/555] Only parse body when we need to for copyFile request --- services/filestore/app.coffee | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 5df7c76fc8..e78355f4e6 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -17,7 +17,6 @@ Metrics.event_loop?.monitor(logger) Metrics.memory.monitor(logger) app.configure -> - app.use express.bodyParser() app.use Metrics.http.monitor(logger) app.configure 'development', -> @@ -72,7 +71,7 @@ app.use (req, res, next) -> app.get "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile app.post "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.insertFile -app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.copyFile +app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, express.bodyParser(), fileController.copyFile app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile app.get "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFile @@ -83,7 +82,7 @@ app.post "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey app.get "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFile app.post "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.insertFile -app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.copyFile +app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, express.bodyParser(), fileController.copyFile app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile app.get "/heapdump", (req, res)-> From 099d0fef2f7337a0dfcc32fe0aaea86dd25dffd3 Mon Sep 17 00:00:00 2001 From: Henrique Santos Date: Fri, 11 Mar 2016 22:31:46 -0300 Subject: [PATCH 163/555] scaffolding project size --- services/filestore/app.coffee | 2 ++ .../filestore/app/coffee/KeyBuilder.coffee | 5 +++++ .../app/coffee/ProjectController.coffee | 18 ++++++++++++++++++ 3 files changed, 25 insertions(+) create mode 100644 services/filestore/app/coffee/ProjectController.coffee diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index e78355f4e6..93e86164a7 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -85,6 +85,8 @@ app.post "/project/:project_id/public/:public_file_id", 
keyBuilder.publicFileKey app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, express.bodyParser(), fileController.copyFile app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile +app.get "project/:project_id/size", keyBuilder.publicProjectKey, projectController.projectSize + app.get "/heapdump", (req, res)-> require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename)-> res.send filename diff --git a/services/filestore/app/coffee/KeyBuilder.coffee b/services/filestore/app/coffee/KeyBuilder.coffee index 0b5e41525f..18f1d96952 100644 --- a/services/filestore/app/coffee/KeyBuilder.coffee +++ b/services/filestore/app/coffee/KeyBuilder.coffee @@ -42,4 +42,9 @@ module.exports = opts = req.query next() + publicProjectKey: (req, res, next)-> + {project_id} = req.params + req.project_id = project_id + req.bucket = settings.filestore.stores.user_files + next() diff --git a/services/filestore/app/coffee/ProjectController.coffee b/services/filestore/app/coffee/ProjectController.coffee new file mode 100644 index 0000000000..5ac64dd632 --- /dev/null +++ b/services/filestore/app/coffee/ProjectController.coffee @@ -0,0 +1,18 @@ +settings = require("settings-sharelatex") +logger = require("logger-sharelatex") +metrics = require("metrics-sharelatex") +Errors = require('./Errors') + + +module.exports = projectController = + + projectSize: (req, res)-> + metrics.inc "projectSize" + {project_id, bucket} = req + logger.log project_id:project_id, bucket:bucket, "reciving request to project size" + FileHandler.getFile bucket, project_id, req, (err)-> + if err? + logger.log err: err, project_id: project_id, bucket: bucket, "error inserting file" + res.send 500 + else + res.send 200 \ No newline at end of file From 5d2fc89137e9df4bad3e6d941a943ba9bcb42709 Mon Sep 17 00:00:00 2001 From: Henrique Santos Date: Sat, 12 Mar 2016 04:35:49 -0300 Subject: [PATCH 164/555] scaffolding project methods --- .../app/coffee/AWSSDKPersistorManager.coffee | 4 ++++ .../filestore/app/coffee/FSPersistorManager.coffee | 4 ++++ .../filestore/app/coffee/ProjectController.coffee | 4 ++-- services/filestore/app/coffee/ProjectHandler.coffee | 13 +++++++++++++ .../filestore/app/coffee/S3PersistorManager.coffee | 4 ++++ 5 files changed, 27 insertions(+), 2 deletions(-) create mode 100644 services/filestore/app/coffee/ProjectHandler.coffee diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee index d0101b93a4..0c5cb959f9 100644 --- a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee @@ -85,3 +85,7 @@ module.exports = return callback err callback null, data.ETag? 
+ getProjectSize:(bucketName, key, callback)-> + logger.log bucketName:bucketName, key:key, "get project size in s3" + callback null, 1024 + diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index d1b72806cf..17f11217c1 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -87,3 +87,7 @@ module.exports = fs.exists "#{location}/#{filteredName}", (exists) -> logger.log location:location, name:filteredName, exists:exists, "checked if file exists" callback null, exists + + getProjectSize:(location, name, callback)-> + logger.log location:location, name:name, "get project size in file system" + callback null, 1024 diff --git a/services/filestore/app/coffee/ProjectController.coffee b/services/filestore/app/coffee/ProjectController.coffee index 5ac64dd632..21f30f117b 100644 --- a/services/filestore/app/coffee/ProjectController.coffee +++ b/services/filestore/app/coffee/ProjectController.coffee @@ -10,9 +10,9 @@ module.exports = projectController = metrics.inc "projectSize" {project_id, bucket} = req logger.log project_id:project_id, bucket:bucket, "reciving request to project size" - FileHandler.getFile bucket, project_id, req, (err)-> + ProjectHandler.getSize bucket, project_id, req, (err, size)-> if err? logger.log err: err, project_id: project_id, bucket: bucket, "error inserting file" res.send 500 else - res.send 200 \ No newline at end of file + res.json {'total bytes' : size} \ No newline at end of file diff --git a/services/filestore/app/coffee/ProjectHandler.coffee b/services/filestore/app/coffee/ProjectHandler.coffee new file mode 100644 index 0000000000..e6022c4aa3 --- /dev/null +++ b/services/filestore/app/coffee/ProjectHandler.coffee @@ -0,0 +1,13 @@ +settings = require("settings-sharelatex") +PersistorManager = require("./PersistorManager") +logger = require("logger-sharelatex") +async = require("async") + +module.exports = + + getSize: (bucket, project_id, opts = {}, callback)-> + logger.log bucket:bucket, project_id:project_id, opts:opts, "getting project size" + PersistorManager.getProjectSize bucket, project_id, opts, (err, size)-> + if err? 
+ logger.err bucket:bucket, project_id:project_id, opts:opts, "error getting size" + callback err, size \ No newline at end of file diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 15798185a3..b6380eceb8 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -137,3 +137,7 @@ module.exports = exists = res.statusCode == 200 logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exsists in s3" callback(err, exists) + + getProjectSize:(bucketName, key, callback)-> + logger.log bucketName:bucketName, key:key, "get project size in s3" + callback null, 1024 From 1f20744a68d2ca80f204721310affdbd5d4c5999 Mon Sep 17 00:00:00 2001 From: Henrique Santos Date: Sat, 12 Mar 2016 11:08:07 -0300 Subject: [PATCH 165/555] add size function for each persitor --- services/filestore/app.coffee | 3 ++- .../app/coffee/AWSSDKPersistorManager.coffee | 12 +++++++++++- .../app/coffee/FSPersistorManager.coffee | 16 ++++++++++++++-- .../app/coffee/ProjectController.coffee | 4 ++-- .../filestore/app/coffee/ProjectHandler.coffee | 8 ++++---- .../app/coffee/S3PersistorManager.coffee | 14 +++++++++++++- 6 files changed, 46 insertions(+), 11 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 93e86164a7..ad05bd5124 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -4,6 +4,7 @@ logger.initialize("filestore") settings = require("settings-sharelatex") request = require("request") fileController = require("./app/js/FileController") +projectController = require("./app/js/ProjectController") keyBuilder = require("./app/js/KeyBuilder") domain = require("domain") appIsOk = true @@ -85,7 +86,7 @@ app.post "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, express.bodyParser(), fileController.copyFile app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile -app.get "project/:project_id/size", keyBuilder.publicProjectKey, projectController.projectSize +app.get "/project/:project_id/size", keyBuilder.publicProjectKey, projectController.projectSize app.get "/heapdump", (req, res)-> require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename)-> diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee index 0c5cb959f9..ca72901f8f 100644 --- a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee @@ -87,5 +87,15 @@ module.exports = getProjectSize:(bucketName, key, callback)-> logger.log bucketName:bucketName, key:key, "get project size in s3" - callback null, 1024 + s3.listObjects {Bucket: bucketName, Prefix: key}, (err, data) -> + if err? 
+ logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in s3" + return callback err + if data.Contents.length == 0 + logger.log bucketName:bucketName, key:key, "the directory is empty" + return callback() + totalSize = 0 + _.each data.Contents, (entry)-> + totalSize += entry.Size + callback null, totalSize diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 17f11217c1..c146b162ff 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -89,5 +89,17 @@ module.exports = callback null, exists getProjectSize:(location, name, callback)-> - logger.log location:location, name:name, "get project size in file system" - callback null, 1024 + filteredName = filterName name.replace(/\/$/,'') + logger.log location:location, name:filteredName, "get project size in file system" + fs.readdir "#{location}/#{filteredName}", (err, files) -> + if err? + logger.err err:err, location:location, name:filteredName, "something went wrong listing prefix in aws" + return callback(err) + totalSize = 0 + _.each files, (entry)-> + fd = fs.openSync "#{location}/#{filteredName}/#{entry}", 'r' + fileStats = fs.fstatSync(fd) + totalSize += fileStats.size + fs.closeSync fd + logger.log totalSize:totalSize, "total size", files:files + callback null, totalSize diff --git a/services/filestore/app/coffee/ProjectController.coffee b/services/filestore/app/coffee/ProjectController.coffee index 21f30f117b..e31d2539b5 100644 --- a/services/filestore/app/coffee/ProjectController.coffee +++ b/services/filestore/app/coffee/ProjectController.coffee @@ -2,7 +2,7 @@ settings = require("settings-sharelatex") logger = require("logger-sharelatex") metrics = require("metrics-sharelatex") Errors = require('./Errors') - +ProjectHandler = require("./ProjectHandler") module.exports = projectController = @@ -10,7 +10,7 @@ module.exports = projectController = metrics.inc "projectSize" {project_id, bucket} = req logger.log project_id:project_id, bucket:bucket, "reciving request to project size" - ProjectHandler.getSize bucket, project_id, req, (err, size)-> + ProjectHandler.getSize bucket, project_id, (err, size)-> if err? logger.log err: err, project_id: project_id, bucket: bucket, "error inserting file" res.send 500 diff --git a/services/filestore/app/coffee/ProjectHandler.coffee b/services/filestore/app/coffee/ProjectHandler.coffee index e6022c4aa3..53ed2df183 100644 --- a/services/filestore/app/coffee/ProjectHandler.coffee +++ b/services/filestore/app/coffee/ProjectHandler.coffee @@ -5,9 +5,9 @@ async = require("async") module.exports = - getSize: (bucket, project_id, opts = {}, callback)-> - logger.log bucket:bucket, project_id:project_id, opts:opts, "getting project size" - PersistorManager.getProjectSize bucket, project_id, opts, (err, size)-> + getSize: (bucket, project_id, callback)-> + logger.log bucket:bucket, project_id:project_id, "getting project size" + PersistorManager.getProjectSize bucket, project_id, (err, size)-> if err? 
- logger.err bucket:bucket, project_id:project_id, opts:opts, "error getting size" + logger.err bucket:bucket, project_id:project_id, "error getting size" callback err, size \ No newline at end of file diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index b6380eceb8..4accd517cb 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -140,4 +140,16 @@ module.exports = getProjectSize:(bucketName, key, callback)-> logger.log bucketName:bucketName, key:key, "get project size in s3" - callback null, 1024 + s3Client = knox.createClient + key: settings.filestore.s3.key + secret: settings.filestore.s3.secret + bucket: bucketName + s3Client.list prefix:key, (err, data)-> + if err? + logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in aws" + return callback(err) + totalSize = 0 + _.each data.Contents, (entry)-> + totalSize += entry.Size + logger.log totalSize:totalSize, "total size" + callback null, totalSize From 0bb12f32fa626fa61f2731b8221e2a87f4d22572 Mon Sep 17 00:00:00 2001 From: Henrique Santos Date: Sun, 13 Mar 2016 16:22:14 -0300 Subject: [PATCH 166/555] remove project controller / handler add unit test scaffolding --- services/filestore/app.coffee | 3 +-- .../app/coffee/AWSSDKPersistorManager.coffee | 2 +- .../app/coffee/FSPersistorManager.coffee | 2 +- .../filestore/app/coffee/FileController.coffee | 11 +++++++++++ .../filestore/app/coffee/FileHandler.coffee | 7 +++++++ .../app/coffee/ProjectController.coffee | 18 ------------------ .../filestore/app/coffee/ProjectHandler.coffee | 13 ------------- .../app/coffee/S3PersistorManager.coffee | 2 +- .../coffee/AWSSDKPersistorManagerTests.coffee | 8 ++++++++ .../unit/coffee/FSPersistorManagerTests.coffee | 5 +++++ .../unit/coffee/FileControllerTests.coffee | 5 +++++ .../test/unit/coffee/FileHandlerTests.coffee | 5 +++++ .../unit/coffee/S3PersistorManagerTests.coffee | 5 +++++ 13 files changed, 50 insertions(+), 36 deletions(-) delete mode 100644 services/filestore/app/coffee/ProjectController.coffee delete mode 100644 services/filestore/app/coffee/ProjectHandler.coffee diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index ad05bd5124..04c13eb4b5 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -4,7 +4,6 @@ logger.initialize("filestore") settings = require("settings-sharelatex") request = require("request") fileController = require("./app/js/FileController") -projectController = require("./app/js/ProjectController") keyBuilder = require("./app/js/KeyBuilder") domain = require("domain") appIsOk = true @@ -86,7 +85,7 @@ app.post "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, express.bodyParser(), fileController.copyFile app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile -app.get "/project/:project_id/size", keyBuilder.publicProjectKey, projectController.projectSize +app.get "/project/:project_id/size", keyBuilder.publicProjectKey, fileController.directorySize app.get "/heapdump", (req, res)-> require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename)-> diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee index 
ca72901f8f..5be80506f5 100644 --- a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee @@ -85,7 +85,7 @@ module.exports = return callback err callback null, data.ETag? - getProjectSize:(bucketName, key, callback)-> + directorySize:(bucketName, key, callback)-> logger.log bucketName:bucketName, key:key, "get project size in s3" s3.listObjects {Bucket: bucketName, Prefix: key}, (err, data) -> if err? diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index c146b162ff..0868216a15 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -88,7 +88,7 @@ module.exports = logger.log location:location, name:filteredName, exists:exists, "checked if file exists" callback null, exists - getProjectSize:(location, name, callback)-> + directorySize:(location, name, callback)-> filteredName = filterName name.replace(/\/$/,'') logger.log location:location, name:filteredName, "get project size in file system" fs.readdir "#{location}/#{filteredName}", (err, files) -> diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index fed469e700..e1c212711a 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -83,3 +83,14 @@ module.exports = FileController = else range = parsed[0] {start: range.start, end: range.end} + + directorySize: (req, res)-> + metrics.inc "projectSize" + {project_id, bucket} = req + logger.log project_id:project_id, bucket:bucket, "reciving request to project size" + FileHandler.getDirectorySize bucket, project_id, (err, size)-> + if err? + logger.log err: err, project_id: project_id, bucket: bucket, "error inserting file" + res.send 500 + else + res.json {'total bytes' : size} diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index f22285edae..718195a484 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -87,3 +87,10 @@ module.exports = if err? return callback(err) LocalFileWriter.writeStream fileStream, key, callback + + getDirectorySize: (bucket, project_id, callback)-> + logger.log bucket:bucket, project_id:project_id, "getting project size" + PersistorManager.directorySize bucket, project_id, (err, size)-> + if err? + logger.err bucket:bucket, project_id:project_id, "error getting size" + callback err, size diff --git a/services/filestore/app/coffee/ProjectController.coffee b/services/filestore/app/coffee/ProjectController.coffee deleted file mode 100644 index e31d2539b5..0000000000 --- a/services/filestore/app/coffee/ProjectController.coffee +++ /dev/null @@ -1,18 +0,0 @@ -settings = require("settings-sharelatex") -logger = require("logger-sharelatex") -metrics = require("metrics-sharelatex") -Errors = require('./Errors') -ProjectHandler = require("./ProjectHandler") - -module.exports = projectController = - - projectSize: (req, res)-> - metrics.inc "projectSize" - {project_id, bucket} = req - logger.log project_id:project_id, bucket:bucket, "reciving request to project size" - ProjectHandler.getSize bucket, project_id, (err, size)-> - if err? 
- logger.log err: err, project_id: project_id, bucket: bucket, "error inserting file" - res.send 500 - else - res.json {'total bytes' : size} \ No newline at end of file diff --git a/services/filestore/app/coffee/ProjectHandler.coffee b/services/filestore/app/coffee/ProjectHandler.coffee deleted file mode 100644 index 53ed2df183..0000000000 --- a/services/filestore/app/coffee/ProjectHandler.coffee +++ /dev/null @@ -1,13 +0,0 @@ -settings = require("settings-sharelatex") -PersistorManager = require("./PersistorManager") -logger = require("logger-sharelatex") -async = require("async") - -module.exports = - - getSize: (bucket, project_id, callback)-> - logger.log bucket:bucket, project_id:project_id, "getting project size" - PersistorManager.getProjectSize bucket, project_id, (err, size)-> - if err? - logger.err bucket:bucket, project_id:project_id, "error getting size" - callback err, size \ No newline at end of file diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 4accd517cb..b1a03fb4f4 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -138,7 +138,7 @@ module.exports = logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exsists in s3" callback(err, exists) - getProjectSize:(bucketName, key, callback)-> + directorySize:(bucketName, key, callback)-> logger.log bucketName:bucketName, key:key, "get project size in s3" s3Client = knox.createClient key: settings.filestore.s3.key diff --git a/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee index 0ca8c65ffc..262f7aa94c 100644 --- a/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee @@ -247,3 +247,11 @@ describe "AWSSDKPersistorManager", -> @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => expect(err).to.equal @error done() + + describe "directorySize", -> + + it "should list the directory content using s3.listObjects", (done) -> + done() + + it "should sum directory files size", (done) -> + done() diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index 75a4376b8c..1d7eb17e88 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -208,3 +208,8 @@ describe "FSPersistorManagerTests", -> @FSPersistorManager.checkIfFileExists @location, @name1, (err,exists) => exists.should.be.false done() + + describe "directorySize", -> + + it "should sum directory files size", (done) -> + done() \ No newline at end of file diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index 1a2c3e81ea..94d50d67c1 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -152,3 +152,8 @@ describe "FileController", -> result = @controller._get_range('carrots=0-200') expect(result).to.equal null done() + + describe "directorySize", -> + + it "should call the file handler the directory size", (done) -> + done() diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee 
b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index e67a8d38b7..405ccc2cc1 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -192,3 +192,8 @@ describe "FileHandler", -> @FileConverter.convert.calledWith(@stubbedPath, @format).should.equal true @LocalFileWriter.deleteFile.calledWith(@stubbedPath).should.equal true done() + + describe "directorySize", -> + + it "should call the filestore manager to get directory size", (done) -> + done() diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 78fec6cea5..e1f7b7b7a9 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -256,3 +256,8 @@ describe "S3PersistorManagerTests", -> @S3PersistorManager.checkIfFileExists @bucketName, @key, (err)=> err.should.equal @error done() + + describe "directorySize", -> + + it "should sum directory files size", (done) -> + done() From ffaac4c81ba360e927144756440a3e48ea0dde4a Mon Sep 17 00:00:00 2001 From: Henrique Santos Date: Sun, 13 Mar 2016 20:45:48 -0300 Subject: [PATCH 167/555] add unit tests --- .../coffee/AWSSDKPersistorManagerTests.coffee | 21 ++++++++++++++++--- .../coffee/FSPersistorManagerTests.coffee | 16 +++++++++++++- .../unit/coffee/FileControllerTests.coffee | 17 ++++++++++++--- .../test/unit/coffee/FileHandlerTests.coffee | 12 ++++++++--- .../coffee/S3PersistorManagerTests.coffee | 14 ++++++++++--- 5 files changed, 67 insertions(+), 13 deletions(-) diff --git a/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee index 262f7aa94c..92fb968914 100644 --- a/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee @@ -251,7 +251,22 @@ describe "AWSSDKPersistorManager", -> describe "directorySize", -> it "should list the directory content using s3.listObjects", (done) -> - done() + @s3.listObjects.callsArgWith 1, null, Contents: [] + @AWSSDKPersistorManager.directorySize @bucketName, @key, (err) => + expect(err).to.not.be.ok + expect(@s3.listObjects.calledOnce, "called only once").to.be.true + expect((@s3.listObjects.calledWith Bucket: @bucketName, Prefix: @key), + "called with correct arguments").to.be.true + done() - it "should sum directory files size", (done) -> - done() + it "should dispatch the error from s3.listObjects", (done) -> + @s3.listObjects.callsArgWith 1, @error + @AWSSDKPersistorManager.directorySize @bucketName, @key, (err) => + expect(err).to.equal @error + done() + + it "should sum directory files sizes", (done) -> + @s3.listObjects.callsArgWith 1, null, Contents: [ { Size: 1024 }, { Size: 2048 }] + @AWSSDKPersistorManager.directorySize @bucketName, @key, (err, size) => + expect(size).to.equal 3072 + done() diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index 1d7eb17e88..63fe9eded7 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -18,6 +18,10 @@ describe "FSPersistorManagerTests", -> unlink:sinon.stub() rmdir:sinon.stub() exists:sinon.stub() + readdir:sinon.stub() + openSync:sinon.stub() + 
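
# Illustrative sketch only: the filesystem flavour of directorySize implied by
# the stubs above (fs.readdir plus openSync/fstatSync/closeSync), adding up the
# size of every file in the project's directory. The name filtering done in the
# real FSPersistorManager is omitted here.
fs = require "fs"

directorySize = (location, name, callback) ->
  dirPath = "#{location}/#{name}"
  fs.readdir dirPath, (err, files) ->
    return callback(err) if err?
    totalSize = 0
    for file in files
      # stat through a file descriptor, mirroring the fstatSync stub in the tests
      fd = fs.openSync "#{dirPath}/#{file}", "r"
      totalSize += fs.fstatSync(fd).size
      fs.closeSync fd
    callback null, totalSize
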
fstatSync:sinon.stub() + closeSync:sinon.stub() @Rimraf = sinon.stub() @LocalFileWriter = writeStream: sinon.stub() @@ -211,5 +215,15 @@ describe "FSPersistorManagerTests", -> describe "directorySize", -> + it "should propogate the error", (done) -> + @Fs.readdir.callsArgWith(1, @error) + @FSPersistorManager.directorySize @location, @name1, (err, totalsize) => + err.should.equal @error + done() + it "should sum directory files size", (done) -> - done() \ No newline at end of file + @Fs.readdir.callsArgWith(1, null, [ {'file1'}, {'file2'} ]) + @Fs.fstatSync.returns({size : 1024}) + @FSPersistorManager.directorySize @location, @name1, (err, totalsize) => + expect(totalsize).to.equal 2048 + done() \ No newline at end of file diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index 94d50d67c1..591644de60 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -22,6 +22,7 @@ describe "FileController", -> getFile: sinon.stub() deleteFile: sinon.stub() insertFile: sinon.stub() + getDirectorySize: sinon.stub() @LocalFileWriter = {} @controller = SandboxedModule.require modulePath, requires: "./LocalFileWriter":@LocalFileWriter @@ -153,7 +154,17 @@ describe "FileController", -> expect(result).to.equal null done() - describe "directorySize", -> + describe "directorySize", -> - it "should call the file handler the directory size", (done) -> - done() + it "should return total directory size bytes", (done) -> + @FileHandler.getDirectorySize.callsArgWith(2, null, 1024) + @controller.directorySize @req, json:(result)=> + expect(result['total bytes']).to.equal 1024 + done() + + it "should send a 500 if there was an error", (done)-> + @FileHandler.getDirectorySize.callsArgWith(2, "error") + @res.send = (code)-> + code.should.equal 500 + done() + @controller.directorySize @req, @res diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index 405ccc2cc1..ab757b9360 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -20,6 +20,7 @@ describe "FileHandler", -> deleteDirectory: sinon.stub() sendStream: sinon.stub() insertFile: sinon.stub() + directorySize: sinon.stub() @LocalFileWriter = writeStream: sinon.stub() deleteFile: sinon.stub() @@ -193,7 +194,12 @@ describe "FileHandler", -> @LocalFileWriter.deleteFile.calledWith(@stubbedPath).should.equal true done() - describe "directorySize", -> + describe "getDirectorySize", -> - it "should call the filestore manager to get directory size", (done) -> - done() + beforeEach -> + @PersistorManager.directorySize.callsArgWith(2) + + it "should call the filestore manager to get directory size", (done)-> + @handler.getDirectorySize @bucket, @key, => + @PersistorManager.directorySize.calledWith(@bucket, @key).should.equal true + done() diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index e1f7b7b7a9..3a3e7b0d86 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -257,7 +257,15 @@ describe "S3PersistorManagerTests", -> err.should.equal @error done() - describe "directorySize", -> + describe "directorySize", -> - it "should sum 
directory files size", (done) -> - done() + beforeEach -> + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + + it "should sum directory files size", (done) -> + data = + Contents: [ {Size: 1024}, {Size: 2048} ] + @stubbedKnoxClient.list.callsArgWith(1, null, data) + @S3PersistorManager.directorySize @bucketName, @key, (err, totalSize)=> + totalSize.should.equal 3072 + done() From be43330208342c62b26105076e9d6765ff820a41 Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 9 May 2016 11:37:35 +0100 Subject: [PATCH 168/555] Allow convert command to be prefixed by security commands --- .../filestore/app/coffee/FileConverter.coffee | 16 ++++++++++------ services/filestore/app/coffee/SafeExec.coffee | 2 +- .../filestore/config/settings.defaults.coffee | 4 ++++ .../unit/coffee/FileConverterTests.coffee | 19 +++++++++++++++---- .../test/unit/coffee/SafeExec.coffee | 8 ++++---- 5 files changed, 34 insertions(+), 15 deletions(-) diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.coffee index aaf56388dd..429fa7fccb 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -3,6 +3,7 @@ metrics = require("metrics-sharelatex") logger = require("logger-sharelatex") safe_exec = require("./SafeExec") approvedFormats = ["png"] +Settings = require "settings-sharelatex" fourtySeconds = 40 * 1000 @@ -22,8 +23,9 @@ module.exports = err = new Error("invalid format requested") return callback err width = "600x" - args = "nice convert -define pdf:fit-page=#{width} -flatten -density 300 #{sourcePath} #{destPath}" - safe_exec args, childProcessOpts, (err, stdout, stderr)-> + command = ["convert", "-define", "pdf:fit-page=#{width}", "-flatten", "-density", "300", sourcePath, destPath] + command = Settings.commands.convertCommandPrefix.concat(command) + safe_exec command, childProcessOpts, (err, stdout, stderr)-> timer.done() if err? logger.err err:err, stderr:stderr, sourcePath:sourcePath, requestedFormat:requestedFormat, destPath:destPath, "something went wrong converting file" @@ -36,8 +38,9 @@ module.exports = destPath = "#{sourcePath}.png" sourcePath = "#{sourcePath}[0]" width = "260x" - args = "nice convert -flatten -background white -density 300 -define pdf:fit-page=#{width} #{sourcePath} -resize #{width} #{destPath}" - safe_exec args, childProcessOpts, (err, stdout, stderr)-> + command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", "pdf:fit-page=#{width}", sourcePath, "-resize", width, destPath] + command = Settings.commands.convertCommandPrefix.concat(command) + safe_exec command, childProcessOpts, (err, stdout, stderr)-> if err? logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to thumbnail" else @@ -49,8 +52,9 @@ module.exports = destPath = "#{sourcePath}.png" sourcePath = "#{sourcePath}[0]" width = "548x" - args = "nice convert -flatten -background white -density 300 -define pdf:fit-page=#{width} #{sourcePath} -resize #{width} #{destPath}" - safe_exec args, childProcessOpts, (err, stdout, stderr)-> + command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", "pdf:fit-page=#{width}", sourcePath, "-resize", width, destPath] + command = Settings.commands.convertCommandPrefix.concat(command) + safe_exec command, childProcessOpts, (err, stdout, stderr)-> if err? 
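
# Illustrative sketch: how the new convertCommandPrefix setting composes with
# the argv-style commands built above. A configured prefix array is simply
# concatenated in front of the ImageMagick invocation before it reaches
# safe_exec. The example values come from the settings comment added by this
# patch (settings.defaults.coffee, below); the firejail profile path is only
# an example install location.
Settings =
  commands:
    convertCommandPrefix: ["nice"]
    # or, to sandbox conversions (see the profile added later in the series):
    # convertCommandPrefix: ["firejail", "--profile=/etc/firejail/convert.profile"]

command = ["convert", "-flatten", "-density", "300", "input.pdf", "output.png"]
command = Settings.commands.convertCommandPrefix.concat(command)
# => ["nice", "convert", "-flatten", "-density", "300", "input.pdf", "output.png"]
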
logger.err err:err, stderr:stderr, sourcePath:sourcePath, destPath:destPath, "something went wrong converting file to preview" else diff --git a/services/filestore/app/coffee/SafeExec.coffee b/services/filestore/app/coffee/SafeExec.coffee index 217aab4748..aa8121a360 100644 --- a/services/filestore/app/coffee/SafeExec.coffee +++ b/services/filestore/app/coffee/SafeExec.coffee @@ -10,7 +10,7 @@ child_process = require('child_process') module.exports = (command, options, callback = (err, stdout, stderr) ->) -> # options are {timeout: number-of-milliseconds, killSignal: signal-name} - [cmd, args...] = command.split(' ') + [cmd, args...] = command child = child_process.spawn cmd, args, {detached:true} stdout = "" diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index f84928598e..bb4a885478 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -30,6 +30,10 @@ module.exports = path: uploadFolder: Path.resolve(__dirname + "/../uploads") + + commands: + # Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"] + convertCommandPrefix: [] # Filestore health check # ---------------------- diff --git a/services/filestore/test/unit/coffee/FileConverterTests.coffee b/services/filestore/test/unit/coffee/FileConverterTests.coffee index f8a8add22f..bdb908be98 100644 --- a/services/filestore/test/unit/coffee/FileConverterTests.coffee +++ b/services/filestore/test/unit/coffee/FileConverterTests.coffee @@ -16,6 +16,9 @@ describe "FileConverter", -> "logger-sharelatex": log:-> err:-> + "settings-sharelatex": @Settings = + commands: + convertCommandPrefix: [] @sourcePath = "/this/path/here.eps" @format = "png" @@ -27,8 +30,8 @@ describe "FileConverter", -> @safe_exec.callsArgWith(2) @converter.convert @sourcePath, @format, (err)=> args = @safe_exec.args[0][0] - args.indexOf(@sourcePath).should.not.equal -1 - args.indexOf(@format).should.not.equal -1 + args.indexOf("#{@sourcePath}[0]").should.not.equal -1 + args.indexOf("#{@sourcePath}.#{@format}").should.not.equal -1 done() it "should return the dest path", (done)-> @@ -48,13 +51,21 @@ describe "FileConverter", -> @converter.convert @sourcePath, "ahhhhh", (err)=> expect(err).to.exist done() + + it "should prefix the command with Settings.commands.convertCommandPrefix", (done) -> + @safe_exec.callsArgWith(2) + @Settings.commands.convertCommandPrefix = ["nice"] + @converter.convert @sourcePath, @format, (err)=> + command = @safe_exec.args[0][0] + command[0].should.equal "nice" + done() describe "thumbnail", -> it "should call converter resize with args", (done)-> @safe_exec.callsArgWith(2) @converter.thumbnail @sourcePath, (err)=> args = @safe_exec.args[0][0] - args.indexOf(@sourcePath).should.not.equal -1 + args.indexOf("#{@sourcePath}[0]").should.not.equal -1 done() describe "preview", -> @@ -62,5 +73,5 @@ describe "FileConverter", -> @safe_exec.callsArgWith(2) @converter.preview @sourcePath, (err)=> args = @safe_exec.args[0][0] - args.indexOf(@sourcePath).should.not.equal -1 + args.indexOf("#{@sourcePath}[0]").should.not.equal -1 done() diff --git a/services/filestore/test/unit/coffee/SafeExec.coffee b/services/filestore/test/unit/coffee/SafeExec.coffee index b63851aa57..10d920df11 100644 --- a/services/filestore/test/unit/coffee/SafeExec.coffee +++ b/services/filestore/test/unit/coffee/SafeExec.coffee @@ -19,24 +19,24 @@ describe "SafeExec", -> describe 
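
# Illustrative usage note for the SafeExec change in this patch: commands are
# now passed as argv arrays instead of a single string split on spaces, so
# arguments such as paths containing spaces survive intact, and no shell is
# involved because child_process.spawn receives the array directly. A minimal
# call sketch; the paths are placeholders and the options follow the existing
# timeout/killSignal convention.
safe_exec = require "./app/js/SafeExec"   # assumes the compiled app/js layout

opts = {timeout: 40 * 1000, killSignal: "SIGTERM"}
safe_exec ["convert", "/tmp/My Thesis.pdf[0]", "/tmp/My Thesis.pdf.png"], opts, (err, stdout, stderr) ->
  console.log err, stdout, stderr
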
"safe_exec", -> it "should execute a valid command", (done) -> - @safe_exec "/bin/echo hello", @options, (err, stdout, stderr) => + @safe_exec ["/bin/echo", "hello"], @options, (err, stdout, stderr) => stdout.should.equal "hello\n" should.not.exist(err) done() it "should execute a command with non-zero exit status", (done) -> - @safe_exec "/usr/bin/env false", @options, (err, stdout, stderr) => + @safe_exec ["/usr/bin/env", "false"], @options, (err, stdout, stderr) => stdout.should.equal "" stderr.should.equal "" err.message.should.equal "exit status 1" done() it "should handle an invalid command", (done) -> - @safe_exec "/bin/foobar", @options, (err, stdout, stderr) => + @safe_exec ["/bin/foobar"], @options, (err, stdout, stderr) => err.code.should.equal "ENOENT" done() it "should handle a command that runs too long", (done) -> - @safe_exec "/bin/sleep 10", {timeout: 500, killSignal: "SIGTERM"}, (err, stdout, stderr) => + @safe_exec ["/bin/sleep", "10"], {timeout: 500, killSignal: "SIGTERM"}, (err, stdout, stderr) => err.should.equal "SIGTERM" done() From 92f5234580a064b485bde38f340bd6a975942dbf Mon Sep 17 00:00:00 2001 From: James Allen Date: Mon, 9 May 2016 14:44:49 +0100 Subject: [PATCH 169/555] Add convert firejail profile --- services/filestore/firejail/convert.profile | 39 +++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 services/filestore/firejail/convert.profile diff --git a/services/filestore/firejail/convert.profile b/services/filestore/firejail/convert.profile new file mode 100644 index 0000000000..c1e073aa4b --- /dev/null +++ b/services/filestore/firejail/convert.profile @@ -0,0 +1,39 @@ +# Convert (ImageMagick profile) + +include /etc/firejail/disable-common.inc +include /etc/firejail/disable-devel.inc +include /etc/firejail/disable-mgmt.inc +include /etc/firejail/disable-secret.inc + +read-only /bin +blacklist /boot +blacklist /dev +read-only /etc +read-only /home +read-only /lib +read-only /lib64 +blacklist /media +blacklist /mnt +blacklist /opt +blacklist /root +blacklist /run +blacklist /sbin +blacklist /selinux +blacklist /src +blacklist /sys +read-only /usr +blacklist /var + +caps.drop all +noroot +nogroups +protocol unix +net none +private-tmp +private-dev +shell none +seccomp.keep access,arch_prctl,brk,chown,clone,close,dup,execve,exit_group,fcntl,fstat,futex,getcwd,getdents,getrlimit,getrusage,lseek,mmap,mprotect,munmap,nanosleep,open,openat,prctl,read,readlink,rt_sigaction,rt_sigprocmask,sched_getaffinity,set_robust_list,set_tid_address,stat,symlink,times,unlink,unshare,wait4,write + +rlimit-fsize 524288000 #500Mb +rlimit-nproc 100 +rlimit-nofile 100 \ No newline at end of file From a41a307b58073e0d0fd3c08d5c81122a86141f13 Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 10 May 2016 14:34:48 +0100 Subject: [PATCH 170/555] Update firejail profile to allow pdf previews for templates to work --- services/filestore/firejail/convert.profile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/firejail/convert.profile b/services/filestore/firejail/convert.profile index c1e073aa4b..b0ab076572 100644 --- a/services/filestore/firejail/convert.profile +++ b/services/filestore/firejail/convert.profile @@ -32,8 +32,8 @@ net none private-tmp private-dev shell none -seccomp.keep 
access,arch_prctl,brk,chown,clone,close,dup,execve,exit_group,fcntl,fstat,futex,getcwd,getdents,getrlimit,getrusage,lseek,mmap,mprotect,munmap,nanosleep,open,openat,prctl,read,readlink,rt_sigaction,rt_sigprocmask,sched_getaffinity,set_robust_list,set_tid_address,stat,symlink,times,unlink,unshare,wait4,write +seccomp.keep access,arch_prctl,brk,chown,clone,close,dup,execve,exit_group,fcntl,fstat,futex,getcwd,getdents,getegid,geteuid,getgid,getpeername,getpgrp,getpid,getppid,getrlimit,getrusage,getuid,ioctl,lseek,mmap,mprotect,munmap,nanosleep,open,openat,prctl,read,readlink,rt_sigaction,rt_sigprocmask,sched_getaffinity,set_robust_list,set_tid_address,stat,symlink,times,uname,unlink,unshare,wait4,write,madvise rlimit-fsize 524288000 #500Mb -rlimit-nproc 100 +rlimit-nproc 200 rlimit-nofile 100 \ No newline at end of file From a95aae16e6d36d0f6b886a235831c041d5a6bebc Mon Sep 17 00:00:00 2001 From: James Allen Date: Tue, 10 May 2016 17:50:46 +0100 Subject: [PATCH 171/555] Tweak convert profile to work on api-2 --- services/filestore/firejail/convert.profile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/firejail/convert.profile b/services/filestore/firejail/convert.profile index b0ab076572..efee60ed8e 100644 --- a/services/filestore/firejail/convert.profile +++ b/services/filestore/firejail/convert.profile @@ -16,7 +16,7 @@ blacklist /media blacklist /mnt blacklist /opt blacklist /root -blacklist /run +read-only /run blacklist /sbin blacklist /selinux blacklist /src From fd9559960957f412ad7ca3b940a3d31443d4115b Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 11 Aug 2016 11:52:58 +0100 Subject: [PATCH 172/555] remove old firejail includes --- services/filestore/firejail/convert.profile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/firejail/convert.profile b/services/filestore/firejail/convert.profile index efee60ed8e..c3d971fde4 100644 --- a/services/filestore/firejail/convert.profile +++ b/services/filestore/firejail/convert.profile @@ -2,8 +2,8 @@ include /etc/firejail/disable-common.inc include /etc/firejail/disable-devel.inc -include /etc/firejail/disable-mgmt.inc -include /etc/firejail/disable-secret.inc +# include /etc/firejail/disable-mgmt.inc ## removed in firejail 0.9.40 +# include /etc/firejail/disable-secret.inc ## removed in firejail 0.9.40 read-only /bin blacklist /boot From 21be88482bb24163b2cb8f21aa426edfb5c8fcbb Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Mon, 26 Sep 2016 15:33:22 +0100 Subject: [PATCH 173/555] Change the seccomp whitelist to match that of docker default list --- services/filestore/firejail/convert.profile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/firejail/convert.profile b/services/filestore/firejail/convert.profile index c3d971fde4..1ed826f953 100644 --- a/services/filestore/firejail/convert.profile +++ b/services/filestore/firejail/convert.profile @@ -32,7 +32,7 @@ net none private-tmp private-dev shell none -seccomp.keep access,arch_prctl,brk,chown,clone,close,dup,execve,exit_group,fcntl,fstat,futex,getcwd,getdents,getegid,geteuid,getgid,getpeername,getpgrp,getpid,getppid,getrlimit,getrusage,getuid,ioctl,lseek,mmap,mprotect,munmap,nanosleep,open,openat,prctl,read,readlink,rt_sigaction,rt_sigprocmask,sched_getaffinity,set_robust_list,set_tid_address,stat,symlink,times,uname,unlink,unshare,wait4,write,madvise +seccomp.keep 
accept,accept4,access,alarm,arch_prctl,bind,brk,capget,capset,chdir,chmod,chown,chown32,chroot,clock_getres,clock_gettime,clock_nanosleep,clone,close,connect,copy_file_range,creat,dup,dup2,dup3,epoll_create,epoll_create1,epoll_ctl,epoll_ctl_old,epoll_pwait,epoll_wait,epoll_wait_old,eventfd,eventfd2,execve,execveat,exit,exit_group,faccessat,fadvise64,fadvise64_64,fallocate,fanotify_init,fanotify_mark,fchdir,fchmod,fchmodat,fchown,fchown32,fchownat,fcntl,fcntl64,fdatasync,fgetxattr,flistxattr,flock,fork,fremovexattr,fsetxattr,fstat,fstat64,fstatat64,fstatfs,fstatfs64,fsync,ftruncate,ftruncate64,futex,futimesat,getcpu,getcwd,getdents,getdents64,getegid,getegid32,geteuid,geteuid32,getgid,getgid32,getgroups,getgroups32,getitimer,getpeername,getpgid,getpgrp,getpid,getppid,getpriority,getrandom,getresgid,getresgid32,getresuid,getresuid32,getrlimit,get_robust_list,getrusage,getsid,getsockname,getsockopt,get_thread_area,gettid,gettimeofday,getuid,getuid32,getxattr,inotify_add_watch,inotify_init,inotify_init1,inotify_rm_watch,io_cancel,ioctl,io_destroy,io_getevents,ioprio_get,ioprio_set,io_setup,io_submit,ipc,kill,lchown,lchown32,lgetxattr,link,linkat,listen,listxattr,llistxattr,_llseek,lremovexattr,lseek,lsetxattr,lstat,lstat64,madvise,memfd_create,mincore,mkdir,mkdirat,mknod,mknodat,mlock,mlock2,mlockall,mmap,mmap2,mprotect,mq_getsetattr,mq_notify,mq_open,mq_timedreceive,mq_timedsend,mq_unlink,mremap,msgctl,msgget,msgrcv,msgsnd,msync,munlock,munlockall,munmap,nanosleep,newfstatat,_newselect,open,openat,pause,personality,personality,personality,pipe,pipe2,poll,ppoll,prctl,pread64,preadv,prlimit64,pselect6,pwrite64,pwritev,read,readahead,readlink,readlinkat,readv,recv,recvfrom,recvmmsg,recvmsg,remap_file_pages,removexattr,rename,renameat,renameat2,restart_syscall,rmdir,rt_sigaction,rt_sigpending,rt_sigprocmask,rt_sigqueueinfo,rt_sigreturn,rt_sigsuspend,rt_sigtimedwait,rt_tgsigqueueinfo,sched_getaffinity,sched_getattr,sched_getparam,sched_get_priority_max,sched_get_priority_min,sched_getscheduler,sched_rr_get_interval,sched_setaffinity,sched_setattr,sched_setparam,sched_setscheduler,sched_yield,seccomp,select,semctl,semget,semop,semtimedop,send,sendfile,sendfile64,sendmmsg,sendmsg,sendto,setdomainname,setfsgid,setfsgid32,setfsuid,setfsuid32,setgid,setgid32,setgroups,setgroups32,sethostname,setitimer,setpgid,setpriority,setregid,setregid32,setresgid,setresgid32,setresuid,setresuid32,setreuid,setreuid32,setrlimit,set_robust_list,setsid,setsockopt,set_thread_area,set_tid_address,setuid,setuid32,setxattr,shmat,shmctl,shmdt,shmget,shutdown,sigaltstack,signalfd,signalfd4,sigreturn,socket,socketpair,splice,stat,stat64,statfs,statfs64,symlink,symlinkat,sync,sync_file_range,syncfs,sysinfo,syslog,tee,tgkill,time,timer_create,timer_delete,timerfd_create,timerfd_gettime,timerfd_settime,timer_getoverrun,timer_gettime,timer_settime,times,tkill,truncate,truncate64,ugetrlimit,umask,uname,unlink,unlinkat,utime,utimensat,utimes,vfork,vhangup,vmsplice,wait4,waitid,waitpid,write,writev,modify_ldt,breakpoint,cacheflush,set_tls rlimit-fsize 524288000 #500Mb rlimit-nproc 200 From ff39a9da223ca78464b2aa8803c21cf6ab7e5433 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Mon, 26 Sep 2016 15:35:53 +0100 Subject: [PATCH 174/555] sort the list of syscalls --- services/filestore/firejail/convert.profile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/firejail/convert.profile b/services/filestore/firejail/convert.profile index 1ed826f953..97eebd0411 100644 --- 
a/services/filestore/firejail/convert.profile +++ b/services/filestore/firejail/convert.profile @@ -32,7 +32,7 @@ net none private-tmp private-dev shell none -seccomp.keep accept,accept4,access,alarm,arch_prctl,bind,brk,capget,capset,chdir,chmod,chown,chown32,chroot,clock_getres,clock_gettime,clock_nanosleep,clone,close,connect,copy_file_range,creat,dup,dup2,dup3,epoll_create,epoll_create1,epoll_ctl,epoll_ctl_old,epoll_pwait,epoll_wait,epoll_wait_old,eventfd,eventfd2,execve,execveat,exit,exit_group,faccessat,fadvise64,fadvise64_64,fallocate,fanotify_init,fanotify_mark,fchdir,fchmod,fchmodat,fchown,fchown32,fchownat,fcntl,fcntl64,fdatasync,fgetxattr,flistxattr,flock,fork,fremovexattr,fsetxattr,fstat,fstat64,fstatat64,fstatfs,fstatfs64,fsync,ftruncate,ftruncate64,futex,futimesat,getcpu,getcwd,getdents,getdents64,getegid,getegid32,geteuid,geteuid32,getgid,getgid32,getgroups,getgroups32,getitimer,getpeername,getpgid,getpgrp,getpid,getppid,getpriority,getrandom,getresgid,getresgid32,getresuid,getresuid32,getrlimit,get_robust_list,getrusage,getsid,getsockname,getsockopt,get_thread_area,gettid,gettimeofday,getuid,getuid32,getxattr,inotify_add_watch,inotify_init,inotify_init1,inotify_rm_watch,io_cancel,ioctl,io_destroy,io_getevents,ioprio_get,ioprio_set,io_setup,io_submit,ipc,kill,lchown,lchown32,lgetxattr,link,linkat,listen,listxattr,llistxattr,_llseek,lremovexattr,lseek,lsetxattr,lstat,lstat64,madvise,memfd_create,mincore,mkdir,mkdirat,mknod,mknodat,mlock,mlock2,mlockall,mmap,mmap2,mprotect,mq_getsetattr,mq_notify,mq_open,mq_timedreceive,mq_timedsend,mq_unlink,mremap,msgctl,msgget,msgrcv,msgsnd,msync,munlock,munlockall,munmap,nanosleep,newfstatat,_newselect,open,openat,pause,personality,personality,personality,pipe,pipe2,poll,ppoll,prctl,pread64,preadv,prlimit64,pselect6,pwrite64,pwritev,read,readahead,readlink,readlinkat,readv,recv,recvfrom,recvmmsg,recvmsg,remap_file_pages,removexattr,rename,renameat,renameat2,restart_syscall,rmdir,rt_sigaction,rt_sigpending,rt_sigprocmask,rt_sigqueueinfo,rt_sigreturn,rt_sigsuspend,rt_sigtimedwait,rt_tgsigqueueinfo,sched_getaffinity,sched_getattr,sched_getparam,sched_get_priority_max,sched_get_priority_min,sched_getscheduler,sched_rr_get_interval,sched_setaffinity,sched_setattr,sched_setparam,sched_setscheduler,sched_yield,seccomp,select,semctl,semget,semop,semtimedop,send,sendfile,sendfile64,sendmmsg,sendmsg,sendto,setdomainname,setfsgid,setfsgid32,setfsuid,setfsuid32,setgid,setgid32,setgroups,setgroups32,sethostname,setitimer,setpgid,setpriority,setregid,setregid32,setresgid,setresgid32,setresuid,setresuid32,setreuid,setreuid32,setrlimit,set_robust_list,setsid,setsockopt,set_thread_area,set_tid_address,setuid,setuid32,setxattr,shmat,shmctl,shmdt,shmget,shutdown,sigaltstack,signalfd,signalfd4,sigreturn,socket,socketpair,splice,stat,stat64,statfs,statfs64,symlink,symlinkat,sync,sync_file_range,syncfs,sysinfo,syslog,tee,tgkill,time,timer_create,timer_delete,timerfd_create,timerfd_gettime,timerfd_settime,timer_getoverrun,timer_gettime,timer_settime,times,tkill,truncate,truncate64,ugetrlimit,umask,uname,unlink,unlinkat,utime,utimensat,utimes,vfork,vhangup,vmsplice,wait4,waitid,waitpid,write,writev,modify_ldt,breakpoint,cacheflush,set_tls +seccomp.keep 
_llseek,_newselect,accept,accept4,access,alarm,arch_prctl,bind,breakpoint,brk,cacheflush,capget,capset,chdir,chmod,chown,chown32,chroot,clock_getres,clock_gettime,clock_nanosleep,clone,close,connect,copy_file_range,creat,dup,dup2,dup3,epoll_create,epoll_create1,epoll_ctl,epoll_ctl_old,epoll_pwait,epoll_wait,epoll_wait_old,eventfd,eventfd2,execve,execveat,exit,exit_group,faccessat,fadvise64,fadvise64_64,fallocate,fanotify_init,fanotify_mark,fchdir,fchmod,fchmodat,fchown,fchown32,fchownat,fcntl,fcntl64,fdatasync,fgetxattr,flistxattr,flock,fork,fremovexattr,fsetxattr,fstat,fstat64,fstatat64,fstatfs,fstatfs64,fsync,ftruncate,ftruncate64,futex,futimesat,get_robust_list,get_thread_area,getcpu,getcwd,getdents,getdents64,getegid,getegid32,geteuid,geteuid32,getgid,getgid32,getgroups,getgroups32,getitimer,getpeername,getpgid,getpgrp,getpid,getppid,getpriority,getrandom,getresgid,getresgid32,getresuid,getresuid32,getrlimit,getrusage,getsid,getsockname,getsockopt,gettid,gettimeofday,getuid,getuid32,getxattr,inotify_add_watch,inotify_init,inotify_init1,inotify_rm_watch,io_cancel,io_destroy,io_getevents,io_setup,io_submit,ioctl,ioprio_get,ioprio_set,ipc,kill,lchown,lchown32,lgetxattr,link,linkat,listen,listxattr,llistxattr,lremovexattr,lseek,lsetxattr,lstat,lstat64,madvise,memfd_create,mincore,mkdir,mkdirat,mknod,mknodat,mlock,mlock2,mlockall,mmap,mmap2,modify_ldt,mprotect,mq_getsetattr,mq_notify,mq_open,mq_timedreceive,mq_timedsend,mq_unlink,mremap,msgctl,msgget,msgrcv,msgsnd,msync,munlock,munlockall,munmap,nanosleep,newfstatat,open,openat,pause,personality,personality,personality,pipe,pipe2,poll,ppoll,prctl,pread64,preadv,prlimit64,pselect6,pwrite64,pwritev,read,readahead,readlink,readlinkat,readv,recv,recvfrom,recvmmsg,recvmsg,remap_file_pages,removexattr,rename,renameat,renameat2,restart_syscall,rmdir,rt_sigaction,rt_sigpending,rt_sigprocmask,rt_sigqueueinfo,rt_sigreturn,rt_sigsuspend,rt_sigtimedwait,rt_tgsigqueueinfo,sched_get_priority_max,sched_get_priority_min,sched_getaffinity,sched_getattr,sched_getparam,sched_getscheduler,sched_rr_get_interval,sched_setaffinity,sched_setattr,sched_setparam,sched_setscheduler,sched_yield,seccomp,select,semctl,semget,semop,semtimedop,send,sendfile,sendfile64,sendmmsg,sendmsg,sendto,set_robust_list,set_thread_area,set_tid_address,set_tls,setdomainname,setfsgid,setfsgid32,setfsuid,setfsuid32,setgid,setgid32,setgroups,setgroups32,sethostname,setitimer,setpgid,setpriority,setregid,setregid32,setresgid,setresgid32,setresuid,setresuid32,setreuid,setreuid32,setrlimit,setsid,setsockopt,setuid,setuid32,setxattr,shmat,shmctl,shmdt,shmget,shutdown,sigaltstack,signalfd,signalfd4,sigreturn,socket,socketpair,splice,stat,stat64,statfs,statfs64,symlink,symlinkat,sync,sync_file_range,syncfs,sysinfo,syslog,tee,tgkill,time,timer_create,timer_delete,timer_getoverrun,timer_gettime,timer_settime,timerfd_create,timerfd_gettime,timerfd_settime,times,tkill,truncate,truncate64,ugetrlimit,umask,uname,unlink,unlinkat,utime,utimensat,utimes,vfork,vhangup,vmsplice,wait4,waitid,waitpid,write,writev rlimit-fsize 524288000 #500Mb rlimit-nproc 200 From edb5987c3d1c87d6a282daeb142495632ba2d36f Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Tue, 27 Sep 2016 15:54:41 +0100 Subject: [PATCH 175/555] A working profile, tested. 
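
# Illustrative sketch: wiring the firejail profile above into the filestore.
# The profile is meant to be applied through the convertCommandPrefix setting
# introduced earlier in this series, so every ImageMagick invocation runs
# inside the sandbox (largely read-only filesystem, no network, dropped
# capabilities, a seccomp syscall whitelist and resource limits). The profile
# path below is the example from the settings comment, not something fixed by
# these patches.
module.exports =
  commands:
    convertCommandPrefix: ["firejail", "--profile=/etc/firejail/convert.profile"]

# With that prefix in place, safe_exec ends up spawning, for example:
#   firejail --profile=/etc/firejail/convert.profile convert -flatten -density 300 input.pdf output.png
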
--- services/filestore/firejail/convert.profile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/filestore/firejail/convert.profile b/services/filestore/firejail/convert.profile index 97eebd0411..8852e0bc4d 100644 --- a/services/filestore/firejail/convert.profile +++ b/services/filestore/firejail/convert.profile @@ -25,14 +25,15 @@ read-only /usr blacklist /var caps.drop all -noroot +# noroot nogroups protocol unix net none private-tmp private-dev shell none -seccomp.keep _llseek,_newselect,accept,accept4,access,alarm,arch_prctl,bind,breakpoint,brk,cacheflush,capget,capset,chdir,chmod,chown,chown32,chroot,clock_getres,clock_gettime,clock_nanosleep,clone,close,connect,copy_file_range,creat,dup,dup2,dup3,epoll_create,epoll_create1,epoll_ctl,epoll_ctl_old,epoll_pwait,epoll_wait,epoll_wait_old,eventfd,eventfd2,execve,execveat,exit,exit_group,faccessat,fadvise64,fadvise64_64,fallocate,fanotify_init,fanotify_mark,fchdir,fchmod,fchmodat,fchown,fchown32,fchownat,fcntl,fcntl64,fdatasync,fgetxattr,flistxattr,flock,fork,fremovexattr,fsetxattr,fstat,fstat64,fstatat64,fstatfs,fstatfs64,fsync,ftruncate,ftruncate64,futex,futimesat,get_robust_list,get_thread_area,getcpu,getcwd,getdents,getdents64,getegid,getegid32,geteuid,geteuid32,getgid,getgid32,getgroups,getgroups32,getitimer,getpeername,getpgid,getpgrp,getpid,getppid,getpriority,getrandom,getresgid,getresgid32,getresuid,getresuid32,getrlimit,getrusage,getsid,getsockname,getsockopt,gettid,gettimeofday,getuid,getuid32,getxattr,inotify_add_watch,inotify_init,inotify_init1,inotify_rm_watch,io_cancel,io_destroy,io_getevents,io_setup,io_submit,ioctl,ioprio_get,ioprio_set,ipc,kill,lchown,lchown32,lgetxattr,link,linkat,listen,listxattr,llistxattr,lremovexattr,lseek,lsetxattr,lstat,lstat64,madvise,memfd_create,mincore,mkdir,mkdirat,mknod,mknodat,mlock,mlock2,mlockall,mmap,mmap2,modify_ldt,mprotect,mq_getsetattr,mq_notify,mq_open,mq_timedreceive,mq_timedsend,mq_unlink,mremap,msgctl,msgget,msgrcv,msgsnd,msync,munlock,munlockall,munmap,nanosleep,newfstatat,open,openat,pause,personality,personality,personality,pipe,pipe2,poll,ppoll,prctl,pread64,preadv,prlimit64,pselect6,pwrite64,pwritev,read,readahead,readlink,readlinkat,readv,recv,recvfrom,recvmmsg,recvmsg,remap_file_pages,removexattr,rename,renameat,renameat2,restart_syscall,rmdir,rt_sigaction,rt_sigpending,rt_sigprocmask,rt_sigqueueinfo,rt_sigreturn,rt_sigsuspend,rt_sigtimedwait,rt_tgsigqueueinfo,sched_get_priority_max,sched_get_priority_min,sched_getaffinity,sched_getattr,sched_getparam,sched_getscheduler,sched_rr_get_interval,sched_setaffinity,sched_setattr,sched_setparam,sched_setscheduler,sched_yield,seccomp,select,semctl,semget,semop,semtimedop,send,sendfile,sendfile64,sendmmsg,sendmsg,sendto,set_robust_list,set_thread_area,set_tid_address,set_tls,setdomainname,setfsgid,setfsgid32,setfsuid,setfsuid32,setgid,setgid32,setgroups,setgroups32,sethostname,setitimer,setpgid,setpriority,setregid,setregid32,setresgid,setresgid32,setresuid,setresuid32,setreuid,setreuid32,setrlimit,setsid,setsockopt,setuid,setuid32,setxattr,shmat,shmctl,shmdt,shmget,shutdown,sigaltstack,signalfd,signalfd4,sigreturn,socket,socketpair,splice,stat,stat64,statfs,statfs64,symlink,symlinkat,sync,sync_file_range,syncfs,sysinfo,syslog,tee,tgkill,time,timer_create,timer_delete,timer_getoverrun,timer_gettime,timer_settime,timerfd_create,timerfd_gettime,timerfd_settime,times,tkill,truncate,truncate64,ugetrlimit,umask,uname,unlink,unlinkat,utime,utimensat,utimes,vfork,vhangup,vmsplice,wait4,waitid,waitpid,write,wri
tev + +seccomp.keep accept,accept4,access,alarm,arch_prctl,bind,brk,capget,capset,chdir,chmod,chown,chroot,clock_getres,clock_gettime,clock_nanosleep,clone,close,connect,creat,dup,dup2,dup3,epoll_create,epoll_create1,epoll_ctl,epoll_ctl_old,epoll_pwait,epoll_wait,epoll_wait_old,eventfd,eventfd2,execve,execveat,exit,exit_group,faccessat,fadvise64,fallocate,fanotify_init,fanotify_mark,fchdir,fchmod,fchmodat,fchown,fchownat,fcntl,fdatasync,fgetxattr,flistxattr,flock,fork,fremovexattr,fsetxattr,fstat,fstatfs,fsync,ftruncate,futex,futimesat,get_robust_list,get_thread_area,getcpu,getcwd,getdents,getdents64,getegid,geteuid,getgid,getgroups,getitimer,getpeername,getpgid,getpgrp,getpid,getppid,getpriority,getrandom,getresgid,getresuid,getrlimit,getrusage,getsid,getsockname,getsockopt,gettid,gettimeofday,getuid,getxattr,inotify_add_watch,inotify_init,inotify_init1,inotify_rm_watch,io_cancel,io_destroy,io_getevents,io_setup,io_submit,ioctl,ioprio_get,ioprio_set,kill,lchown,lgetxattr,link,linkat,listen,listxattr,llistxattr,lremovexattr,lseek,lsetxattr,lstat,madvise,memfd_create,mincore,mkdir,mkdirat,mknod,mknodat,mlock,mlockall,mmap,modify_ldt,mprotect,mq_getsetattr,mq_notify,mq_open,mq_timedreceive,mq_timedsend,mq_unlink,mremap,msgctl,msgget,msgrcv,msgsnd,msync,munlock,munlockall,munmap,nanosleep,newfstatat,open,openat,pause,personality,personality,personality,pipe,pipe2,poll,ppoll,prctl,pread64,preadv,prlimit64,pselect6,pwrite64,pwritev,read,readahead,readlink,readlinkat,readv,recvfrom,recvmmsg,recvmsg,remap_file_pages,removexattr,rename,renameat,renameat2,restart_syscall,rmdir,rt_sigaction,rt_sigpending,rt_sigprocmask,rt_sigqueueinfo,rt_sigreturn,rt_sigsuspend,rt_sigtimedwait,rt_tgsigqueueinfo,sched_get_priority_max,sched_get_priority_min,sched_getaffinity,sched_getattr,sched_getparam,sched_getscheduler,sched_rr_get_interval,sched_setaffinity,sched_setattr,sched_setparam,sched_setscheduler,sched_yield,seccomp,select,semctl,semget,semop,semtimedop,sendfile,sendmmsg,sendmsg,sendto,set_robust_list,set_thread_area,set_tid_address,setdomainname,setfsgid,setfsuid,setgid,setgroups,sethostname,setitimer,setpgid,setpriority,setregid,setresgid,setresuid,setreuid,setrlimit,setsid,setsockopt,setuid,setxattr,shmat,shmctl,shmdt,shmget,shutdown,sigaltstack,signalfd,signalfd4,socket,socketpair,splice,stat,statfs,symlink,symlinkat,sync,sync_file_range,syncfs,sysinfo,syslog,tee,tgkill,time,timer_create,timer_delete,timer_getoverrun,timer_gettime,timer_settime,timerfd_create,timerfd_gettime,timerfd_settime,times,tkill,truncate,umask,uname,unlink,unlinkat,utime,utimensat,utimes,vfork,vhangup,vmsplice,wait4,waitid,write,writev rlimit-fsize 524288000 #500Mb rlimit-nproc 200 From 4064956cea479d91ca83a81623c7846e92740dfb Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Wed, 28 Sep 2016 10:04:55 +0100 Subject: [PATCH 176/555] working minimal profile --- services/filestore/firejail/convert.profile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/firejail/convert.profile b/services/filestore/firejail/convert.profile index 8852e0bc4d..ee2207e9ef 100644 --- a/services/filestore/firejail/convert.profile +++ b/services/filestore/firejail/convert.profile @@ -25,7 +25,7 @@ read-only /usr blacklist /var caps.drop all -# noroot +noroot nogroups protocol unix net none @@ -33,7 +33,7 @@ private-tmp private-dev shell none -seccomp.keep 
accept,accept4,access,alarm,arch_prctl,bind,brk,capget,capset,chdir,chmod,chown,chroot,clock_getres,clock_gettime,clock_nanosleep,clone,close,connect,creat,dup,dup2,dup3,epoll_create,epoll_create1,epoll_ctl,epoll_ctl_old,epoll_pwait,epoll_wait,epoll_wait_old,eventfd,eventfd2,execve,execveat,exit,exit_group,faccessat,fadvise64,fallocate,fanotify_init,fanotify_mark,fchdir,fchmod,fchmodat,fchown,fchownat,fcntl,fdatasync,fgetxattr,flistxattr,flock,fork,fremovexattr,fsetxattr,fstat,fstatfs,fsync,ftruncate,futex,futimesat,get_robust_list,get_thread_area,getcpu,getcwd,getdents,getdents64,getegid,geteuid,getgid,getgroups,getitimer,getpeername,getpgid,getpgrp,getpid,getppid,getpriority,getrandom,getresgid,getresuid,getrlimit,getrusage,getsid,getsockname,getsockopt,gettid,gettimeofday,getuid,getxattr,inotify_add_watch,inotify_init,inotify_init1,inotify_rm_watch,io_cancel,io_destroy,io_getevents,io_setup,io_submit,ioctl,ioprio_get,ioprio_set,kill,lchown,lgetxattr,link,linkat,listen,listxattr,llistxattr,lremovexattr,lseek,lsetxattr,lstat,madvise,memfd_create,mincore,mkdir,mkdirat,mknod,mknodat,mlock,mlockall,mmap,modify_ldt,mprotect,mq_getsetattr,mq_notify,mq_open,mq_timedreceive,mq_timedsend,mq_unlink,mremap,msgctl,msgget,msgrcv,msgsnd,msync,munlock,munlockall,munmap,nanosleep,newfstatat,open,openat,pause,personality,personality,personality,pipe,pipe2,poll,ppoll,prctl,pread64,preadv,prlimit64,pselect6,pwrite64,pwritev,read,readahead,readlink,readlinkat,readv,recvfrom,recvmmsg,recvmsg,remap_file_pages,removexattr,rename,renameat,renameat2,restart_syscall,rmdir,rt_sigaction,rt_sigpending,rt_sigprocmask,rt_sigqueueinfo,rt_sigreturn,rt_sigsuspend,rt_sigtimedwait,rt_tgsigqueueinfo,sched_get_priority_max,sched_get_priority_min,sched_getaffinity,sched_getattr,sched_getparam,sched_getscheduler,sched_rr_get_interval,sched_setaffinity,sched_setattr,sched_setparam,sched_setscheduler,sched_yield,seccomp,select,semctl,semget,semop,semtimedop,sendfile,sendmmsg,sendmsg,sendto,set_robust_list,set_thread_area,set_tid_address,setdomainname,setfsgid,setfsuid,setgid,setgroups,sethostname,setitimer,setpgid,setpriority,setregid,setresgid,setresuid,setreuid,setrlimit,setsid,setsockopt,setuid,setxattr,shmat,shmctl,shmdt,shmget,shutdown,sigaltstack,signalfd,signalfd4,socket,socketpair,splice,stat,statfs,symlink,symlinkat,sync,sync_file_range,syncfs,sysinfo,syslog,tee,tgkill,time,timer_create,timer_delete,timer_getoverrun,timer_gettime,timer_settime,timerfd_create,timerfd_gettime,timerfd_settime,times,tkill,truncate,umask,uname,unlink,unlinkat,utime,utimensat,utimes,vfork,vhangup,vmsplice,wait4,waitid,write,writev +seccomp.keep 
accept,accept4,access,alarm,arch_prctl,bind,brk,capget,capset,chdir,chmod,chown,chroot,clock_getres,clock_gettime,clock_nanosleep,clone,close,connect,creat,dup,dup2,dup3,epoll_create,epoll_create1,epoll_ctl,epoll_ctl_old,epoll_pwait,epoll_wait,epoll_wait_old,eventfd,eventfd2,execve,execveat,exit,exit_group,faccessat,fadvise64,fallocate,fanotify_init,fanotify_mark,fchdir,fchmod,fchmodat,fchown,fchownat,fcntl,fdatasync,fgetxattr,flistxattr,flock,fork,fremovexattr,fsetxattr,fstat,fstatfs,fsync,ftruncate,futex,futimesat,get_robust_list,get_thread_area,getcpu,getcwd,getdents,getdents64,getegid,geteuid,getgid,getgroups,getitimer,getpeername,getpgid,getpgrp,getpid,getppid,getpriority,getrandom,getresgid,getresuid,getrlimit,getrusage,getsid,getsockname,getsockopt,gettid,gettimeofday,getuid,getxattr,inotify_add_watch,inotify_init,inotify_init1,inotify_rm_watch,io_cancel,io_destroy,io_getevents,io_setup,io_submit,ioctl,ioprio_get,ioprio_set,kill,lchown,lgetxattr,link,linkat,listen,listxattr,llistxattr,lremovexattr,lseek,lsetxattr,lstat,madvise,memfd_create,mincore,mkdir,mkdirat,mknod,mknodat,mlock,mlockall,mmap,modify_ldt,mprotect,mq_getsetattr,mq_notify,mq_open,mq_timedreceive,mq_timedsend,mq_unlink,mremap,msgctl,msgget,msgrcv,msgsnd,msync,munlock,munlockall,munmap,nanosleep,newfstatat,open,openat,pause,personality,pipe,pipe2,poll,ppoll,prctl,pread64,preadv,prlimit64,pselect6,pwrite64,pwritev,read,readahead,readlink,readlinkat,readv,recvfrom,recvmmsg,recvmsg,remap_file_pages,removexattr,rename,renameat,renameat2,restart_syscall,rmdir,rt_sigaction,rt_sigpending,rt_sigprocmask,rt_sigqueueinfo,rt_sigreturn,rt_sigsuspend,rt_sigtimedwait,rt_tgsigqueueinfo,sched_get_priority_max,sched_get_priority_min,sched_getaffinity,sched_getattr,sched_getparam,sched_getscheduler,sched_rr_get_interval,sched_setaffinity,sched_setattr,sched_setparam,sched_setscheduler,sched_yield,seccomp,select,semctl,semget,semop,semtimedop,sendfile,sendmmsg,sendmsg,sendto,set_robust_list,set_thread_area,set_tid_address,setdomainname,setfsgid,setfsuid,setgid,setgroups,sethostname,setitimer,setpgid,setpriority,setregid,setresgid,setresuid,setreuid,setrlimit,setsid,setsockopt,setuid,setxattr,shmat,shmctl,shmdt,shmget,shutdown,sigaltstack,signalfd,signalfd4,socket,socketpair,splice,stat,statfs,symlink,symlinkat,sync,sync_file_range,syncfs,sysinfo,syslog,tee,tgkill,time,timer_create,timer_delete,timer_getoverrun,timer_gettime,timer_settime,timerfd_create,timerfd_gettime,timerfd_settime,times,tkill,truncate,umask,uname,unlink,unlinkat,utime,utimensat,utimes,vfork,vhangup,vmsplice,wait4,waitid,write,writev,unshare rlimit-fsize 524288000 #500Mb rlimit-nproc 200 From b38e7d3078940b36a7302a09ff14e10a91a09fc1 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 13 Dec 2016 12:43:57 +0000 Subject: [PATCH 177/555] * delete both file and converted file if there is an error * increase timeout of optpng --- services/filestore/app/coffee/FileHandler.coffee | 6 ++++-- services/filestore/app/coffee/ImageOptimiser.coffee | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 718195a484..e0680aec3f 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -49,7 +49,7 @@ module.exports = convertedFsPath = "" async.series [ (cb) => - @_convertFile bucket, key, opts, (err, fileSystemPath) -> + @_convertFile bucket, key, opts, (err, fileSystemPath, originalFsPath) -> 
convertedFsPath = fileSystemPath cb err (cb)-> @@ -58,6 +58,8 @@ module.exports = PersistorManager.sendFile bucket, convertedKey, convertedFsPath, cb ], (err)-> if err? + LocalFileWriter.deleteFile convertedFsPath, -> + LocalFileWriter.deleteFile originalFsPath, -> return callback(err) PersistorManager.getFileStream bucket, convertedKey, opts, callback @@ -70,7 +72,7 @@ module.exports = logger.err err:err, bucket:bucket, originalKey:originalKey, opts:opts, "error converting file" return callback(err) LocalFileWriter.deleteFile originalFsPath, -> - callback(err, destPath) + callback(err, destPath, originalFsPath) if opts.format? FileConverter.convert originalFsPath, opts.format, done diff --git a/services/filestore/app/coffee/ImageOptimiser.coffee b/services/filestore/app/coffee/ImageOptimiser.coffee index be3fed1ca2..33c8cc8d56 100644 --- a/services/filestore/app/coffee/ImageOptimiser.coffee +++ b/services/filestore/app/coffee/ImageOptimiser.coffee @@ -8,7 +8,7 @@ module.exports = logger.log localPath:localPath, "optimising png path" args = "optipng #{localPath}" opts = - timeout: 20 * 1000 + timeout: 30 * 1000 killSignal: "SIGKILL" exec args, opts,(err, stdout, stderr)-> if err? From b166c86197568273c5c13bfb09822e18020b5a38 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 13 Dec 2016 14:29:04 +0000 Subject: [PATCH 178/555] added missing originalFsPath --- services/filestore/app/coffee/FileHandler.coffee | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index e0680aec3f..93cad984dd 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -47,10 +47,12 @@ module.exports = _getConvertedFileAndCache: (bucket, key, convertedKey, opts, callback)-> convertedFsPath = "" + originalFsPath = "" async.series [ (cb) => @_convertFile bucket, key, opts, (err, fileSystemPath, originalFsPath) -> convertedFsPath = fileSystemPath + originalFsPath = originalFsPath cb err (cb)-> ImageOptimiser.compressPng convertedFsPath, cb From 27139bc433afd9c60fcec0527cd127525cf4a887 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 14 Dec 2016 16:47:53 +0000 Subject: [PATCH 179/555] increased rlimit-nproc and added health check for converting image --- services/filestore/app.coffee | 18 ++++++------------ services/filestore/firejail/convert.profile | 2 +- services/filestore/package.json | 1 + 3 files changed, 8 insertions(+), 13 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 04c13eb4b5..531442b6ea 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -5,10 +5,10 @@ settings = require("settings-sharelatex") request = require("request") fileController = require("./app/js/FileController") keyBuilder = require("./app/js/KeyBuilder") +healthCheckController = require("./app/js/HealthCheckController") domain = require("domain") appIsOk = true app = express() -streamBuffers = require("stream-buffers") Metrics = require "metrics-sharelatex" Metrics.initialize("filestore") @@ -102,17 +102,11 @@ app.get '/status', (req, res)-> logger.log "app is not ok - shutting down" res.send("server is being shut down", 500) -app.get "/health_check", (req, res)-> - req.params.project_id = settings.health_check.project_id - req.params.file_id = settings.health_check.file_id - myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer(initialSize: 100) - keyBuilder.userFileKey req, res, -> - 
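
# Side note on the FileHandler._getConvertedFileAndCache hunks above
# (illustrative, not part of the patches): the callback parameter named
# originalFsPath shadows the outer originalFsPath variable, so the inner
# `originalFsPath = originalFsPath` assigns the parameter to itself and the
# outer value used for cleanup stays "". Renaming the parameter avoids the
# shadow; a minimal sketch with the collaborators stubbed out:
async = require "async"

handlerSketch =
  _convertFile: (bucket, key, opts, cb) -> cb null, "/tmp/converted.png", "/tmp/original.pdf"
  _getConvertedFileAndCache: (bucket, key, convertedKey, opts, callback) ->
    convertedFsPath = ""
    originalFsPath = ""
    async.series [
      (cb) =>
        @_convertFile bucket, key, opts, (err, fileSystemPath, sourceFsPath) ->
          convertedFsPath = fileSystemPath
          originalFsPath = sourceFsPath   # the outer variable really is updated now
          cb err
    ], (err) ->
      callback err, convertedFsPath, originalFsPath
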
fileController.getFile req, myWritableStreamBuffer - myWritableStreamBuffer.on "close", -> - if myWritableStreamBuffer.size() > 0 - res.send(200) - else - res.send(503) + +app.get "/health_check", healthCheckController.check + + + app.get '*', (req, res)-> res.send 404 diff --git a/services/filestore/firejail/convert.profile b/services/filestore/firejail/convert.profile index ee2207e9ef..461e37e409 100644 --- a/services/filestore/firejail/convert.profile +++ b/services/filestore/firejail/convert.profile @@ -36,5 +36,5 @@ shell none seccomp.keep accept,accept4,access,alarm,arch_prctl,bind,brk,capget,capset,chdir,chmod,chown,chroot,clock_getres,clock_gettime,clock_nanosleep,clone,close,connect,creat,dup,dup2,dup3,epoll_create,epoll_create1,epoll_ctl,epoll_ctl_old,epoll_pwait,epoll_wait,epoll_wait_old,eventfd,eventfd2,execve,execveat,exit,exit_group,faccessat,fadvise64,fallocate,fanotify_init,fanotify_mark,fchdir,fchmod,fchmodat,fchown,fchownat,fcntl,fdatasync,fgetxattr,flistxattr,flock,fork,fremovexattr,fsetxattr,fstat,fstatfs,fsync,ftruncate,futex,futimesat,get_robust_list,get_thread_area,getcpu,getcwd,getdents,getdents64,getegid,geteuid,getgid,getgroups,getitimer,getpeername,getpgid,getpgrp,getpid,getppid,getpriority,getrandom,getresgid,getresuid,getrlimit,getrusage,getsid,getsockname,getsockopt,gettid,gettimeofday,getuid,getxattr,inotify_add_watch,inotify_init,inotify_init1,inotify_rm_watch,io_cancel,io_destroy,io_getevents,io_setup,io_submit,ioctl,ioprio_get,ioprio_set,kill,lchown,lgetxattr,link,linkat,listen,listxattr,llistxattr,lremovexattr,lseek,lsetxattr,lstat,madvise,memfd_create,mincore,mkdir,mkdirat,mknod,mknodat,mlock,mlockall,mmap,modify_ldt,mprotect,mq_getsetattr,mq_notify,mq_open,mq_timedreceive,mq_timedsend,mq_unlink,mremap,msgctl,msgget,msgrcv,msgsnd,msync,munlock,munlockall,munmap,nanosleep,newfstatat,open,openat,pause,personality,pipe,pipe2,poll,ppoll,prctl,pread64,preadv,prlimit64,pselect6,pwrite64,pwritev,read,readahead,readlink,readlinkat,readv,recvfrom,recvmmsg,recvmsg,remap_file_pages,removexattr,rename,renameat,renameat2,restart_syscall,rmdir,rt_sigaction,rt_sigpending,rt_sigprocmask,rt_sigqueueinfo,rt_sigreturn,rt_sigsuspend,rt_sigtimedwait,rt_tgsigqueueinfo,sched_get_priority_max,sched_get_priority_min,sched_getaffinity,sched_getattr,sched_getparam,sched_getscheduler,sched_rr_get_interval,sched_setaffinity,sched_setattr,sched_setparam,sched_setscheduler,sched_yield,seccomp,select,semctl,semget,semop,semtimedop,sendfile,sendmmsg,sendmsg,sendto,set_robust_list,set_thread_area,set_tid_address,setdomainname,setfsgid,setfsuid,setgid,setgroups,sethostname,setitimer,setpgid,setpriority,setregid,setresgid,setresuid,setreuid,setrlimit,setsid,setsockopt,setuid,setxattr,shmat,shmctl,shmdt,shmget,shutdown,sigaltstack,signalfd,signalfd4,socket,socketpair,splice,stat,statfs,symlink,symlinkat,sync,sync_file_range,syncfs,sysinfo,syslog,tee,tgkill,time,timer_create,timer_delete,timer_getoverrun,timer_gettime,timer_settime,timerfd_create,timerfd_gettime,timerfd_settime,times,tkill,truncate,umask,uname,unlink,unlinkat,utime,utimensat,utimes,vfork,vhangup,vmsplice,wait4,waitid,write,writev,unshare rlimit-fsize 524288000 #500Mb -rlimit-nproc 200 +rlimit-nproc 300 #if too low this can cause error: Error fork:sandbox(774): Resource temporarily unavailable rlimit-nofile 100 \ No newline at end of file diff --git a/services/filestore/package.json b/services/filestore/package.json index 184899250e..0dbf40d514 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json 
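
# Illustrative sketch: the health check endpoint above (now handled by
# HealthCheckController, added a little further down in the series) relies on
# a pre-seeded project and file configured in settings. The ids below are
# placeholders, not values taken from the repository.
module.exports =
  health_check:
    project_id: "aaaabbbbccccddddeeeeffff"   # placeholder id of an existing project
    file_id: "111122223333444455556666"      # placeholder id of an existing file in that project
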
@@ -11,6 +11,7 @@ "aws-sdk": "^2.1.39", "coffee-script": "~1.7.1", "express": "~3.4.8", + "fs-extra": "^1.0.0", "heapdump": "^0.3.2", "knox": "~0.9.1", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.1.0", From 4bba56ed281fbc9b86d885394bb64e31c54e54e8 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 14 Dec 2016 17:14:15 +0000 Subject: [PATCH 180/555] added missing files --- .../app/coffee/HealthCheckController.coffee | 46 +++++++++++++++ services/filestore/tiny.pdf | 58 +++++++++++++++++++ 2 files changed, 104 insertions(+) create mode 100644 services/filestore/app/coffee/HealthCheckController.coffee create mode 100644 services/filestore/tiny.pdf diff --git a/services/filestore/app/coffee/HealthCheckController.coffee b/services/filestore/app/coffee/HealthCheckController.coffee new file mode 100644 index 0000000000..252a6ada96 --- /dev/null +++ b/services/filestore/app/coffee/HealthCheckController.coffee @@ -0,0 +1,46 @@ +fs = require("fs-extra") +path = require("path") +async = require("async") +fileConverter = require("./FileConverter") +keyBuilder = require("./KeyBuilder") +fileController = require("./FileController") +logger = require('logger-sharelatex') +settings = require("settings-sharelatex") +streamBuffers = require("stream-buffers") + +checkCanStoreFiles = (callback)-> + req = {params:{}, query:{}, headers:{}} + res = {} + req.params.project_id = settings.health_check.project_id + req.params.file_id = settings.health_check.file_id + myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer(initialSize: 100) + keyBuilder.userFileKey req, res, -> + fileController.getFile req, myWritableStreamBuffer + myWritableStreamBuffer.on "close", -> + if myWritableStreamBuffer.size() > 0 + callback() + else + logger.err "no data in write stream buffer for health check" + callback() + +checkFileConvert = (callback)-> + imgPath = path.join(settings.path.uploadFolder, "/tiny.pdf") + console.log imgPath, settings.path.uploadFolder + async.waterfall [ + (cb)-> + fs.copy("./tiny.pdf", imgPath, cb) + (cb)-> fileConverter.thumbnail "./tiny.pdf", cb + (resultPath, cb)-> fs.unlink resultPath, cb + (resultPath, cb)-> fs.unlink imgPath, cb + ], callback + + +module.exports = + + check: (req, res)-> + async.parallel [checkFileConvert, checkCanStoreFiles], (err)-> + if err? 
+ logger.err err:err, "Health check: error running" + return res.send 500 + else + return res.send 200 \ No newline at end of file diff --git a/services/filestore/tiny.pdf b/services/filestore/tiny.pdf new file mode 100644 index 0000000000..1c641810aa --- /dev/null +++ b/services/filestore/tiny.pdf @@ -0,0 +1,58 @@ +%PDF-1.1 +%¥±ë + +1 0 obj + << /Type /Catalog + /Pages 2 0 R + >> +endobj + +2 0 obj + << /Type /Pages + /Kids [3 0 R] + /Count 1 + /MediaBox [0 0 300 144] + >> +endobj + +3 0 obj + << /Type /Page + /Parent 2 0 R + /Resources + << /Font + << /F1 + << /Type /Font + /Subtype /Type1 + /BaseFont /Times-Roman + >> + >> + >> + /Contents 4 0 R + >> +endobj + +4 0 obj + << /Length 55 >> +stream + BT + /F1 18 Tf + 0 0 Td + (Hello World) Tj + ET +endstream +endobj + +xref +0 5 +0000000000 65535 f +0000000018 00000 n +0000000077 00000 n +0000000178 00000 n +0000000457 00000 n +trailer + << /Root 1 0 R + /Size 5 + >> +startxref +565 +%%EOF From d41bf2e9749d5c92eccf5e0c49374f69117506df Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 14 Dec 2016 18:11:28 +0000 Subject: [PATCH 181/555] result path not in final cb --- services/filestore/app/coffee/HealthCheckController.coffee | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/coffee/HealthCheckController.coffee b/services/filestore/app/coffee/HealthCheckController.coffee index 252a6ada96..feb8c4c659 100644 --- a/services/filestore/app/coffee/HealthCheckController.coffee +++ b/services/filestore/app/coffee/HealthCheckController.coffee @@ -29,9 +29,9 @@ checkFileConvert = (callback)-> async.waterfall [ (cb)-> fs.copy("./tiny.pdf", imgPath, cb) - (cb)-> fileConverter.thumbnail "./tiny.pdf", cb + (cb)-> fileConverter.thumbnail imgPath, cb (resultPath, cb)-> fs.unlink resultPath, cb - (resultPath, cb)-> fs.unlink imgPath, cb + (cb)-> fs.unlink imgPath, cb ], callback From 6c81d18627d974642894c9ab8c40f5131b03a8fd Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 14 Dec 2016 18:15:09 +0000 Subject: [PATCH 182/555] bumped up rlimit-nproc to 600 --- services/filestore/firejail/convert.profile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/firejail/convert.profile b/services/filestore/firejail/convert.profile index 461e37e409..38cc1a2701 100644 --- a/services/filestore/firejail/convert.profile +++ b/services/filestore/firejail/convert.profile @@ -36,5 +36,5 @@ shell none seccomp.keep 
accept,accept4,access,alarm,arch_prctl,bind,brk,capget,capset,chdir,chmod,chown,chroot,clock_getres,clock_gettime,clock_nanosleep,clone,close,connect,creat,dup,dup2,dup3,epoll_create,epoll_create1,epoll_ctl,epoll_ctl_old,epoll_pwait,epoll_wait,epoll_wait_old,eventfd,eventfd2,execve,execveat,exit,exit_group,faccessat,fadvise64,fallocate,fanotify_init,fanotify_mark,fchdir,fchmod,fchmodat,fchown,fchownat,fcntl,fdatasync,fgetxattr,flistxattr,flock,fork,fremovexattr,fsetxattr,fstat,fstatfs,fsync,ftruncate,futex,futimesat,get_robust_list,get_thread_area,getcpu,getcwd,getdents,getdents64,getegid,geteuid,getgid,getgroups,getitimer,getpeername,getpgid,getpgrp,getpid,getppid,getpriority,getrandom,getresgid,getresuid,getrlimit,getrusage,getsid,getsockname,getsockopt,gettid,gettimeofday,getuid,getxattr,inotify_add_watch,inotify_init,inotify_init1,inotify_rm_watch,io_cancel,io_destroy,io_getevents,io_setup,io_submit,ioctl,ioprio_get,ioprio_set,kill,lchown,lgetxattr,link,linkat,listen,listxattr,llistxattr,lremovexattr,lseek,lsetxattr,lstat,madvise,memfd_create,mincore,mkdir,mkdirat,mknod,mknodat,mlock,mlockall,mmap,modify_ldt,mprotect,mq_getsetattr,mq_notify,mq_open,mq_timedreceive,mq_timedsend,mq_unlink,mremap,msgctl,msgget,msgrcv,msgsnd,msync,munlock,munlockall,munmap,nanosleep,newfstatat,open,openat,pause,personality,pipe,pipe2,poll,ppoll,prctl,pread64,preadv,prlimit64,pselect6,pwrite64,pwritev,read,readahead,readlink,readlinkat,readv,recvfrom,recvmmsg,recvmsg,remap_file_pages,removexattr,rename,renameat,renameat2,restart_syscall,rmdir,rt_sigaction,rt_sigpending,rt_sigprocmask,rt_sigqueueinfo,rt_sigreturn,rt_sigsuspend,rt_sigtimedwait,rt_tgsigqueueinfo,sched_get_priority_max,sched_get_priority_min,sched_getaffinity,sched_getattr,sched_getparam,sched_getscheduler,sched_rr_get_interval,sched_setaffinity,sched_setattr,sched_setparam,sched_setscheduler,sched_yield,seccomp,select,semctl,semget,semop,semtimedop,sendfile,sendmmsg,sendmsg,sendto,set_robust_list,set_thread_area,set_tid_address,setdomainname,setfsgid,setfsuid,setgid,setgroups,sethostname,setitimer,setpgid,setpriority,setregid,setresgid,setresuid,setreuid,setrlimit,setsid,setsockopt,setuid,setxattr,shmat,shmctl,shmdt,shmget,shutdown,sigaltstack,signalfd,signalfd4,socket,socketpair,splice,stat,statfs,symlink,symlinkat,sync,sync_file_range,syncfs,sysinfo,syslog,tee,tgkill,time,timer_create,timer_delete,timer_getoverrun,timer_gettime,timer_settime,timerfd_create,timerfd_gettime,timerfd_settime,times,tkill,truncate,umask,uname,unlink,unlinkat,utime,utimensat,utimes,vfork,vhangup,vmsplice,wait4,waitid,write,writev,unshare rlimit-fsize 524288000 #500Mb -rlimit-nproc 300 #if too low this can cause error: Error fork:sandbox(774): Resource temporarily unavailable +rlimit-nproc 600 #if too low this can cause error: Error fork:sandbox(774): Resource temporarily unavailable rlimit-nofile 100 \ No newline at end of file From fd99887753e43de5756363a33830774ca19421eb Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 19 Dec 2016 12:29:08 +0000 Subject: [PATCH 183/555] running check in background --- .../app/coffee/HealthCheckController.coffee | 22 ++++++++++++++----- 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/services/filestore/app/coffee/HealthCheckController.coffee b/services/filestore/app/coffee/HealthCheckController.coffee index feb8c4c659..2eaa08e1f6 100644 --- a/services/filestore/app/coffee/HealthCheckController.coffee +++ b/services/filestore/app/coffee/HealthCheckController.coffee @@ -35,12 +35,22 @@ checkFileConvert = 
(callback)-> ], callback +isOk = true + + +runCheckInBackground = -> + async.parallel [checkFileConvert, checkCanStoreFiles], (err)-> + if err? + logger.err err:err, "Health check: error running" + isOk = false + else + isOk = true + module.exports = check: (req, res)-> - async.parallel [checkFileConvert, checkCanStoreFiles], (err)-> - if err? - logger.err err:err, "Health check: error running" - return res.send 500 - else - return res.send 200 \ No newline at end of file + if isOk + res.send 200 + else + res.send 500 + runCheckInBackground() From bf93fc3f5036acd2e298c50965f55a9413f19d8c Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 19 Dec 2016 13:26:21 +0000 Subject: [PATCH 184/555] run health check max 1 a at a time --- .../filestore/app/coffee/HealthCheckController.coffee | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/coffee/HealthCheckController.coffee b/services/filestore/app/coffee/HealthCheckController.coffee index 2eaa08e1f6..25027a5bdd 100644 --- a/services/filestore/app/coffee/HealthCheckController.coffee +++ b/services/filestore/app/coffee/HealthCheckController.coffee @@ -25,7 +25,6 @@ checkCanStoreFiles = (callback)-> checkFileConvert = (callback)-> imgPath = path.join(settings.path.uploadFolder, "/tiny.pdf") - console.log imgPath, settings.path.uploadFolder async.waterfall [ (cb)-> fs.copy("./tiny.pdf", imgPath, cb) @@ -37,14 +36,18 @@ checkFileConvert = (callback)-> isOk = true +q = async.queue (task, callback)-> + task(callback) -runCheckInBackground = -> + +runChecks = (callback)-> async.parallel [checkFileConvert, checkCanStoreFiles], (err)-> if err? logger.err err:err, "Health check: error running" isOk = false else isOk = true + callback() module.exports = @@ -53,4 +56,4 @@ module.exports = res.send 200 else res.send 500 - runCheckInBackground() + q.push runChecks # run in background 1 at a time From 9e8c27f46bca8171591efead080c27df5f7d79ba Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 19 Dec 2016 14:47:54 +0000 Subject: [PATCH 185/555] =?UTF-8?q?don=E2=80=99t=20shutdown=20filestore=20?= =?UTF-8?q?on=20uncaught=20exception?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- services/filestore/app.coffee | 1 - 1 file changed, 1 deletion(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 531442b6ea..eb97ad48dd 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -39,7 +39,6 @@ app.use (req, res, next) -> requestDomain.add res requestDomain.on "error", (err)-> try - appIsOk = false # request a shutdown to prevent memory leaks beginShutdown() if !res.headerSent From e0f39da6161e087d85244b6e98a2ce687976dce6 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 19 Dec 2016 14:48:09 +0000 Subject: [PATCH 186/555] null check fsPath on unlink file --- services/filestore/app/coffee/LocalFileWriter.coffee | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/filestore/app/coffee/LocalFileWriter.coffee b/services/filestore/app/coffee/LocalFileWriter.coffee index 2ecb3885df..47b2b91e77 100644 --- a/services/filestore/app/coffee/LocalFileWriter.coffee +++ b/services/filestore/app/coffee/LocalFileWriter.coffee @@ -27,6 +27,8 @@ module.exports = stream.pipe writeStream deleteFile: (fsPath, callback)-> + if !fsPath? 
or fsPath == "" + return callback() logger.log fsPath:fsPath, "removing local temp file" fs.unlink fsPath, callback From b12f69722fb1396059a921d3732f5c4257b3a8a0 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Wed, 8 Mar 2017 14:59:34 +0000 Subject: [PATCH 187/555] Begin fixing health-check --- .../app/coffee/FileController.coffee | 2 +- .../app/coffee/HealthCheckController.coffee | 44 +++++++++---------- 2 files changed, 21 insertions(+), 25 deletions(-) diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index e1c212711a..27ac078379 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -32,7 +32,7 @@ module.exports = FileController = logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" if err instanceof Errors.NotFoundError return res.send 404 - if !res.finished and res?.send? + else return res.send 500 else if req.query.cacheWarm logger.log key:key, bucket:bucket, format:format, style:style, "request is only for cache warm so not sending stream" diff --git a/services/filestore/app/coffee/HealthCheckController.coffee b/services/filestore/app/coffee/HealthCheckController.coffee index 25027a5bdd..db3f111c5e 100644 --- a/services/filestore/app/coffee/HealthCheckController.coffee +++ b/services/filestore/app/coffee/HealthCheckController.coffee @@ -7,21 +7,30 @@ fileController = require("./FileController") logger = require('logger-sharelatex') settings = require("settings-sharelatex") streamBuffers = require("stream-buffers") +_ = require('underscore') + checkCanStoreFiles = (callback)-> + callback = _.once(callback) req = {params:{}, query:{}, headers:{}} - res = {} req.params.project_id = settings.health_check.project_id req.params.file_id = settings.health_check.file_id myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer(initialSize: 100) + res = { + send: (code) -> + if code != 200 + callback(new Error("non-200 code from getFile: #{code}")) + } + myWritableStreamBuffer.send = res.send keyBuilder.userFileKey req, res, -> fileController.getFile req, myWritableStreamBuffer myWritableStreamBuffer.on "close", -> if myWritableStreamBuffer.size() > 0 callback() else - logger.err "no data in write stream buffer for health check" - callback() + err = "no data in write stream buffer for health check" + logger.err {err,}, "error performing health check" + callback(err) checkFileConvert = (callback)-> imgPath = path.join(settings.path.uploadFolder, "/tiny.pdf") @@ -34,26 +43,13 @@ checkFileConvert = (callback)-> ], callback -isOk = true - -q = async.queue (task, callback)-> - task(callback) - - -runChecks = (callback)-> - async.parallel [checkFileConvert, checkCanStoreFiles], (err)-> - if err? - logger.err err:err, "Health check: error running" - isOk = false - else - isOk = true - callback() - module.exports = - check: (req, res)-> - if isOk - res.send 200 - else - res.send 500 - q.push runChecks # run in background 1 at a time + check: (req, res) -> + logger.log {}, "performing health check" + async.parallel [checkFileConvert, checkCanStoreFiles], (err)-> + if err? 
+ logger.err err:err, "Health check: error running" + res.send 500 + else + res.send 200 From 2f8c693be3b74904496d528c6444dfdf44a18cbf Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Mon, 27 Mar 2017 14:49:25 +0100 Subject: [PATCH 188/555] Add a .nvmrc file --- services/filestore/.nvmrc | 1 + 1 file changed, 1 insertion(+) create mode 100644 services/filestore/.nvmrc diff --git a/services/filestore/.nvmrc b/services/filestore/.nvmrc new file mode 100644 index 0000000000..7fd0b1e8e6 --- /dev/null +++ b/services/filestore/.nvmrc @@ -0,0 +1 @@ +0.12.4 \ No newline at end of file From d0bbe8beaeaa847117594ba37cf73fa920c9c626 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Wed, 21 Jun 2017 15:18:36 +0100 Subject: [PATCH 189/555] If optipng is sigkilled by timeout, then ignore that error --- services/filestore/app/coffee/ImageOptimiser.coffee | 4 +++- .../test/unit/coffee/ImageOptimiserTests.coffee | 13 ++++++++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/coffee/ImageOptimiser.coffee b/services/filestore/app/coffee/ImageOptimiser.coffee index 33c8cc8d56..939023a997 100644 --- a/services/filestore/app/coffee/ImageOptimiser.coffee +++ b/services/filestore/app/coffee/ImageOptimiser.coffee @@ -11,10 +11,12 @@ module.exports = timeout: 30 * 1000 killSignal: "SIGKILL" exec args, opts,(err, stdout, stderr)-> + if err? and err?.signal == 'SIGKILL' + logger.warn {err: err, stderr: stderr, localPath: localPath}, "optimiser timeout reached" + err = null if err? logger.err err:err, stderr:stderr, localPath:localPath, "something went wrong converting compressPng" else logger.log localPath:localPath, "finished compressPng file" callback(err) - diff --git a/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee b/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee index 80ca0c1d66..e16d8e0917 100644 --- a/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee +++ b/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee @@ -17,6 +17,7 @@ describe "ImageOptimiser", -> "logger-sharelatex": log:-> err:-> + warn:-> @sourcePath = "/this/path/here.eps" @error = "Error" @@ -36,4 +37,14 @@ describe "ImageOptimiser", -> @child_process.exec.callsArgWith(2, @error) @optimiser.compressPng @sourcePath, (err)=> err.should.equal @error - done() \ No newline at end of file + done() + + describe 'when optimiser is sigkilled', -> + + it 'should not produce an error', (done) -> + @error = new Error('woops') + @error.signal = 'SIGKILL' + @child_process.exec.callsArgWith(2, @error) + @optimiser.compressPng @sourcePath, (err)=> + expect(err).to.equal(null) + done() From ba9477cdb3cdc2f2c041889b65ebdf6e89ba76f9 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 22 Jun 2017 09:41:36 +0100 Subject: [PATCH 190/555] Re-work error checking --- services/filestore/app/coffee/ImageOptimiser.coffee | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/coffee/ImageOptimiser.coffee b/services/filestore/app/coffee/ImageOptimiser.coffee index 939023a997..fb2001a1d0 100644 --- a/services/filestore/app/coffee/ImageOptimiser.coffee +++ b/services/filestore/app/coffee/ImageOptimiser.coffee @@ -11,11 +11,12 @@ module.exports = timeout: 30 * 1000 killSignal: "SIGKILL" exec args, opts,(err, stdout, stderr)-> - if err? and err?.signal == 'SIGKILL' - logger.warn {err: err, stderr: stderr, localPath: localPath}, "optimiser timeout reached" - err = null if err? 
- logger.err err:err, stderr:stderr, localPath:localPath, "something went wrong converting compressPng" + if err.signal == 'SIGKILL' + logger.warn {err: err, stderr: stderr, localPath: localPath}, "optimiser timeout reached" + err = null + else + logger.err err:err, stderr:stderr, localPath:localPath, "something went wrong converting compressPng" else logger.log localPath:localPath, "finished compressPng file" callback(err) From 23fe93ccfda0fe6d187147ee5b37b564680d6636 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Thu, 22 Jun 2017 15:28:08 +0100 Subject: [PATCH 191/555] Prevent double-logging --- services/filestore/app/coffee/ImageOptimiser.coffee | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/services/filestore/app/coffee/ImageOptimiser.coffee b/services/filestore/app/coffee/ImageOptimiser.coffee index fb2001a1d0..4888e00224 100644 --- a/services/filestore/app/coffee/ImageOptimiser.coffee +++ b/services/filestore/app/coffee/ImageOptimiser.coffee @@ -11,13 +11,11 @@ module.exports = timeout: 30 * 1000 killSignal: "SIGKILL" exec args, opts,(err, stdout, stderr)-> - if err? - if err.signal == 'SIGKILL' - logger.warn {err: err, stderr: stderr, localPath: localPath}, "optimiser timeout reached" - err = null - else - logger.err err:err, stderr:stderr, localPath:localPath, "something went wrong converting compressPng" + if err? and err.signal == 'SIGKILL' + logger.warn {err: err, stderr: stderr, localPath: localPath}, "optimiser timeout reached" + err = null + else if err? + logger.err err:err, stderr:stderr, localPath:localPath, "something went wrong converting compressPng" else logger.log localPath:localPath, "finished compressPng file" callback(err) - From eaa72d0747aac51230b334598e09cdaf0a7c79ab Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Tue, 27 Jun 2017 14:53:20 +0100 Subject: [PATCH 192/555] upgrade to node 6 --- services/filestore/.nvmrc | 2 +- .../unit/coffee/FSPersistorManagerTests.coffee | 16 ++++++++-------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/services/filestore/.nvmrc b/services/filestore/.nvmrc index 7fd0b1e8e6..e1e5d1369a 100644 --- a/services/filestore/.nvmrc +++ b/services/filestore/.nvmrc @@ -1 +1 @@ -0.12.4 \ No newline at end of file +6.9.5 diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index 63fe9eded7..46def22a05 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -81,7 +81,7 @@ describe "FSPersistorManagerTests", -> it "should use correct file location", (done) -> @Fs.createReadStream.returns({on: ->}) @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res) => - @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal.true + @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal true done() describe "with start and end options", -> @@ -144,17 +144,17 @@ describe "FSPersistorManagerTests", -> it "Should open the source for reading", (done) -> @FSPersistorManager.copyFile @location, @name1, @name2, -> - @Fs.createReadStream.calledWith("#{@location}/#{@name1}").should.equal.true + @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal true done() it "Should open the target for writing", (done) -> @FSPersistorManager.copyFile @location, @name1, @name2, -> - 
@Fs.createWriteStream.calledWith("#{@location}/#{@name2}").should.equal.true + @Fs.createWriteStream.calledWith("#{@location}/#{@name2}").should.equal true done() it "Should pipe the source to the target", (done) -> @FSPersistorManager.copyFile @location, @name1, @name2, -> - @ReadStream.pipe.calledWith(@WriteStream).should.equal.true + @ReadStream.pipe.calledWith(@WriteStream).should.equal true done() describe "deleteFile", -> @@ -163,7 +163,7 @@ describe "FSPersistorManagerTests", -> it "Should call unlink with correct options", (done) -> @FSPersistorManager.deleteFile @location, @name1, (err) => - @Fs.unlink.calledWith("#{@location}/#{@name1}").should.equal.true + @Fs.unlink.calledWith("#{@location}/#{@name1Filtered}").should.equal true done() it "Should propogate the error", (done) -> @@ -178,7 +178,7 @@ describe "FSPersistorManagerTests", -> it "Should call rmdir(rimraf) with correct options", (done) -> @FSPersistorManager.deleteDirectory @location, @name1, (err) => - @Rimraf.calledWith("#{@location}/#{@name1}").should.equal.true + @Rimraf.calledWith("#{@location}/#{@name1Filtered}").should.equal true done() it "Should propogate the error", (done) -> @@ -192,7 +192,7 @@ describe "FSPersistorManagerTests", -> it "Should call exists with correct options", (done) -> @FSPersistorManager.checkIfFileExists @location, @name1, (exists) => - @Fs.exists.calledWith("#{@location}/#{@name1}").should.equal.true + @Fs.exists.calledWith("#{@location}/#{@name1Filtered}").should.equal true done() # fs.exists simply returns false on any error, so... @@ -226,4 +226,4 @@ describe "FSPersistorManagerTests", -> @Fs.fstatSync.returns({size : 1024}) @FSPersistorManager.directorySize @location, @name1, (err, totalsize) => expect(totalsize).to.equal 2048 - done() \ No newline at end of file + done() From 706dc917dd7f38f44d38232c45495d29af4e00e6 Mon Sep 17 00:00:00 2001 From: Joe Green Date: Fri, 11 Aug 2017 11:12:32 +0100 Subject: [PATCH 193/555] Create Jenkinsfile --- services/filestore/Jenkinsfile | 77 ++++++++++++++++++++++++++++++++++ 1 file changed, 77 insertions(+) create mode 100644 services/filestore/Jenkinsfile diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile new file mode 100644 index 0000000000..4cf38af8e5 --- /dev/null +++ b/services/filestore/Jenkinsfile @@ -0,0 +1,77 @@ +pipeline { + + agent { + docker { + image 'node:6.9.5' + args "-v /var/lib/jenkins/.npm:/tmp/.npm" + } + } + + environment { + HOME = "/tmp" + } + + triggers { + pollSCM('* * * * *') + cron('@daily') + } + + stages { + stage('Set up') { + steps { + // we need to disable logallrefupdates, else git clones during the npm install will require git to lookup the user id + // which does not exist in the container's /etc/passwd file, causing the clone to fail. + sh 'git config --global core.logallrefupdates false' + } + } + stage('Install') { + steps { + sh 'rm -fr node_modules' + sh 'npm install' + sh 'npm rebuild' + sh 'npm install --quiet grunt-cli' + } + } + stage('Compile') { + steps { + sh 'node_modules/.bin/grunt compile' + } + } + stage('Test') { + steps { + sh 'NODE_ENV=development node_modules/.bin/grunt test:unit' + } + } + stage('Package') { + steps { + sh 'touch build.tar.gz' // Avoid tar warning about files changing during read + sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .' 
+ } + } + stage('Publish') { + steps { + withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { + s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz") + } + } + } + } + + post { + failure { + mail(from: "${EMAIL_ALERT_FROM}", + to: "${EMAIL_ALERT_TO}", + subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}", + body: "Build: ${BUILD_URL}") + } + } + + // The options directive is for configuration that applies to the whole job. + options { + // we'd like to make sure remove old builds, so we don't fill up our storage! + buildDiscarder(logRotator(numToKeepStr:'50')) + + // And we'd really like to be sure that this build doesn't hang forever, so let's time it out after: + timeout(time: 30, unit: 'MINUTES') + } +} From bfef9a9d672fc73ab0cbdd9786833e915aaabddd Mon Sep 17 00:00:00 2001 From: Joe Green Date: Mon, 4 Sep 2017 14:49:14 +0100 Subject: [PATCH 194/555] added build.txt --- services/filestore/Jenkinsfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index 4cf38af8e5..03528475d6 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -44,6 +44,7 @@ pipeline { } stage('Package') { steps { + sh 'echo ${BUILD_NUMBER} > build_number.txt' sh 'touch build.tar.gz' // Avoid tar warning about files changing during read sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .' } @@ -52,6 +53,8 @@ pipeline { steps { withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz") + // The deployment process uses this file to figure out the latest build + s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest") } } } From 0e2de1e5ad1225ea767115042da760a68e4ca797 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 15 Sep 2017 14:57:01 +0100 Subject: [PATCH 195/555] added shrinkwrap file --- services/filestore/npm-shrinkwrap.json | 854 +++++++++++++++++++++++++ 1 file changed, 854 insertions(+) create mode 100644 services/filestore/npm-shrinkwrap.json diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json new file mode 100644 index 0000000000..1b87652bb1 --- /dev/null +++ b/services/filestore/npm-shrinkwrap.json @@ -0,0 +1,854 @@ +{ + "name": "filestore-sharelatex", + "version": "0.1.4", + "dependencies": { + "accept-encoding": { + "version": "0.1.0", + "from": "accept-encoding@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz" + }, + "assertion-error": { + "version": "1.0.2", + "from": "assertion-error@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.0.2.tgz" + }, + "async": { + "version": "0.2.10", + "from": "async@>=0.2.10 <0.3.0", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz" + }, + "aws-sdk": { + "version": "2.116.0", + "from": "aws-sdk@>=2.1.39 <3.0.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.116.0.tgz", + "dependencies": { + "uuid": { + "version": "3.0.1", + "from": "uuid@3.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.1.tgz" + } + } + }, + "aws-sign": { + "version": "0.2.0", + "from": "aws-sign@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.0.tgz" + }, + "balanced-match": { + "version": 
"1.0.0", + "from": "balanced-match@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "optional": true + }, + "base64-js": { + "version": "1.2.1", + "from": "base64-js@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.2.1.tgz" + }, + "batch": { + "version": "0.5.0", + "from": "batch@0.5.0", + "resolved": "https://registry.npmjs.org/batch/-/batch-0.5.0.tgz" + }, + "best-encoding": { + "version": "0.1.1", + "from": "best-encoding@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz" + }, + "bl": { + "version": "0.7.0", + "from": "bl@>=0.7.0 <0.8.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz", + "dependencies": { + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "readable-stream": { + "version": "1.0.34", + "from": "readable-stream@>=1.0.2 <1.1.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz" + } + } + }, + "boom": { + "version": "0.3.8", + "from": "boom@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz" + }, + "brace-expansion": { + "version": "1.1.8", + "from": "brace-expansion@>=1.1.7 <2.0.0", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz", + "optional": true + }, + "buffer": { + "version": "4.9.1", + "from": "buffer@4.9.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz" + }, + "buffer-crc32": { + "version": "0.2.1", + "from": "buffer-crc32@0.2.1", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.1.tgz" + }, + "bunyan": { + "version": "1.3.6", + "from": "bunyan@1.3.6", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.3.6.tgz" + }, + "bytes": { + "version": "0.2.1", + "from": "bytes@0.2.1", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-0.2.1.tgz" + }, + "caseless": { + "version": "0.3.0", + "from": "caseless@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz" + }, + "chai": { + "version": "4.1.2", + "from": "chai@latest", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.1.2.tgz" + }, + "check-error": { + "version": "1.0.2", + "from": "check-error@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz" + }, + "coffee-script": { + "version": "1.7.1", + "from": "coffee-script@>=1.7.1 <1.8.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz" + }, + "combined-stream": { + "version": "0.0.7", + "from": "combined-stream@>=0.0.4 <0.1.0", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz" + }, + "commander": { + "version": "1.3.2", + "from": "commander@1.3.2", + "resolved": "https://registry.npmjs.org/commander/-/commander-1.3.2.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "concat-map@0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "optional": true + }, + "connect": { + "version": "2.12.0", + "from": "connect@2.12.0", + "resolved": "https://registry.npmjs.org/connect/-/connect-2.12.0.tgz" + }, + "cookie": { + "version": "0.1.0", + "from": "cookie@0.1.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.1.0.tgz" + }, + "cookie-jar": { + "version": "0.2.0", + "from": "cookie-jar@>=0.2.0 <0.3.0", + "resolved": 
"https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz" + }, + "cookie-signature": { + "version": "1.0.1", + "from": "cookie-signature@1.0.1", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.1.tgz" + }, + "core-util-is": { + "version": "1.0.2", + "from": "core-util-is@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "cryptiles": { + "version": "0.1.3", + "from": "cryptiles@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz" + }, + "crypto-browserify": { + "version": "1.0.9", + "from": "crypto-browserify@1.0.9", + "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-1.0.9.tgz" + }, + "debug": { + "version": "0.8.1", + "from": "debug@>=0.7.3 <1.0.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-0.8.1.tgz" + }, + "deep-eql": { + "version": "3.0.1", + "from": "deep-eql@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz" + }, + "delayed-stream": { + "version": "0.0.5", + "from": "delayed-stream@0.0.5", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" + }, + "dtrace-provider": { + "version": "0.4.0", + "from": "dtrace-provider@>=0.4.0 <0.5.0", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.4.0.tgz", + "optional": true + }, + "events": { + "version": "1.1.1", + "from": "events@>=1.1.1 <2.0.0", + "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz" + }, + "express": { + "version": "3.4.8", + "from": "express@>=3.4.8 <3.5.0", + "resolved": "https://registry.npmjs.org/express/-/express-3.4.8.tgz", + "dependencies": { + "range-parser": { + "version": "0.0.4", + "from": "range-parser@0.0.4", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz" + } + } + }, + "forever-agent": { + "version": "0.2.0", + "from": "forever-agent@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz" + }, + "form-data": { + "version": "0.0.10", + "from": "form-data@>=0.0.3 <0.1.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz" + }, + "formatio": { + "version": "1.2.0", + "from": "formatio@1.2.0", + "resolved": "https://registry.npmjs.org/formatio/-/formatio-1.2.0.tgz" + }, + "fresh": { + "version": "0.2.0", + "from": "fresh@0.2.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.2.0.tgz" + }, + "fs-extra": { + "version": "1.0.0", + "from": "fs-extra@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-1.0.0.tgz" + }, + "get-func-name": { + "version": "2.0.0", + "from": "get-func-name@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz" + }, + "gettemporaryfilepath": { + "version": "0.0.1", + "from": "gettemporaryfilepath@0.0.1", + "resolved": "https://registry.npmjs.org/gettemporaryfilepath/-/gettemporaryfilepath-0.0.1.tgz" + }, + "glob": { + "version": "6.0.4", + "from": "glob@>=6.0.1 <7.0.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "optional": true + }, + "graceful-fs": { + "version": "4.1.11", + "from": "graceful-fs@>=4.1.2 <5.0.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz" + }, + "hawk": { + "version": "0.10.2", + "from": "hawk@>=0.10.2 <0.11.0", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz" + }, + "heapdump": { + "version": "0.3.9", + "from": "heapdump@>=0.3.2 
<0.4.0", + "resolved": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.9.tgz" + }, + "hoek": { + "version": "0.7.6", + "from": "hoek@>=0.7.0 <0.8.0", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz" + }, + "ieee754": { + "version": "1.1.8", + "from": "ieee754@>=1.1.4 <2.0.0", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz" + }, + "inflight": { + "version": "1.0.6", + "from": "inflight@>=1.0.4 <2.0.0", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "optional": true + }, + "inherits": { + "version": "2.0.3", + "from": "inherits@>=2.0.1 <2.1.0", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "isarray": { + "version": "1.0.0", + "from": "isarray@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + }, + "jmespath": { + "version": "0.15.0", + "from": "jmespath@0.15.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz" + }, + "json-stringify-safe": { + "version": "3.0.0", + "from": "json-stringify-safe@>=3.0.0 <3.1.0", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz" + }, + "jsonfile": { + "version": "2.4.0", + "from": "jsonfile@>=2.1.0 <3.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz" + }, + "just-extend": { + "version": "1.1.22", + "from": "just-extend@>=1.1.22 <2.0.0", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-1.1.22.tgz" + }, + "keypress": { + "version": "0.1.0", + "from": "keypress@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz" + }, + "klaw": { + "version": "1.3.1", + "from": "klaw@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz" + }, + "knox": { + "version": "0.9.2", + "from": "knox@~0.9.1", + "resolved": "https://registry.npmjs.org/knox/-/knox-0.9.2.tgz", + "dependencies": { + "debug": { + "version": "1.0.4", + "from": "debug@^1.0.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.4.tgz", + "dependencies": { + "ms": { + "version": "0.6.2", + "from": "ms@0.6.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz" + } + } + }, + "mime": { + "version": "1.3.4", + "from": "mime@*", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz" + }, + "once": { + "version": "1.4.0", + "from": "once@^1.3.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "wrappy@1", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "stream-counter": { + "version": "1.0.0", + "from": "stream-counter@^1.0.0", + "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz" + }, + "xml2js": { + "version": "0.4.17", + "from": "xml2js@^0.4.4", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.17.tgz", + "dependencies": { + "sax": { + "version": "1.2.2", + "from": "sax@>=0.6.0", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.2.tgz" + }, + "xmlbuilder": { + "version": "4.2.1", + "from": "xmlbuilder@^4.1.0", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-4.2.1.tgz", + "dependencies": { + "lodash": { + "version": "4.17.4", + "from": "lodash@^4.0.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz" + } + } + } + } + } + } + }, + "lodash": { + "version": "4.17.4", + "from": "lodash@>=4.0.0 <5.0.0", + "resolved": 
"https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz" + }, + "logger-sharelatex": { + "version": "1.0.0", + "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.1.0", + "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#5a3ea8e655f23e76a77bbc207c012d3fc944c8d8", + "dependencies": { + "coffee-script": { + "version": "1.4.0", + "from": "coffee-script@1.4.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.4.0.tgz" + } + } + }, + "lolex": { + "version": "2.1.2", + "from": "lolex@>=2.1.2 <3.0.0", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-2.1.2.tgz" + }, + "lsmod": { + "version": "0.0.3", + "from": "lsmod@>=0.0.3 <0.1.0", + "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-0.0.3.tgz" + }, + "lynx": { + "version": "0.1.1", + "from": "lynx@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz" + }, + "merge-descriptors": { + "version": "0.0.1", + "from": "merge-descriptors@0.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-0.0.1.tgz" + }, + "mersenne": { + "version": "0.0.4", + "from": "mersenne@>=0.0.3 <0.1.0", + "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz" + }, + "methods": { + "version": "0.1.0", + "from": "methods@0.1.0", + "resolved": "https://registry.npmjs.org/methods/-/methods-0.1.0.tgz" + }, + "metrics-sharelatex": { + "version": "1.3.0", + "from": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.3.0", + "resolved": "git+https://github.com/sharelatex/metrics-sharelatex.git#080c4aeb696edcd5d6d86f202f2c528f0661d7a6", + "dependencies": { + "coffee-script": { + "version": "1.6.0", + "from": "coffee-script@1.6.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" + }, + "underscore": { + "version": "1.6.0", + "from": "underscore@>=1.6.0 <1.7.0", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz" + } + } + }, + "mime": { + "version": "1.2.11", + "from": "mime@>=1.2.9 <1.3.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" + }, + "minimatch": { + "version": "3.0.4", + "from": "minimatch@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "optional": true + }, + "minimist": { + "version": "0.0.8", + "from": "minimist@0.0.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + }, + "mkdirp": { + "version": "0.3.5", + "from": "mkdirp@>=0.3.5 <0.4.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" + }, + "multiparty": { + "version": "2.2.0", + "from": "multiparty@2.2.0", + "resolved": "https://registry.npmjs.org/multiparty/-/multiparty-2.2.0.tgz" + }, + "mv": { + "version": "2.1.1", + "from": "mv@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "optional": true, + "dependencies": { + "mkdirp": { + "version": "0.5.1", + "from": "mkdirp@>=0.5.1 <0.6.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "optional": true + }, + "rimraf": { + "version": "2.4.5", + "from": "rimraf@>=2.4.0 <2.5.0", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "optional": true + } + } + }, + "nan": { + "version": "1.5.3", + "from": "nan@>=1.5.1 <1.6.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-1.5.3.tgz", + "optional": true + }, + "native-promise-only": { + "version": "0.8.1", + "from": "native-promise-only@>=0.8.1 <0.9.0", + "resolved": 
"https://registry.npmjs.org/native-promise-only/-/native-promise-only-0.8.1.tgz" + }, + "ncp": { + "version": "2.0.0", + "from": "ncp@>=2.0.0 <2.1.0", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", + "optional": true + }, + "negotiator": { + "version": "0.3.0", + "from": "negotiator@0.3.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.3.0.tgz" + }, + "nise": { + "version": "1.0.1", + "from": "nise@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/nise/-/nise-1.0.1.tgz", + "dependencies": { + "lolex": { + "version": "1.6.0", + "from": "lolex@>=1.6.0 <2.0.0", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-1.6.0.tgz" + } + } + }, + "node-transloadit": { + "version": "0.0.4", + "from": "node-transloadit@0.0.4", + "resolved": "https://registry.npmjs.org/node-transloadit/-/node-transloadit-0.0.4.tgz", + "dependencies": { + "qs": { + "version": "0.5.6", + "from": "qs@>=0.5.4 <0.6.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz" + }, + "request": { + "version": "2.16.6", + "from": "request@>=2.16.6 <2.17.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.16.6.tgz" + }, + "underscore": { + "version": "1.2.1", + "from": "underscore@1.2.1", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz" + } + } + }, + "node-uuid": { + "version": "1.4.8", + "from": "node-uuid@>=1.4.1 <1.5.0", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz" + }, + "oauth-sign": { + "version": "0.2.0", + "from": "oauth-sign@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz" + }, + "once": { + "version": "1.4.0", + "from": "once@>=1.3.0 <2.0.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz" + }, + "path-is-absolute": { + "version": "1.0.1", + "from": "path-is-absolute@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "optional": true + }, + "path-to-regexp": { + "version": "1.7.0", + "from": "path-to-regexp@>=1.7.0 <2.0.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", + "dependencies": { + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + } + } + }, + "pathval": { + "version": "1.1.0", + "from": "pathval@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz" + }, + "pause": { + "version": "0.0.1", + "from": "pause@0.0.1", + "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz" + }, + "pngcrush": { + "version": "0.0.3", + "from": "pngcrush@0.0.3", + "resolved": "https://registry.npmjs.org/pngcrush/-/pngcrush-0.0.3.tgz" + }, + "process-nextick-args": { + "version": "1.0.7", + "from": "process-nextick-args@>=1.0.6 <1.1.0", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" + }, + "punycode": { + "version": "1.3.2", + "from": "punycode@1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz" + }, + "qs": { + "version": "0.6.6", + "from": "qs@0.6.6", + "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz" + }, + "querystring": { + "version": "0.2.0", + "from": "querystring@0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz" + }, + "range-parser": { + "version": "1.2.0", + "from": "range-parser@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" + }, + "raven": 
{ + "version": "0.8.1", + "from": "raven@>=0.8.0 <0.9.0", + "resolved": "https://registry.npmjs.org/raven/-/raven-0.8.1.tgz" + }, + "raw-body": { + "version": "1.1.2", + "from": "raw-body@1.1.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-1.1.2.tgz" + }, + "readable-stream": { + "version": "1.1.14", + "from": "readable-stream@>=1.1.9 <1.2.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "dependencies": { + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + } + } + }, + "recluster": { + "version": "0.3.7", + "from": "recluster@>=0.3.7 <0.4.0", + "resolved": "https://registry.npmjs.org/recluster/-/recluster-0.3.7.tgz" + }, + "request": { + "version": "2.14.0", + "from": "request@2.14.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.14.0.tgz", + "dependencies": { + "form-data": { + "version": "0.0.7", + "from": "form-data@~0.0.3", + "dependencies": { + "async": { + "version": "0.1.22", + "from": "async@~0.1.9" + }, + "combined-stream": { + "version": "0.0.4", + "from": "combined-stream@~0.0.4", + "dependencies": { + "delayed-stream": { + "version": "0.0.5", + "from": "delayed-stream@0.0.5" + } + } + } + } + }, + "mime": { + "version": "1.2.9", + "from": "mime@~1.2.7" + } + } + }, + "require-like": { + "version": "0.1.2", + "from": "require-like@0.1.2", + "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz" + }, + "response": { + "version": "0.14.0", + "from": "response@0.14.0", + "resolved": "https://registry.npmjs.org/response/-/response-0.14.0.tgz" + }, + "rimraf": { + "version": "2.2.8", + "from": "rimraf@2.2.8", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" + }, + "safe-buffer": { + "version": "5.1.1", + "from": "safe-buffer@>=5.0.1 <6.0.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz" + }, + "safe-json-stringify": { + "version": "1.0.4", + "from": "safe-json-stringify@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.4.tgz", + "optional": true + }, + "samsam": { + "version": "1.2.1", + "from": "samsam@>=1.1.3 <2.0.0", + "resolved": "https://registry.npmjs.org/samsam/-/samsam-1.2.1.tgz" + }, + "sandboxed-module": { + "version": "2.0.3", + "from": "sandboxed-module@latest", + "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz", + "dependencies": { + "stack-trace": { + "version": "0.0.9", + "from": "stack-trace@0.0.9", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" + } + } + }, + "sax": { + "version": "1.2.1", + "from": "sax@1.2.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz" + }, + "send": { + "version": "0.1.4", + "from": "send@0.1.4", + "resolved": "https://registry.npmjs.org/send/-/send-0.1.4.tgz", + "dependencies": { + "range-parser": { + "version": "0.0.4", + "from": "range-parser@0.0.4", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz" + } + } + }, + "settings-sharelatex": { + "version": "1.0.0", + "from": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", + "resolved": "git+https://github.com/sharelatex/settings-sharelatex.git#cbc5e41c1dbe6789721a14b3fdae05bf22546559", + "dependencies": { + "coffee-script": { + "version": "1.6.0", + "from": "coffee-script@1.6.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" + } + } 
+ }, + "sinon": { + "version": "3.2.1", + "from": "sinon@latest", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-3.2.1.tgz", + "dependencies": { + "diff": { + "version": "3.3.1", + "from": "diff@>=3.1.0 <4.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.3.1.tgz" + } + } + }, + "sntp": { + "version": "0.1.4", + "from": "sntp@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz" + }, + "stack-trace": { + "version": "0.0.7", + "from": "stack-trace@0.0.7", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.7.tgz" + }, + "statsd-parser": { + "version": "0.0.4", + "from": "statsd-parser@>=0.0.4 <0.1.0", + "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz" + }, + "stream-buffers": { + "version": "0.2.6", + "from": "stream-buffers@>=0.2.5 <0.3.0", + "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz" + }, + "stream-counter": { + "version": "0.2.0", + "from": "stream-counter@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-0.2.0.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "string_decoder@>=0.10.0 <0.11.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "text-encoding": { + "version": "0.6.4", + "from": "text-encoding@0.6.4", + "resolved": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz" + }, + "tunnel-agent": { + "version": "0.2.0", + "from": "tunnel-agent@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz" + }, + "type-detect": { + "version": "4.0.3", + "from": "type-detect@>=4.0.0 <5.0.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.3.tgz" + }, + "uid2": { + "version": "0.0.3", + "from": "uid2@0.0.3", + "resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.3.tgz" + }, + "underscore": { + "version": "1.5.2", + "from": "underscore@>=1.5.2 <1.6.0", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz" + }, + "url": { + "version": "0.10.3", + "from": "url@0.10.3", + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz" + }, + "util-deprecate": { + "version": "1.0.2", + "from": "util-deprecate@>=1.0.1 <1.1.0", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + }, + "wrappy": { + "version": "1.0.2", + "from": "wrappy@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + }, + "xml2js": { + "version": "0.4.17", + "from": "xml2js@0.4.17", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.17.tgz" + }, + "xmlbuilder": { + "version": "4.2.1", + "from": "xmlbuilder@4.2.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-4.2.1.tgz" + } + } +} From 6dcef9fdeaa04e67b64cb8a8ef85523ef76b3fdc Mon Sep 17 00:00:00 2001 From: Joe Green Date: Thu, 12 Oct 2017 17:07:06 +0100 Subject: [PATCH 196/555] alert only on master --- services/filestore/Jenkinsfile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index 03528475d6..f2c998d99d 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -62,6 +62,10 @@ pipeline { post { failure { + when { + branch 'master' + } + mail(from: "${EMAIL_ALERT_FROM}", to: "${EMAIL_ALERT_TO}", subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}", From a70a216c5262ace1143939a177f860270e048583 Mon Sep 17 00:00:00 2001 From: Joe 
Green Date: Mon, 16 Oct 2017 14:10:06 +0100 Subject: [PATCH 197/555] Update Jenkinsfile --- services/filestore/Jenkinsfile | 4 ---- 1 file changed, 4 deletions(-) diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index f2c998d99d..03528475d6 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -62,10 +62,6 @@ pipeline { post { failure { - when { - branch 'master' - } - mail(from: "${EMAIL_ALERT_FROM}", to: "${EMAIL_ALERT_TO}", subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}", From b834e53e13036780a60adba2e099894a66025f5e Mon Sep 17 00:00:00 2001 From: James Allen Date: Fri, 29 Dec 2017 08:14:23 +0000 Subject: [PATCH 198/555] Provide hosts as environment settings and add npm run start script --- services/filestore/config/settings.defaults.coffee | 2 +- services/filestore/package.json | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index bb4a885478..92c4a7ec8d 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -4,7 +4,7 @@ module.exports = internal: filestore: port: 3009 - host: "localhost" + host: process.env['LISTEN_ADDRESS'] or "localhost" filestore: # Which backend persistor to use. diff --git a/services/filestore/package.json b/services/filestore/package.json index 0dbf40d514..4048d93e21 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -6,6 +6,10 @@ "type": "git", "url": "https://github.com/sharelatex/filestore-sharelatex.git" }, + "scripts": { + "compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee", + "start": "npm run compile:app && node app.js" + }, "dependencies": { "async": "~0.2.10", "aws-sdk": "^2.1.39", From c23b700927259a4ee3ab5b8c8dbeb77a3dda4143 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 16 Jan 2018 12:27:36 +0000 Subject: [PATCH 199/555] wip --- services/filestore/Dockerfile | 27 + services/filestore/Makefile | 62 + services/filestore/app.coffee | 6 +- services/filestore/npm-shrinkwrap.json | 1715 ++++++++++++++--- services/filestore/package.json | 12 + .../coffee/SendingFileTest.coffee | 0 6 files changed, 1503 insertions(+), 319 deletions(-) create mode 100644 services/filestore/Dockerfile create mode 100644 services/filestore/Makefile rename services/filestore/test/{acceptence => acceptance}/coffee/SendingFileTest.coffee (100%) diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile new file mode 100644 index 0000000000..82a4edd9f0 --- /dev/null +++ b/services/filestore/Dockerfile @@ -0,0 +1,27 @@ +FROM node:8.9.1 + +WORKDIR /app + +COPY package.json /app/ + +RUN npm install --quiet + +COPY config /app/config +COPY test /app/test +COPY app /app/app +COPY app.coffee /app +RUN npm run compile:all + +FROM node:8.9.1 + +COPY --from=0 /app /app +# All app and node_modules will be owned by root. +# The app will run as the 'app' user, and so not have write permissions +# on any files it doesn't need. 
+RUN useradd --user-group --create-home --home-dir /app --shell /bin/false app +USER app +WORKDIR /app + +EXPOSE 3009 + +CMD ["node","app.js"] diff --git a/services/filestore/Makefile b/services/filestore/Makefile new file mode 100644 index 0000000000..3ebf4c4a34 --- /dev/null +++ b/services/filestore/Makefile @@ -0,0 +1,62 @@ +NPM := docker-compose -f docker-compose.yml ${DOCKER_COMPOSE_FLAGS} run --rm npm npm +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = filestore +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := docker-compose ${DOCKER_COMPOSE_FLAGS} + +all: install test + @echo "Run:" + @echo " make install to set up the project dependencies (in docker)" + @echo " make test to run all the tests for the project (in docker)" + @echo " make run to run the app (natively)" + +install: + $(NPM) install + +run: + $(NPM) run start + +clean: + rm -f app.js + rm -rf app/js + rm -rf test/unit/js + rm -rf test/acceptance/js + # Deletes node_modules volume + docker-compose down --volumes + +test: test_unit test_acceptance + +test_unit: + $(DOCKER_COMPOSE) run --rm test_unit -- ${MOCHA_ARGS} + +test_acceptance: ci_clean # clear the database before each acceptance test run + $(DOCKER_COMPOSE) run --rm test_acceptance -- ${MOCHA_ARGS} + +build: + docker build --pull --tag quay.io/sharelatex/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . + +publish: + docker push quay.io/sharelatex/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + +ci: + # On the CI server, we want to run our tests in the image that we + # have built for deployment, which is what the docker-compose.ci.yml + # override does. + PROJECT_NAME=$(PROJECT_NAME) \ + BRANCH_NAME=$(BRANCH_NAME) \ + BUILD_NUMBER=$(BUILD_NUMBER) \ + DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" \ + $(MAKE) build test publish + +ci_clean: + PROJECT_NAME=$(PROJECT_NAME) \ + BRANCH_NAME=$(BRANCH_NAME) \ + BUILD_NUMBER=$(BUILD_NUMBER) \ + DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" \ + $(DOCKER_COMPOSE) down + +.PHONY: + all install compile clean test test_unit test_acceptance \ + test_acceptance_start_service test_acceptance_stop_service \ + test_acceptance_run build publish ci ci_clean \ No newline at end of file diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index eb97ad48dd..3c0ca7b72d 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -96,7 +96,7 @@ app.post "/shutdown", (req, res)-> app.get '/status', (req, res)-> if appIsOk - res.send('filestore sharelatex up') + res.send('filestore sharelatex up - v2') else logger.log "app is not ok - shutting down" res.send("server is being shut down", 500) @@ -112,7 +112,7 @@ app.get '*', (req, res)-> server = require('http').createServer(app) port = settings.internal.filestore.port or 3009 -host = settings.internal.filestore.host or "localhost" +host = "0.0.0.0" beginShutdown = () -> if appIsOk @@ -128,7 +128,7 @@ beginShutdown = () -> process.disconnect?() logger.log "server will stop accepting connections" -server.listen port, host, -> +server.listen port, -> logger.info "Filestore starting up, listening on #{host}:#{port}" process.on 'SIGTERM', () -> diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 1b87652bb1..a0bcfff9ff 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -1,387 +1,1026 @@ { "name": "filestore-sharelatex", "version": "0.1.4", + "lockfileVersion": 1, + "requires": true, 
"dependencies": { + "abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "dev": true + }, "accept-encoding": { "version": "0.1.0", - "from": "accept-encoding@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz" + "resolved": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz", + "integrity": "sha1-XdiLjfcfHcLlzGuVZezOHjmaMz4=" + }, + "ansi-styles": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.0.0.tgz", + "integrity": "sha1-yxAt8cVvUSPquLZ817mAJ6AnkXg=", + "dev": true + }, + "argparse": { + "version": "0.1.16", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-0.1.16.tgz", + "integrity": "sha1-z9AeD7uj1srtBJ+9dY1A9lGW9Xw=", + "dev": true, + "requires": { + "underscore": "1.5.2", + "underscore.string": "2.4.0" + }, + "dependencies": { + "underscore.string": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.4.0.tgz", + "integrity": "sha1-jN2PusTi0uoefi6Al8QvRCKA+Fs=", + "dev": true + } + } }, "assertion-error": { - "version": "1.0.2", - "from": "assertion-error@>=1.0.1 <2.0.0", - "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.0.2.tgz" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==" }, "async": { "version": "0.2.10", - "from": "async@>=0.2.10 <0.3.0", - "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz" + "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "integrity": "sha1-trvgsGdLnXGXCMo43owjfLUmw9E=" }, "aws-sdk": { "version": "2.116.0", - "from": "aws-sdk@>=2.1.39 <3.0.0", "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.116.0.tgz", + "integrity": "sha1-2UpsZnvuY++PvZRBzMIitTn3y+w=", + "requires": { + "buffer": "4.9.1", + "crypto-browserify": "1.0.9", + "events": "1.1.1", + "jmespath": "0.15.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "uuid": "3.0.1", + "xml2js": "0.4.17", + "xmlbuilder": "4.2.1" + }, "dependencies": { "uuid": { "version": "3.0.1", - "from": "uuid@3.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.1.tgz" + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.1.tgz", + "integrity": "sha1-ZUS7ot/ajBzxfmKaOjBeK7H+5sE=" } } }, "aws-sign": { "version": "0.2.0", - "from": "aws-sign@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.0.tgz" + "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.0.tgz", + "integrity": "sha1-xVAThWyBlOyFSgy+yQqrWgTOOsU=" }, "balanced-match": { "version": "1.0.0", - "from": "balanced-match@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "optional": true + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" }, "base64-js": { "version": "1.2.1", - "from": "base64-js@>=1.0.2 <2.0.0", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.2.1.tgz" + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.2.1.tgz", + "integrity": "sha512-dwVUVIXsBZXwTuwnXI9RK8sBmgq09NDHzyR9SAph9eqk76gKK2JSQmZARC2zRC81JC2QTtxD0ARU5qTS25gIGw==" }, "batch": { "version": "0.5.0", - "from": "batch@0.5.0", - "resolved": 
"https://registry.npmjs.org/batch/-/batch-0.5.0.tgz" + "resolved": "https://registry.npmjs.org/batch/-/batch-0.5.0.tgz", + "integrity": "sha1-/S4Fp6XWlrTbkxQBPihdj/NVfsM=" }, "best-encoding": { "version": "0.1.1", - "from": "best-encoding@>=0.1.1 <0.2.0", - "resolved": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz" + "resolved": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz", + "integrity": "sha1-GVIT2rysBFgYuAe3ox+Dn63cl04=", + "requires": { + "accept-encoding": "0.1.0" + } }, "bl": { "version": "0.7.0", - "from": "bl@>=0.7.0 <0.8.0", "resolved": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz", + "integrity": "sha1-P7BnBgKsKHjrdw3CA58YNr5irls=", + "requires": { + "readable-stream": "1.0.34" + }, "dependencies": { "isarray": { "version": "0.0.1", - "from": "isarray@0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" }, "readable-stream": { "version": "1.0.34", - "from": "readable-stream@>=1.0.2 <1.1.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz" + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "requires": { + "core-util-is": "1.0.2", + "inherits": "2.0.3", + "isarray": "0.0.1", + "string_decoder": "0.10.31" + } } } }, "boom": { "version": "0.3.8", - "from": "boom@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz" + "resolved": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz", + "integrity": "sha1-yM2wQUNZEnQWKMBE7Mcy0dF8Ceo=", + "requires": { + "hoek": "0.7.6" + } }, "brace-expansion": { "version": "1.1.8", - "from": "brace-expansion@>=1.1.7 <2.0.0", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz", - "optional": true + "integrity": "sha1-wHshHHyVLsH479Uad+8NHTmQopI=", + "requires": { + "balanced-match": "1.0.0", + "concat-map": "0.0.1" + } + }, + "broadway": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/broadway/-/broadway-0.3.6.tgz", + "integrity": "sha1-fb7waLlUt5B5Jf1USWO1eKkCuno=", + "dev": true, + "requires": { + "cliff": "0.1.9", + "eventemitter2": "0.4.14", + "nconf": "0.6.9", + "utile": "0.2.1", + "winston": "0.8.0" + }, + "dependencies": { + "cliff": { + "version": "0.1.9", + "resolved": "https://registry.npmjs.org/cliff/-/cliff-0.1.9.tgz", + "integrity": "sha1-ohHgnGo947oa8n0EnTASUNGIErw=", + "dev": true, + "requires": { + "colors": "0.6.2", + "eyes": "0.1.8", + "winston": "0.8.0" + } + }, + "winston": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/winston/-/winston-0.8.0.tgz", + "integrity": "sha1-YdCDD6aZcGISIGsKK1ymmpMENmg=", + "dev": true, + "requires": { + "async": "0.2.10", + "colors": "0.6.2", + "cycle": "1.0.3", + "eyes": "0.1.8", + "pkginfo": "0.3.1", + "stack-trace": "0.0.7" + } + } + } }, "buffer": { "version": "4.9.1", - "from": "buffer@4.9.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz" + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", + "integrity": "sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=", + "requires": { + "base64-js": "1.2.1", + "ieee754": "1.1.8", + "isarray": "1.0.0" + } }, "buffer-crc32": { "version": "0.2.1", - "from": "buffer-crc32@0.2.1", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.1.tgz" + "resolved": 
"https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.1.tgz", + "integrity": "sha1-vj5TgvwCttYySVasGvmKqYsIU0w=" }, "bunyan": { "version": "1.3.6", - "from": "bunyan@1.3.6", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.3.6.tgz" + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.3.6.tgz", + "integrity": "sha1-xDM3p7Dc5mPYIjNnurBRlbkLR2o=", + "requires": { + "dtrace-provider": "0.4.0", + "mv": "2.1.1", + "safe-json-stringify": "1.0.4" + } }, "bytes": { "version": "0.2.1", - "from": "bytes@0.2.1", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-0.2.1.tgz" + "resolved": "https://registry.npmjs.org/bytes/-/bytes-0.2.1.tgz", + "integrity": "sha1-VVsIq8sGP4l1kFMCUj5M1P/f3zE=" }, "caseless": { "version": "0.3.0", - "from": "caseless@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz" + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz", + "integrity": "sha1-U06XkWOH07cGtk/eu6xGQ4RQk08=" }, "chai": { "version": "4.1.2", - "from": "chai@latest", - "resolved": "https://registry.npmjs.org/chai/-/chai-4.1.2.tgz" + "resolved": "https://registry.npmjs.org/chai/-/chai-4.1.2.tgz", + "integrity": "sha1-D2RYS6ZC8PKs4oBiefTwbKI61zw=", + "requires": { + "assertion-error": "1.1.0", + "check-error": "1.0.2", + "deep-eql": "3.0.1", + "get-func-name": "2.0.0", + "pathval": "1.1.0", + "type-detect": "4.0.5" + } + }, + "chalk": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-0.4.0.tgz", + "integrity": "sha1-UZmj3c0MHv4jvAjBsCewYXbgxk8=", + "dev": true, + "requires": { + "ansi-styles": "1.0.0", + "has-color": "0.1.7", + "strip-ansi": "0.1.1" + } }, "check-error": { "version": "1.0.2", - "from": "check-error@>=1.0.1 <2.0.0", - "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=" + }, + "cliff": { + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/cliff/-/cliff-0.1.10.tgz", + "integrity": "sha1-U74z6p9ZvshWCe4wCsQgdgPlIBM=", + "dev": true, + "requires": { + "colors": "1.0.3", + "eyes": "0.1.8", + "winston": "0.8.3" + }, + "dependencies": { + "colors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", + "integrity": "sha1-BDP0TYCWgP3rYO0mDxsMJi6CpAs=", + "dev": true + } + } }, "coffee-script": { "version": "1.7.1", - "from": "coffee-script@>=1.7.1 <1.8.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz" + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz", + "integrity": "sha1-YplqhheAx15tUGnROCJyO3NAS/w=", + "requires": { + "mkdirp": "0.3.5" + } + }, + "colors": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", + "integrity": "sha1-JCP+ZnisDF2uiFLl0OW+CMmXq8w=", + "dev": true }, "combined-stream": { "version": "0.0.7", - "from": "combined-stream@>=0.0.4 <0.1.0", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz" + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", + "integrity": "sha1-ATfmV7qlp1QcV6w3rF/AfXO03B8=", + "requires": { + "delayed-stream": "0.0.5" + } }, "commander": { "version": "1.3.2", - "from": "commander@1.3.2", - "resolved": "https://registry.npmjs.org/commander/-/commander-1.3.2.tgz" + "resolved": "https://registry.npmjs.org/commander/-/commander-1.3.2.tgz", + "integrity": 
"sha1-io8w7GcKb91kr1LxkUuQfXnq1bU=", + "requires": { + "keypress": "0.1.0" + } }, "concat-map": { "version": "0.0.1", - "from": "concat-map@0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "optional": true + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "configstore": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-0.3.2.tgz", + "integrity": "sha1-JeTBbDdoq/dcWmW8YXYfSVBVtFk=", + "dev": true, + "requires": { + "graceful-fs": "4.1.11", + "js-yaml": "3.10.0", + "mkdirp": "0.3.5", + "object-assign": "2.1.1", + "osenv": "0.1.4", + "user-home": "1.1.1", + "uuid": "2.0.3", + "xdg-basedir": "1.0.1" + }, + "dependencies": { + "argparse": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", + "integrity": "sha1-c9g7wmP4bpf4zE9rrhsOkKfSLIY=", + "dev": true, + "requires": { + "sprintf-js": "1.0.3" + } + }, + "esprima": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz", + "integrity": "sha512-oftTcaMu/EGrEIu904mWteKIv8vMuOgGYo7EhVJJN00R/EED9DCua/xxHRdYnKtcECzVg7xOWhflvJMnqcFZjw==", + "dev": true + }, + "js-yaml": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz", + "integrity": "sha512-O2v52ffjLa9VeM43J4XocZE//WT9N0IiwDa3KSHH7Tu8CtH+1qM8SIZvnsTh6v+4yFy5KUY3BHUVwjpfAWsjIA==", + "dev": true, + "requires": { + "argparse": "1.0.9", + "esprima": "4.0.0" + } + } + } }, "connect": { "version": "2.12.0", - "from": "connect@2.12.0", - "resolved": "https://registry.npmjs.org/connect/-/connect-2.12.0.tgz" + "resolved": "https://registry.npmjs.org/connect/-/connect-2.12.0.tgz", + "integrity": "sha1-Mdj6DcrN8ZCNgivSkjvootKn7Zo=", + "requires": { + "batch": "0.5.0", + "buffer-crc32": "0.2.1", + "bytes": "0.2.1", + "cookie": "0.1.0", + "cookie-signature": "1.0.1", + "debug": "0.8.1", + "fresh": "0.2.0", + "methods": "0.1.0", + "multiparty": "2.2.0", + "negotiator": "0.3.0", + "pause": "0.0.1", + "qs": "0.6.6", + "raw-body": "1.1.2", + "send": "0.1.4", + "uid2": "0.0.3" + } }, "cookie": { "version": "0.1.0", - "from": "cookie@0.1.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.1.0.tgz" + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.1.0.tgz", + "integrity": "sha1-kOtGndzpBchm3mh+/EMTHYgB+dA=" }, "cookie-jar": { "version": "0.2.0", - "from": "cookie-jar@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz" + "resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz", + "integrity": "sha1-ZOzAasl423leS1KQy+SLo3gUAPo=" }, "cookie-signature": { "version": "1.0.1", - "from": "cookie-signature@1.0.1", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.1.tgz" + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.1.tgz", + "integrity": "sha1-ROByFIrwHm6OJK+/EmkNaK5pjss=" }, "core-util-is": { "version": "1.0.2", - "from": "core-util-is@>=1.0.0 <1.1.0", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, "cryptiles": { "version": "0.1.3", - "from": "cryptiles@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz" + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz", + "integrity": "sha1-GlVnNPBtJLo0hirpy55wmjr7/xw=", + "requires": { + "boom": 
"0.3.8" + } }, "crypto-browserify": { "version": "1.0.9", - "from": "crypto-browserify@1.0.9", - "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-1.0.9.tgz" + "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-1.0.9.tgz", + "integrity": "sha1-zFRJaF37hesRyYKKzHy4erW7/MA=" + }, + "cycle": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz", + "integrity": "sha1-IegLK+hYD5i0aPN5QwZisEbDStI=", + "dev": true + }, + "dateformat": { + "version": "1.0.2-1.2.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz", + "integrity": "sha1-sCIMAt6YYXQztyhRz0fePfLNvuk=", + "dev": true }, "debug": { "version": "0.8.1", - "from": "debug@>=0.7.3 <1.0.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-0.8.1.tgz" + "resolved": "https://registry.npmjs.org/debug/-/debug-0.8.1.tgz", + "integrity": "sha1-IP9NJvXkIstoobrLu2EDmtjBwTA=" }, "deep-eql": { "version": "3.0.1", - "from": "deep-eql@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz" + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "requires": { + "type-detect": "4.0.5" + } + }, + "deep-equal": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.0.1.tgz", + "integrity": "sha1-9dJgKStmDghO/0zbyfCK0yR0SLU=", + "dev": true }, "delayed-stream": { "version": "0.0.5", - "from": "delayed-stream@0.0.5", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", + "integrity": "sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8=" + }, + "diff": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/diff/-/diff-1.0.7.tgz", + "integrity": "sha1-JLuwAcSn1VIhaefKvbLCgU7ZHPQ=", + "dev": true + }, + "director": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/director/-/director-1.2.7.tgz", + "integrity": "sha1-v9N0EHX9f7GlsuE2WMX0vsd3NvM=", + "dev": true }, "dtrace-provider": { "version": "0.4.0", - "from": "dtrace-provider@>=0.4.0 <0.5.0", "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.4.0.tgz", - "optional": true + "integrity": "sha1-C2e8HMd+eb+IuHrSBmT0p1POPyY=", + "optional": true, + "requires": { + "nan": "1.5.3" + } + }, + "esprima": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz", + "integrity": "sha1-n1V+CPw7TSbs6d00+Pv0drYlha0=", + "dev": true + }, + "event-stream": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/event-stream/-/event-stream-0.5.3.tgz", + "integrity": "sha1-t3uTCfcQet3+q2PwwOr9jbC9jBw=", + "dev": true, + "requires": { + "optimist": "0.2.8" + }, + "dependencies": { + "optimist": { + "version": "0.2.8", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.2.8.tgz", + "integrity": "sha1-6YGrfiaLRXlIWTtVZ0wJmoFcrDE=", + "dev": true, + "requires": { + "wordwrap": "0.0.3" + } + } + } + }, + "eventemitter2": { + "version": "0.4.14", + "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz", + "integrity": "sha1-j2G3XN4BKy6esoTUVFWDtWQ7Yas=", + "dev": true }, "events": { "version": "1.1.1", - "from": "events@>=1.1.1 <2.0.0", - "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz" + "resolved": 
"https://registry.npmjs.org/events/-/events-1.1.1.tgz", + "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=" }, "express": { "version": "3.4.8", - "from": "express@>=3.4.8 <3.5.0", "resolved": "https://registry.npmjs.org/express/-/express-3.4.8.tgz", + "integrity": "sha1-qnqJht4HBTM39Lxe2aZFPZzI4uE=", + "requires": { + "buffer-crc32": "0.2.1", + "commander": "1.3.2", + "connect": "2.12.0", + "cookie": "0.1.0", + "cookie-signature": "1.0.1", + "debug": "0.8.1", + "fresh": "0.2.0", + "merge-descriptors": "0.0.1", + "methods": "0.1.0", + "mkdirp": "0.3.5", + "range-parser": "0.0.4", + "send": "0.1.4" + }, "dependencies": { "range-parser": { "version": "0.0.4", - "from": "range-parser@0.0.4", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz" + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz", + "integrity": "sha1-wEJ//vUcEKy6B4KkbJYC50T/Ygs=" } } }, + "eyes": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", + "integrity": "sha1-Ys8SAjTGg3hdkCNIqADvPgzCC8A=", + "dev": true + }, + "faye-websocket": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.4.4.tgz", + "integrity": "sha1-wUxbO/FNdBf/v9mQwKdJXNnzN7w=", + "dev": true + }, + "findup-sync": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz", + "integrity": "sha1-fz56l7gjksZTvwZYm9hRkOk8NoM=", + "dev": true, + "requires": { + "glob": "6.0.4", + "lodash": "4.17.4" + } + }, + "flatiron": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/flatiron/-/flatiron-0.4.3.tgz", + "integrity": "sha1-JIz3mj2n19w3nioRySonGcu1QPY=", + "dev": true, + "requires": { + "broadway": "0.3.6", + "director": "1.2.7", + "optimist": "0.6.0", + "prompt": "0.2.14" + }, + "dependencies": { + "optimist": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz", + "integrity": "sha1-aUJIJvNAX3nxQub8PZrljU27kgA=", + "dev": true, + "requires": { + "minimist": "0.0.8", + "wordwrap": "0.0.3" + } + } + } + }, + "forever": { + "version": "0.14.2", + "resolved": "https://registry.npmjs.org/forever/-/forever-0.14.2.tgz", + "integrity": "sha1-6Tsr2UxXBavBmxXlTDEz1puinGs=", + "dev": true, + "requires": { + "cliff": "0.1.10", + "colors": "0.6.2", + "flatiron": "0.4.3", + "forever-monitor": "1.5.2", + "nconf": "0.6.9", + "nssocket": "0.5.3", + "optimist": "0.6.1", + "timespan": "2.3.0", + "utile": "0.2.1", + "winston": "0.8.3" + } + }, "forever-agent": { "version": "0.2.0", - "from": "forever-agent@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz" + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz", + "integrity": "sha1-4cJcetROCcOPIzh2x2/MJP+EOx8=" + }, + "forever-monitor": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/forever-monitor/-/forever-monitor-1.5.2.tgz", + "integrity": "sha1-J5OI36k7CFNj1rKKgj7wpq7rNdc=", + "dev": true, + "requires": { + "broadway": "0.3.6", + "minimatch": "3.0.4", + "ps-tree": "0.0.3", + "utile": "0.2.1", + "watch": "0.13.0" + } }, "form-data": { "version": "0.0.10", - "from": "form-data@>=0.0.3 <0.1.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz" + "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz", + "integrity": "sha1-2zRaU3jYau6x7V1VO4aawZLS9e0=", + "requires": { + "async": "0.2.10", + "combined-stream": "0.0.7", + "mime": "1.2.11" + } }, 
"formatio": { "version": "1.2.0", - "from": "formatio@1.2.0", - "resolved": "https://registry.npmjs.org/formatio/-/formatio-1.2.0.tgz" + "resolved": "https://registry.npmjs.org/formatio/-/formatio-1.2.0.tgz", + "integrity": "sha1-87IWfZBoxGmKjVH092CjmlTYGOs=", + "requires": { + "samsam": "1.3.0" + } }, "fresh": { "version": "0.2.0", - "from": "fresh@0.2.0", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.2.0.tgz" + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.2.0.tgz", + "integrity": "sha1-v9lALPPfEsSkwxDHn5mj3eE9NKc=" }, "fs-extra": { "version": "1.0.0", - "from": "fs-extra@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-1.0.0.tgz", + "integrity": "sha1-zTzl9+fLYUWIP8rjGR6Yd/hYeVA=", + "requires": { + "graceful-fs": "4.1.11", + "jsonfile": "2.4.0", + "klaw": "1.3.1" + } + }, + "gaze": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/gaze/-/gaze-0.4.3.tgz", + "integrity": "sha1-5Tj0/15P5kj0c6l+HrslPS3hJ7U=", + "dev": true, + "requires": { + "globule": "0.1.0" + } }, "get-func-name": { "version": "2.0.0", - "from": "get-func-name@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=" }, "gettemporaryfilepath": { "version": "0.0.1", - "from": "gettemporaryfilepath@0.0.1", - "resolved": "https://registry.npmjs.org/gettemporaryfilepath/-/gettemporaryfilepath-0.0.1.tgz" + "resolved": "https://registry.npmjs.org/gettemporaryfilepath/-/gettemporaryfilepath-0.0.1.tgz", + "integrity": "sha1-uKLHAUu1zUFTTpg7XKFgo3RwhGk=" }, "glob": { "version": "6.0.4", - "from": "glob@>=6.0.1 <7.0.0", "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "optional": true + "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", + "requires": { + "inflight": "1.0.6", + "inherits": "2.0.3", + "minimatch": "3.0.4", + "once": "1.4.0", + "path-is-absolute": "1.0.1" + } + }, + "globule": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/globule/-/globule-0.1.0.tgz", + "integrity": "sha1-2cjt3h2nnRJaFRt5UzuXhnY0auU=", + "dev": true, + "requires": { + "glob": "6.0.4", + "lodash": "4.17.4", + "minimatch": "3.0.4" + } }, "graceful-fs": { "version": "4.1.11", - "from": "graceful-fs@>=4.1.2 <5.0.0", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz" + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", + "integrity": "sha1-Dovf5NHduIVNZOBOp8AOKgJuVlg=" + }, + "growl": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.7.0.tgz", + "integrity": "sha1-3i1mE20ALhErpw8/EMMc98NQsto=", + "dev": true + }, + "grunt": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/grunt/-/grunt-0.4.1.tgz", + "integrity": "sha1-1YkuVoCt2e0b796apjXPRrj0lyk=", + "dev": true, + "requires": { + "async": "0.2.10", + "coffee-script": "1.7.1", + "colors": "0.6.2", + "dateformat": "1.0.2-1.2.3", + "eventemitter2": "0.4.14", + "findup-sync": "0.1.3", + "glob": "6.0.4", + "hooker": "0.2.3", + "iconv-lite": "0.2.11", + "js-yaml": "2.0.5", + "lodash": "4.17.4", + "minimatch": "3.0.4", + "nopt": "1.0.10", + "rimraf": "2.2.8", + "underscore.string": "2.2.1", + "which": "1.0.9" + } + }, + "grunt-bunyan": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/grunt-bunyan/-/grunt-bunyan-0.5.0.tgz", + "integrity": 
"sha1-aCnXbgGZQ9owQTk2MaNuKsgpsWw=", + "dev": true, + "requires": { + "lodash": "4.17.4" + } + }, + "grunt-concurrent": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/grunt-concurrent/-/grunt-concurrent-0.4.2.tgz", + "integrity": "sha1-Mf2Qbm4X2oTXgLLOZNn4QGX3PgY=", + "dev": true, + "requires": { + "async": "0.2.10", + "lpad": "0.1.0" + } + }, + "grunt-contrib-clean": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/grunt-contrib-clean/-/grunt-contrib-clean-0.5.0.tgz", + "integrity": "sha1-9T397ghJsce0Dp67umn0jExgecU=", + "dev": true, + "requires": { + "rimraf": "2.2.8" + } + }, + "grunt-contrib-coffee": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/grunt-contrib-coffee/-/grunt-contrib-coffee-0.7.0.tgz", + "integrity": "sha1-ixIme3TnM4sfKcW4txj7n4mYLxM=", + "dev": true, + "requires": { + "coffee-script": "1.7.1" + } + }, + "grunt-contrib-requirejs": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/grunt-contrib-requirejs/-/grunt-contrib-requirejs-0.4.1.tgz", + "integrity": "sha1-hiuhZxQbio82r1RE/qsycruM9L0=", + "dev": true, + "requires": { + "requirejs": "2.1.22" + } + }, + "grunt-contrib-watch": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/grunt-contrib-watch/-/grunt-contrib-watch-0.5.3.tgz", + "integrity": "sha1-fZ61Rl1Qb6FPqspH5uh5CoLBye4=", + "dev": true, + "requires": { + "gaze": "0.4.3", + "tiny-lr": "0.0.4" + } + }, + "grunt-execute": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/grunt-execute/-/grunt-execute-0.2.2.tgz", + "integrity": "sha1-TpRf5XlZzA3neZCDtrQq7ZYWNQo=", + "dev": true + }, + "grunt-forever": { + "version": "0.4.7", + "resolved": "https://registry.npmjs.org/grunt-forever/-/grunt-forever-0.4.7.tgz", + "integrity": "sha1-dHDb4a2hFFAhZKTCoAOHXfj+EzA=", + "dev": true, + "requires": { + "forever": "0.14.2" + } + }, + "grunt-mocha-test": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/grunt-mocha-test/-/grunt-mocha-test-0.8.2.tgz", + "integrity": "sha1-emGEuYhg0Phb3qrWvqob199bvus=", + "dev": true, + "requires": { + "mocha": "1.14.0" + } + }, + "grunt-nodemon": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/grunt-nodemon/-/grunt-nodemon-0.2.1.tgz", + "integrity": "sha1-G48kiVKSCX3IFNFgOpfo/sHJJPM=", + "dev": true, + "requires": { + "nodemon": "1.0.20" + } + }, + "has-color": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/has-color/-/has-color-0.1.7.tgz", + "integrity": "sha1-ZxRKUmDDT8PMpnfQQdr1L+e3iy8=", + "dev": true + }, + "has-flag": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz", + "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=" }, "hawk": { "version": "0.10.2", - "from": "hawk@>=0.10.2 <0.11.0", - "resolved": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz" + "resolved": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz", + "integrity": "sha1-mzYd7pWpMWQObVBOBWCaj8OsRdI=", + "requires": { + "boom": "0.3.8", + "cryptiles": "0.1.3", + "hoek": "0.7.6", + "sntp": "0.1.4" + } }, "heapdump": { "version": "0.3.9", - "from": "heapdump@>=0.3.2 <0.4.0", - "resolved": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.9.tgz" + "resolved": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.9.tgz", + "integrity": "sha1-A8dOsN9dZ74Jgug0KbqcnSs7f3g=" }, "hoek": { "version": "0.7.6", - "from": "hoek@>=0.7.0 <0.8.0", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz" + "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz", + "integrity": 
"sha1-YPvZBFV1Qc0rh5Wr8wihs3cOFVo=" + }, + "hooker": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz", + "integrity": "sha1-uDT3I8xKJCqmWWNFnfbZhMXT2Vk=", + "dev": true + }, + "i": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/i/-/i-0.3.6.tgz", + "integrity": "sha1-2WyScyB28HJxG2sQ/X1PZa2O4j0=", + "dev": true + }, + "iconv-lite": { + "version": "0.2.11", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz", + "integrity": "sha1-HOYKOleGSiktEyH/RgnKS7llrcg=", + "dev": true }, "ieee754": { "version": "1.1.8", - "from": "ieee754@>=1.1.4 <2.0.0", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz" + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz", + "integrity": "sha1-vjPUCsEO8ZJnAfbwii2G+/0a0+Q=" }, "inflight": { "version": "1.0.6", - "from": "inflight@>=1.0.4 <2.0.0", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "optional": true + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "requires": { + "once": "1.4.0", + "wrappy": "1.0.2" + } }, "inherits": { "version": "2.0.3", - "from": "inherits@>=2.0.1 <2.1.0", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + }, + "ini": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", + "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==", + "dev": true }, "isarray": { "version": "1.0.0", - "from": "isarray@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", + "dev": true + }, + "jade": { + "version": "0.26.3", + "resolved": "https://registry.npmjs.org/jade/-/jade-0.26.3.tgz", + "integrity": "sha1-jxDXl32NefL2/4YqgbBRPMslaGw=", + "dev": true, + "requires": { + "commander": "1.3.2", + "mkdirp": "0.3.5" + } }, "jmespath": { "version": "0.15.0", - "from": "jmespath@0.15.0", - "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz" + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", + "integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=" + }, + "js-yaml": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz", + "integrity": "sha1-olrmUJmZ6X3yeMZxnaEb0Gh3Q6g=", + "dev": true, + "requires": { + "argparse": "0.1.16", + "esprima": "1.0.4" + } }, "json-stringify-safe": { "version": "3.0.0", - "from": "json-stringify-safe@>=3.0.0 <3.1.0", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz", + "integrity": "sha1-nbew5TDH8onF6MhDKvGRwv91pbM=" }, "jsonfile": { "version": "2.4.0", - "from": "jsonfile@>=2.1.0 <3.0.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz" + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", + "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", + "requires": { + "graceful-fs": "4.1.11" + } }, "just-extend": { - "version": "1.1.22", - "from": "just-extend@>=1.1.22 <2.0.0", - 
"resolved": "https://registry.npmjs.org/just-extend/-/just-extend-1.1.22.tgz" + "version": "1.1.27", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-1.1.27.tgz", + "integrity": "sha512-mJVp13Ix6gFo3SBAy9U/kL+oeZqzlYYYLQBwXVBlVzIsZwBqGREnOro24oC/8s8aox+rJhtZ2DiQof++IrkA+g==" }, "keypress": { "version": "0.1.0", - "from": "keypress@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz" + "resolved": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz", + "integrity": "sha1-SjGI1CkbZrT2XtuZ+AaqmuKTWSo=" }, "klaw": { "version": "1.3.1", - "from": "klaw@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz" + "resolved": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz", + "integrity": "sha1-QIhDO0azsbolnXh4XY6W9zugJDk=", + "requires": { + "graceful-fs": "4.1.11" + } }, "knox": { "version": "0.9.2", - "from": "knox@~0.9.1", "resolved": "https://registry.npmjs.org/knox/-/knox-0.9.2.tgz", + "integrity": "sha1-NzZZNmniTwJP2vcjtqHcSv2DmnE=", + "requires": { + "debug": "1.0.4", + "mime": "1.3.4", + "once": "1.4.0", + "stream-counter": "1.0.0", + "xml2js": "0.4.17" + }, "dependencies": { "debug": { "version": "1.0.4", - "from": "debug@^1.0.2", "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.4.tgz", + "integrity": "sha1-W5wla9VLbsAigxdvqKDt5tFUy/g=", + "requires": { + "ms": "0.6.2" + }, "dependencies": { "ms": { "version": "0.6.2", - "from": "ms@0.6.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz" + "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", + "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" } } }, "mime": { "version": "1.3.4", - "from": "mime@*", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz" + "resolved": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz", + "integrity": "sha1-EV+eO2s9rylZmDyzjxSaLUDrXVM=" }, "once": { "version": "1.4.0", - "from": "once@^1.3.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "requires": { + "wrappy": "1.0.2" + }, "dependencies": { "wrappy": { "version": "1.0.2", - "from": "wrappy@1", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" } } }, "stream-counter": { "version": "1.0.0", - "from": "stream-counter@^1.0.0", - "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz", + "integrity": "sha1-kc8lac5NxQYf6816yyY5SloRR1E=" }, "xml2js": { "version": "0.4.17", - "from": "xml2js@^0.4.4", "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.17.tgz", + "integrity": "sha1-F76T6q4/O3eTWceVtBlwWogX6Gg=", + "requires": { + "sax": "1.2.2", + "xmlbuilder": "4.2.1" + }, "dependencies": { "sax": { "version": "1.2.2", - "from": "sax@>=0.6.0", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.2.tgz" + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.2.tgz", + "integrity": "sha1-/YYxojvHgmvvXYcb24c3jJVkeCg=" }, "xmlbuilder": { "version": "4.2.1", - "from": "xmlbuilder@^4.1.0", "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-4.2.1.tgz", + "integrity": "sha1-qlijBBoGb5DqoWwvU4n/GfP0YaU=", + "requires": { + "lodash": "4.17.4" + }, "dependencies": { "lodash": { "version": "4.17.4", - "from": "lodash@^4.0.0", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz" + 
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", + "integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4=" } } } @@ -389,292 +1028,557 @@ } } }, + "lazy": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/lazy/-/lazy-1.0.11.tgz", + "integrity": "sha1-2qBoIGKCVCwIgojpdcKXwa53tpA=", + "dev": true + }, "lodash": { "version": "4.17.4", - "from": "lodash@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz" + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", + "integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4=" + }, + "lodash.get": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=" }, "logger-sharelatex": { - "version": "1.0.0", - "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.1.0", - "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#5a3ea8e655f23e76a77bbc207c012d3fc944c8d8", + "version": "git+https://github.com/sharelatex/logger-sharelatex.git#5a3ea8e655f23e76a77bbc207c012d3fc944c8d8", + "requires": { + "bunyan": "1.3.6", + "coffee-script": "1.4.0", + "raven": "0.8.1" + }, "dependencies": { "coffee-script": { "version": "1.4.0", - "from": "coffee-script@1.4.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.4.0.tgz", + "integrity": "sha1-XjvIqsJsAajie/EHcixWVfWtfTY=" } } }, "lolex": { - "version": "2.1.2", - "from": "lolex@>=2.1.2 <3.0.0", - "resolved": "https://registry.npmjs.org/lolex/-/lolex-2.1.2.tgz" + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-2.3.1.tgz", + "integrity": "sha512-mQuW55GhduF3ppo+ZRUTz1PRjEh1hS5BbqU7d8D0ez2OKxHDod7StPPeAVKisZR5aLkHZjdGWSL42LSONUJsZw==" + }, + "lpad": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/lpad/-/lpad-0.1.0.tgz", + "integrity": "sha1-5MYMKROTIcWXDeSTtJauDXdM0qc=", + "dev": true }, "lsmod": { "version": "0.0.3", - "from": "lsmod@>=0.0.3 <0.1.0", - "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-0.0.3.tgz" + "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-0.0.3.tgz", + "integrity": "sha1-F+E9ThrpF1DqVlNUjNiecUetAkQ=" }, "lynx": { "version": "0.1.1", - "from": "lynx@>=0.1.1 <0.2.0", - "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz" + "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", + "integrity": "sha1-Mxjc7xaQi4KG6Bisz9sxzXQkj50=", + "requires": { + "mersenne": "0.0.4", + "statsd-parser": "0.0.4" + } }, "merge-descriptors": { "version": "0.0.1", - "from": "merge-descriptors@0.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-0.0.1.tgz" + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-0.0.1.tgz", + "integrity": "sha1-L/CYDJJM+B0LXR+2ARd8uLtWwNA=" }, "mersenne": { "version": "0.0.4", - "from": "mersenne@>=0.0.3 <0.1.0", - "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz" + "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", + "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" }, "methods": { "version": "0.1.0", - "from": "methods@0.1.0", - "resolved": "https://registry.npmjs.org/methods/-/methods-0.1.0.tgz" + "resolved": "https://registry.npmjs.org/methods/-/methods-0.1.0.tgz", + "integrity": "sha1-M11Cnu/SG3us8unJIqjSvRSjDk8=" }, "metrics-sharelatex": { - "version": "1.3.0", - "from": 
"git+https://github.com/sharelatex/metrics-sharelatex.git#v1.3.0", - "resolved": "git+https://github.com/sharelatex/metrics-sharelatex.git#080c4aeb696edcd5d6d86f202f2c528f0661d7a6", + "version": "git+https://github.com/sharelatex/metrics-sharelatex.git#080c4aeb696edcd5d6d86f202f2c528f0661d7a6", + "requires": { + "coffee-script": "1.6.0", + "lynx": "0.1.1", + "underscore": "1.6.0" + }, "dependencies": { "coffee-script": { "version": "1.6.0", - "from": "coffee-script@1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", + "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" }, "underscore": { "version": "1.6.0", - "from": "underscore@>=1.6.0 <1.7.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz" + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", + "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" } } }, "mime": { "version": "1.2.11", - "from": "mime@>=1.2.9 <1.3.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" }, "minimatch": { "version": "3.0.4", - "from": "minimatch@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "optional": true + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "requires": { + "brace-expansion": "1.1.8" + } }, "minimist": { "version": "0.0.8", - "from": "minimist@0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" }, "mkdirp": { "version": "0.3.5", - "from": "mkdirp@>=0.3.5 <0.4.0", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", + "integrity": "sha1-3j5fiWHIjHh+4TaN+EmsRBPsqNc=" + }, + "mocha": { + "version": "1.14.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-1.14.0.tgz", + "integrity": "sha1-cT223FAAGRqdA1gZXQkIeQ7LYVc=", + "dev": true, + "requires": { + "commander": "1.3.2", + "debug": "0.8.1", + "diff": "1.0.7", + "glob": "6.0.4", + "growl": "1.7.0", + "jade": "0.26.3", + "mkdirp": "0.3.5" + } }, "multiparty": { "version": "2.2.0", - "from": "multiparty@2.2.0", - "resolved": "https://registry.npmjs.org/multiparty/-/multiparty-2.2.0.tgz" + "resolved": "https://registry.npmjs.org/multiparty/-/multiparty-2.2.0.tgz", + "integrity": "sha1-pWfCrwAK0i3I8qZT2Rl4rh9TFvQ=", + "requires": { + "readable-stream": "1.1.14", + "stream-counter": "0.2.0" + } + }, + "mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "dev": true }, "mv": { "version": "2.1.1", - "from": "mv@>=2.0.0 <3.0.0", "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=", "optional": true, + "requires": { + "mkdirp": "0.5.1", + "ncp": "2.0.0", + "rimraf": "2.4.5" + }, "dependencies": { "mkdirp": { "version": "0.5.1", - "from": "mkdirp@>=0.5.1 <0.6.0", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "optional": true + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "optional": true, + "requires": { + "minimist": "0.0.8" + } }, 
"rimraf": { "version": "2.4.5", - "from": "rimraf@>=2.4.0 <2.5.0", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", - "optional": true + "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=", + "optional": true, + "requires": { + "glob": "6.0.4" + } } } }, "nan": { "version": "1.5.3", - "from": "nan@>=1.5.1 <1.6.0", "resolved": "https://registry.npmjs.org/nan/-/nan-1.5.3.tgz", + "integrity": "sha1-TNDswTO3sHAKSSpkat1CeuijGOs=", "optional": true }, - "native-promise-only": { - "version": "0.8.1", - "from": "native-promise-only@>=0.8.1 <0.9.0", - "resolved": "https://registry.npmjs.org/native-promise-only/-/native-promise-only-0.8.1.tgz" + "nconf": { + "version": "0.6.9", + "resolved": "https://registry.npmjs.org/nconf/-/nconf-0.6.9.tgz", + "integrity": "sha1-lXDvFe1vmuays8jV5xtm0xk81mE=", + "dev": true, + "requires": { + "async": "0.2.10", + "ini": "1.3.5", + "optimist": "0.6.0" + }, + "dependencies": { + "optimist": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz", + "integrity": "sha1-aUJIJvNAX3nxQub8PZrljU27kgA=", + "dev": true, + "requires": { + "minimist": "0.0.8", + "wordwrap": "0.0.3" + } + } + } }, "ncp": { "version": "2.0.0", - "from": "ncp@>=2.0.0 <2.1.0", "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", - "optional": true + "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=" }, "negotiator": { "version": "0.3.0", - "from": "negotiator@0.3.0", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.3.0.tgz" + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.3.0.tgz", + "integrity": "sha1-cG1pLv7d9XTVfqn7GriaT6fuj2A=" }, "nise": { - "version": "1.0.1", - "from": "nise@>=1.0.1 <2.0.0", - "resolved": "https://registry.npmjs.org/nise/-/nise-1.0.1.tgz", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/nise/-/nise-1.2.0.tgz", + "integrity": "sha512-q9jXh3UNsMV28KeqI43ILz5+c3l+RiNW8mhurEwCKckuHQbL+hTJIKKTiUlCPKlgQ/OukFvSnKB/Jk3+sFbkGA==", + "requires": { + "formatio": "1.2.0", + "just-extend": "1.1.27", + "lolex": "1.6.0", + "path-to-regexp": "1.7.0", + "text-encoding": "0.6.4" + }, "dependencies": { "lolex": { "version": "1.6.0", - "from": "lolex@>=1.6.0 <2.0.0", - "resolved": "https://registry.npmjs.org/lolex/-/lolex-1.6.0.tgz" + "resolved": "https://registry.npmjs.org/lolex/-/lolex-1.6.0.tgz", + "integrity": "sha1-OpoCg0UqR9dDnnJzG54H1zhuSfY=" } } }, "node-transloadit": { "version": "0.0.4", - "from": "node-transloadit@0.0.4", "resolved": "https://registry.npmjs.org/node-transloadit/-/node-transloadit-0.0.4.tgz", + "integrity": "sha1-4ZoHheON94NblO2AANHjXmg7zsE=", + "requires": { + "request": "2.16.6", + "underscore": "1.2.1" + }, "dependencies": { "qs": { "version": "0.5.6", - "from": "qs@>=0.5.4 <0.6.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz" + "resolved": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz", + "integrity": "sha1-MbGtBYVnZRxSaSFQa5qHk5EaA4Q=" }, "request": { "version": "2.16.6", - "from": "request@>=2.16.6 <2.17.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.16.6.tgz" + "resolved": "https://registry.npmjs.org/request/-/request-2.16.6.tgz", + "integrity": "sha1-hy/kRa5y3iZrN4edatfclI+gHK0=", + "requires": { + "aws-sign": "0.2.0", + "cookie-jar": "0.2.0", + "forever-agent": "0.2.0", + "form-data": "0.0.10", + "hawk": "0.10.2", + "json-stringify-safe": "3.0.0", + "mime": "1.2.11", + "node-uuid": "1.4.8", + "oauth-sign": "0.2.0", + "qs": "0.5.6", + "tunnel-agent": "0.2.0" + } }, "underscore": { "version": "1.2.1", - 
"from": "underscore@1.2.1", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz" + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz", + "integrity": "sha1-/FxrB2VnPZKi1KyLTcCqiHAuK9Q=" } } }, "node-uuid": { "version": "1.4.8", - "from": "node-uuid@>=1.4.1 <1.5.0", - "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz" + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", + "integrity": "sha1-sEDrCSOWivq/jTL7HxfxFn/auQc=" + }, + "nodemon": { + "version": "1.0.20", + "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-1.0.20.tgz", + "integrity": "sha1-vBOKNwaMt426UIhbYkl6/f7u3aQ=", + "dev": true, + "requires": { + "minimatch": "3.0.4", + "ps-tree": "0.0.3", + "update-notifier": "0.1.10" + } + }, + "nopt": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", + "integrity": "sha1-bd0hvSoxQXuScn3Vhfim83YI6+4=", + "dev": true, + "requires": { + "abbrev": "1.1.1" + } + }, + "noptify": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/noptify/-/noptify-0.0.3.tgz", + "integrity": "sha1-WPZUpz2XU98MUdlobckhBKZ/S7s=", + "dev": true, + "requires": { + "nopt": "2.0.0" + }, + "dependencies": { + "nopt": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-2.0.0.tgz", + "integrity": "sha1-ynQW8gpeP5w7hhgPlilfo9C1Lg0=", + "dev": true, + "requires": { + "abbrev": "1.1.1" + } + } + } + }, + "nssocket": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/nssocket/-/nssocket-0.5.3.tgz", + "integrity": "sha1-iDyi7GBfXtZKTVGQsmJUAZKPj40=", + "dev": true, + "requires": { + "eventemitter2": "0.4.14", + "lazy": "1.0.11" + } }, "oauth-sign": { "version": "0.2.0", - "from": "oauth-sign@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz" + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz", + "integrity": "sha1-oOahcV2u0GLzIrYit/5a/RA1tuI=" + }, + "object-assign": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-2.1.1.tgz", + "integrity": "sha1-Q8NuXVaf+OSBbE76i+AtJpZ8GKo=", + "dev": true }, "once": { "version": "1.4.0", - "from": "once@>=1.3.0 <2.0.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "requires": { + "wrappy": "1.0.2" + } + }, + "optimist": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "dev": true, + "requires": { + "minimist": "0.0.8", + "wordwrap": "0.0.3" + } + }, + "os-homedir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", + "dev": true + }, + "os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "dev": true + }, + "osenv": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.4.tgz", + "integrity": "sha1-Qv5tWVPfBsgGS+bxdsPQWqqjRkQ=", + "dev": true, + "requires": { + "os-homedir": "1.0.2", + "os-tmpdir": "1.0.2" + } }, "path-is-absolute": { "version": "1.0.1", - "from": "path-is-absolute@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "optional": true + 
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, "path-to-regexp": { "version": "1.7.0", - "from": "path-to-regexp@>=1.7.0 <2.0.0", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", + "integrity": "sha1-Wf3g9DW62suhA6hOnTvGTpa5k30=", + "requires": { + "isarray": "0.0.1" + }, "dependencies": { "isarray": { "version": "0.0.1", - "from": "isarray@0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" } } }, "pathval": { "version": "1.1.0", - "from": "pathval@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz" + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz", + "integrity": "sha1-uULm1L3mUwBe9rcTYd74cn0GReA=" }, "pause": { "version": "0.0.1", - "from": "pause@0.0.1", - "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz" + "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz", + "integrity": "sha1-HUCLP9t2kjuVQ9lvtMnf1TXZy10=" + }, + "pkginfo": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", + "integrity": "sha1-Wyn2qB9wcXFC4J52W76rl7T4HiE=", + "dev": true }, "pngcrush": { "version": "0.0.3", - "from": "pngcrush@0.0.3", - "resolved": "https://registry.npmjs.org/pngcrush/-/pngcrush-0.0.3.tgz" + "resolved": "https://registry.npmjs.org/pngcrush/-/pngcrush-0.0.3.tgz", + "integrity": "sha1-v2dW6s2h+rNJwHdo6AXMEA0o+Tc=", + "requires": { + "gettemporaryfilepath": "0.0.1" + } }, - "process-nextick-args": { - "version": "1.0.7", - "from": "process-nextick-args@>=1.0.6 <1.1.0", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" + "prompt": { + "version": "0.2.14", + "resolved": "https://registry.npmjs.org/prompt/-/prompt-0.2.14.tgz", + "integrity": "sha1-V3VPZPVD/XsIRXB8gY7OYY8F/9w=", + "dev": true, + "requires": { + "pkginfo": "0.3.1", + "read": "1.0.7", + "revalidator": "0.1.8", + "utile": "0.2.1", + "winston": "0.8.3" + } + }, + "ps-tree": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/ps-tree/-/ps-tree-0.0.3.tgz", + "integrity": "sha1-2/jXUqf+Ivp9WGNWiUmWEOknbdw=", + "dev": true, + "requires": { + "event-stream": "0.5.3" + } }, "punycode": { "version": "1.3.2", - "from": "punycode@1.3.2", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz" + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" }, "qs": { "version": "0.6.6", - "from": "qs@0.6.6", - "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz" + "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", + "integrity": "sha1-bgFQmP9RlouKPIGQAdXyyJvEsQc=" }, "querystring": { "version": "0.2.0", - "from": "querystring@0.2.0", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz" + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" }, "range-parser": { "version": "1.2.0", - "from": "range-parser@>=1.0.2 <2.0.0", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", + "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=" }, "raven": { "version": "0.8.1", - "from": "raven@>=0.8.0 <0.9.0", - "resolved": "https://registry.npmjs.org/raven/-/raven-0.8.1.tgz" + "resolved": 
"https://registry.npmjs.org/raven/-/raven-0.8.1.tgz", + "integrity": "sha1-UVk7tlnHcnjc00gitlq+d7dRuvU=", + "requires": { + "cookie": "0.1.0", + "lsmod": "0.0.3", + "node-uuid": "1.4.8", + "stack-trace": "0.0.7" + } }, "raw-body": { "version": "1.1.2", - "from": "raw-body@1.1.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-1.1.2.tgz", + "integrity": "sha1-x0swBN6l3v0WlhcRBqx0DsMdYr4=", + "requires": { + "bytes": "0.2.1" + } + }, + "read": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", + "integrity": "sha1-s9oZvQUkMal2cdRKQmNK33ELQMQ=", + "dev": true, + "requires": { + "mute-stream": "0.0.7" + } }, "readable-stream": { "version": "1.1.14", - "from": "readable-stream@>=1.1.9 <1.2.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", + "requires": { + "core-util-is": "1.0.2", + "inherits": "2.0.3", + "isarray": "0.0.1", + "string_decoder": "0.10.31" + }, "dependencies": { "isarray": { "version": "0.0.1", - "from": "isarray@0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" } } }, "recluster": { "version": "0.3.7", - "from": "recluster@>=0.3.7 <0.4.0", - "resolved": "https://registry.npmjs.org/recluster/-/recluster-0.3.7.tgz" + "resolved": "https://registry.npmjs.org/recluster/-/recluster-0.3.7.tgz", + "integrity": "sha1-aKRx3ZC2obl3ZjTPdpZAWutWeJU=" }, "request": { "version": "2.14.0", - "from": "request@2.14.0", "resolved": "https://registry.npmjs.org/request/-/request-2.14.0.tgz", + "integrity": "sha1-DYrLsLFMGrguAAt9OB+oyA0afYg=", + "requires": { + "form-data": "0.0.7", + "mime": "1.2.9" + }, "dependencies": { "form-data": { "version": "0.0.7", - "from": "form-data@~0.0.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.0.7.tgz", + "integrity": "sha1-chEYKiaiZs45cQ3IvEqBtwQIWb4=", + "requires": { + "async": "0.1.22", + "combined-stream": "0.0.4", + "mime": "1.2.9" + }, "dependencies": { "async": { "version": "0.1.22", - "from": "async@~0.1.9" + "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz", + "integrity": "sha1-D8GqoIig4+8Ovi2IMbqw3PiEUGE=" }, "combined-stream": { "version": "0.0.4", - "from": "combined-stream@~0.0.4", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.4.tgz", + "integrity": "sha1-LRpDNH2+lRWkonlnMuW4hHOECyI=", + "requires": { + "delayed-stream": "0.0.5" + }, "dependencies": { "delayed-stream": { "version": "0.0.5", - "from": "delayed-stream@0.0.5" + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", + "integrity": "sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8=" } } } @@ -682,173 +1586,352 @@ }, "mime": { "version": "1.2.9", - "from": "mime@~1.2.7" + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.9.tgz", + "integrity": "sha1-AJzUCGe9Nd5SGzuWbwTi+NTRPQk=" } } }, "require-like": { "version": "0.1.2", - "from": "require-like@0.1.2", - "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz" + "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", + "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=" + }, + "requirejs": { + "version": "2.1.22", + "resolved": "https://registry.npmjs.org/requirejs/-/requirejs-2.1.22.tgz", + "integrity": "sha1-3Xj9LTQYDA1ixyS1uK68BmTgNm8=", 
+ "dev": true }, "response": { "version": "0.14.0", - "from": "response@0.14.0", - "resolved": "https://registry.npmjs.org/response/-/response-0.14.0.tgz" + "resolved": "https://registry.npmjs.org/response/-/response-0.14.0.tgz", + "integrity": "sha1-BmNS/z5rAm0EdYCUB2Y7Rob9JpY=", + "requires": { + "best-encoding": "0.1.1", + "bl": "0.7.0", + "caseless": "0.3.0", + "mime": "1.2.11" + } + }, + "revalidator": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/revalidator/-/revalidator-0.1.8.tgz", + "integrity": "sha1-/s5hv6DBtSoga9axgZgYS91SOjs=", + "dev": true }, "rimraf": { "version": "2.2.8", - "from": "rimraf@2.2.8", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" - }, - "safe-buffer": { - "version": "5.1.1", - "from": "safe-buffer@>=5.0.1 <6.0.0", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz" + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz", + "integrity": "sha1-5Dm+Kq7jJzIZUnMPmaiSnk/FBYI=" }, "safe-json-stringify": { "version": "1.0.4", - "from": "safe-json-stringify@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.4.tgz", + "integrity": "sha1-gaCY9Efku8P/MxKiQ1IbwGDvWRE=", "optional": true }, "samsam": { - "version": "1.2.1", - "from": "samsam@>=1.1.3 <2.0.0", - "resolved": "https://registry.npmjs.org/samsam/-/samsam-1.2.1.tgz" + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/samsam/-/samsam-1.3.0.tgz", + "integrity": "sha512-1HwIYD/8UlOtFS3QO3w7ey+SdSDFE4HRNLZoZRYVQefrOY3l17epswImeB1ijgJFQJodIaHcwkp3r/myBjFVbg==" }, "sandboxed-module": { "version": "2.0.3", - "from": "sandboxed-module@latest", "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz", + "integrity": "sha1-x+VFkzm7y6KMUwPusz9ug4e/upY=", + "requires": { + "require-like": "0.1.2", + "stack-trace": "0.0.9" + }, "dependencies": { "stack-trace": { "version": "0.0.9", - "from": "stack-trace@0.0.9", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", + "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" } } }, "sax": { "version": "1.2.1", - "from": "sax@1.2.1", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz" + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" + }, + "semver": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-2.3.2.tgz", + "integrity": "sha1-uYSPJdbPNjMwc+ye+IVtQvEjPlI=", + "dev": true }, "send": { "version": "0.1.4", - "from": "send@0.1.4", "resolved": "https://registry.npmjs.org/send/-/send-0.1.4.tgz", + "integrity": "sha1-vnDY0b4B3mGCGvE3gLUDRaT3Gr0=", + "requires": { + "debug": "0.8.1", + "fresh": "0.2.0", + "mime": "1.2.11", + "range-parser": "0.0.4" + }, "dependencies": { "range-parser": { "version": "0.0.4", - "from": "range-parser@0.0.4", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz" + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz", + "integrity": "sha1-wEJ//vUcEKy6B4KkbJYC50T/Ygs=" } } }, "settings-sharelatex": { - "version": "1.0.0", - "from": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", - "resolved": "git+https://github.com/sharelatex/settings-sharelatex.git#cbc5e41c1dbe6789721a14b3fdae05bf22546559", + "version": "git+https://github.com/sharelatex/settings-sharelatex.git#cbc5e41c1dbe6789721a14b3fdae05bf22546559", + 
"requires": { + "coffee-script": "1.6.0" + }, "dependencies": { "coffee-script": { "version": "1.6.0", - "from": "coffee-script@1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", + "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" } } }, "sinon": { - "version": "3.2.1", - "from": "sinon@latest", - "resolved": "https://registry.npmjs.org/sinon/-/sinon-3.2.1.tgz", + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-4.1.4.tgz", + "integrity": "sha512-ISJZDPf8RS2z4/LAgy1gIimAvF9zg9C9ClQhLTWYWm4HBZjo1WELXlVfkudjdYeN+GtQ2uVBe52m0npIV0gDow==", + "requires": { + "diff": "3.4.0", + "formatio": "1.2.0", + "lodash.get": "4.4.2", + "lolex": "2.3.1", + "nise": "1.2.0", + "supports-color": "4.5.0", + "type-detect": "4.0.5" + }, "dependencies": { "diff": { - "version": "3.3.1", - "from": "diff@>=3.1.0 <4.0.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-3.3.1.tgz" + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.4.0.tgz", + "integrity": "sha512-QpVuMTEoJMF7cKzi6bvWhRulU1fZqZnvyVQgNhPaxxuTYwyjn/j1v9falseQ/uXWwPnO56RBfwtg4h/EQXmucA==" } } }, "sntp": { "version": "0.1.4", - "from": "sntp@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz" + "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz", + "integrity": "sha1-XvSBuVGnspr/30r9fyaDj8ESD4Q=", + "requires": { + "hoek": "0.7.6" + } + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true }, "stack-trace": { "version": "0.0.7", - "from": "stack-trace@0.0.7", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.7.tgz" + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.7.tgz", + "integrity": "sha1-xy4Il0T8Nln1CM3ONiGvVjTsD/8=" }, "statsd-parser": { "version": "0.0.4", - "from": "statsd-parser@>=0.0.4 <0.1.0", - "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz" + "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", + "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" }, "stream-buffers": { "version": "0.2.6", - "from": "stream-buffers@>=0.2.5 <0.3.0", - "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz" + "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz", + "integrity": "sha1-GBwI1bs2kARfaUAbmuanoM8zE/w=" }, "stream-counter": { "version": "0.2.0", - "from": "stream-counter@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-0.2.0.tgz" + "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-0.2.0.tgz", + "integrity": "sha1-3tJmVWMZyLDiIoErnPOyb6fZR94=", + "requires": { + "readable-stream": "1.1.14" + } }, "string_decoder": { "version": "0.10.31", - "from": "string_decoder@>=0.10.0 <0.11.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + }, + "strip-ansi": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.1.1.tgz", + "integrity": "sha1-OeipjQRNFQZgq+SmgIrPcLt7yZE=", + "dev": true + }, + "supports-color": { + "version": "4.5.0", + "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz", + "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=", + "requires": { + "has-flag": "2.0.0" + } }, "text-encoding": { "version": "0.6.4", - "from": "text-encoding@0.6.4", - "resolved": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz" + "resolved": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz", + "integrity": "sha1-45mpgiV6J22uQou5KEXLcb3CbRk=" + }, + "timespan": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/timespan/-/timespan-2.3.0.tgz", + "integrity": "sha1-SQLOBAvRPYRcj1myfp1ZutbzmSk=", + "dev": true + }, + "tiny-lr": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/tiny-lr/-/tiny-lr-0.0.4.tgz", + "integrity": "sha1-gGGFR/Y/aX0Fy0DEwsSwg1Ia77Y=", + "dev": true, + "requires": { + "debug": "0.8.1", + "faye-websocket": "0.4.4", + "noptify": "0.0.3", + "qs": "0.6.6" + } }, "tunnel-agent": { "version": "0.2.0", - "from": "tunnel-agent@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz" + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz", + "integrity": "sha1-aFPCr7GyEJ5FYp5JK9419Fnqaeg=" }, "type-detect": { - "version": "4.0.3", - "from": "type-detect@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.3.tgz" + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.5.tgz", + "integrity": "sha512-N9IvkQslUGYGC24RkJk1ba99foK6TkwC2FHAEBlQFBP0RxQZS8ZpJuAZcwiY/w9ZJHFQb1aOXBI60OdxhTrwEQ==" }, "uid2": { "version": "0.0.3", - "from": "uid2@0.0.3", - "resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.3.tgz" + "resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.3.tgz", + "integrity": "sha1-SDEm4Rd03y9xuLY53NeZw3YWK4I=" }, "underscore": { "version": "1.5.2", - "from": "underscore@>=1.5.2 <1.6.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz" + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz", + "integrity": "sha1-EzXF5PXm0zu7SwBrqMhqAPVW3gg=" + }, + "underscore.string": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz", + "integrity": "sha1-18D6KvXVoaZ/QlPa7pgTLnM/Dxk=", + "dev": true + }, + "update-notifier": { + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-0.1.10.tgz", + "integrity": "sha1-IVy+EFM2nw1KRPhLUeuny4BIRpU=", + "dev": true, + "requires": { + "chalk": "0.4.0", + "configstore": "0.3.2", + "request": "2.14.0", + "semver": "2.3.2" + } }, "url": { "version": "0.10.3", - "from": "url@0.10.3", - "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz" + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", + "requires": { + "punycode": "1.3.2", + "querystring": "0.2.0" + } }, - "util-deprecate": { - "version": "1.0.2", - "from": "util-deprecate@>=1.0.1 <1.1.0", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + "user-home": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/user-home/-/user-home-1.1.1.tgz", + "integrity": "sha1-K1viOjK2Onyd640PKNSFcko98ZA=", + "dev": true + }, + "utile": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/utile/-/utile-0.2.1.tgz", + "integrity": "sha1-kwyI6ZCY1iIINMNWy9mncFItkNc=", + "dev": true, + "requires": { + "async": "0.2.10", + "deep-equal": "1.0.1", + 
"i": "0.3.6", + "mkdirp": "0.3.5", + "ncp": "2.0.0", + "rimraf": "2.2.8" + } + }, + "uuid": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-2.0.3.tgz", + "integrity": "sha1-Z+LoY3lyFVMN/zGOW/nc6/1Hsho=", + "dev": true + }, + "watch": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/watch/-/watch-0.13.0.tgz", + "integrity": "sha1-/MbSs/DoxzSC61Qjmhn9W8+adTw=", + "dev": true, + "requires": { + "minimist": "0.0.8" + } + }, + "which": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/which/-/which-1.0.9.tgz", + "integrity": "sha1-RgwdoPgQED0DIam2M6+eV15kSG8=", + "dev": true + }, + "winston": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/winston/-/winston-0.8.3.tgz", + "integrity": "sha1-ZLar9M0Brcrv1QCTk7HY6L7BnbA=", + "dev": true, + "requires": { + "async": "0.2.10", + "colors": "0.6.2", + "cycle": "1.0.3", + "eyes": "0.1.8", + "isstream": "0.1.2", + "pkginfo": "0.3.1", + "stack-trace": "0.0.7" + } + }, + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true }, "wrappy": { "version": "1.0.2", - "from": "wrappy@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "xdg-basedir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-1.0.1.tgz", + "integrity": "sha1-FP+PY6T9vLBdW27qIrNvMDO58E4=", + "dev": true, + "requires": { + "user-home": "1.1.1" + } }, "xml2js": { "version": "0.4.17", - "from": "xml2js@0.4.17", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.17.tgz" + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.17.tgz", + "integrity": "sha1-F76T6q4/O3eTWceVtBlwWogX6Gg=", + "requires": { + "sax": "1.2.1", + "xmlbuilder": "4.2.1" + } }, "xmlbuilder": { "version": "4.2.1", - "from": "xmlbuilder@4.2.1", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-4.2.1.tgz" + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-4.2.1.tgz", + "integrity": "sha1-qlijBBoGb5DqoWwvU4n/GfP0YaU=", + "requires": { + "lodash": "4.17.4" + } } } } diff --git a/services/filestore/package.json b/services/filestore/package.json index 0dbf40d514..75adf6aec8 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -6,6 +6,18 @@ "type": "git", "url": "https://github.com/sharelatex/filestore-sharelatex.git" }, + "scripts": { + "test:acceptance:run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:run -- $@", + "test:unit:run": "mocha --recursive --reporter spec $@ test/unit/js", + "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:run -- $@", + "compile:unit_tests": "coffee -o test/unit/js -c test/unit/coffee", + "compile:acceptance_tests": "coffee -o test/acceptance/js -c test/acceptance/coffee", + "compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee", + "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests", + "start": "npm run compile:app && node app.js", + "nodemon": "nodemon --config nodemon.json" + }, "dependencies": { "async": "~0.2.10", "aws-sdk": "^2.1.39", diff --git 
a/services/filestore/test/acceptence/coffee/SendingFileTest.coffee b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee similarity index 100% rename from services/filestore/test/acceptence/coffee/SendingFileTest.coffee rename to services/filestore/test/acceptance/coffee/SendingFileTest.coffee From e235651e9c00e3c92793ed60b6418becf8b0f3fd Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 16 Jan 2018 12:44:26 +0000 Subject: [PATCH 200/555] bump version --- services/filestore/app.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 3c0ca7b72d..dc1e54361c 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -96,7 +96,7 @@ app.post "/shutdown", (req, res)-> app.get '/status', (req, res)-> if appIsOk - res.send('filestore sharelatex up - v2') + res.send('filestore sharelatex up - v3') else logger.log "app is not ok - shutting down" res.send("server is being shut down", 500) From dbfebc0d1cf936ac9e7f2bf0e2ff09c20b9172a5 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 16 Jan 2018 14:36:09 +0000 Subject: [PATCH 201/555] bump --- services/filestore/app.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index dc1e54361c..d135f4fb25 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -96,7 +96,7 @@ app.post "/shutdown", (req, res)-> app.get '/status', (req, res)-> if appIsOk - res.send('filestore sharelatex up - v3') + res.send('filestore sharelatex up - v4') else logger.log "app is not ok - shutting down" res.send("server is being shut down", 500) From 08c734e850a6cd733a6258bf63c83d69d091acda Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 16 Jan 2018 14:48:21 +0000 Subject: [PATCH 202/555] another bump git log --- services/filestore/app.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index d135f4fb25..56706b7a3d 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -96,7 +96,7 @@ app.post "/shutdown", (req, res)-> app.get '/status', (req, res)-> if appIsOk - res.send('filestore sharelatex up - v4') + res.send('filestore sharelatex up - v5') else logger.log "app is not ok - shutting down" res.send("server is being shut down", 500) From 77e34d33dfe742fb8689e72752a0db37194343a9 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 16 Jan 2018 17:00:27 +0000 Subject: [PATCH 203/555] hello james --- services/filestore/app.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 56706b7a3d..bd693af655 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -96,7 +96,7 @@ app.post "/shutdown", (req, res)-> app.get '/status', (req, res)-> if appIsOk - res.send('filestore sharelatex up - v5') + res.send('filestore sharelatex up - hello james') else logger.log "app is not ok - shutting down" res.send("server is being shut down", 500) From 9e321658ede754876b6e18ef1ce1d3e53f8da2f2 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 23 Mar 2018 15:26:38 +0000 Subject: [PATCH 204/555] make s3 configurable via env vars --- .../filestore/config/settings.defaults.coffee | 56 +++++++++++-------- 1 file changed, 33 insertions(+), 23 deletions(-) diff --git a/services/filestore/config/settings.defaults.coffee 
b/services/filestore/config/settings.defaults.coffee index bb4a885478..673c411462 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -1,6 +1,6 @@ Path = require "path" -module.exports = +settings = internal: filestore: port: 3009 @@ -11,22 +11,26 @@ module.exports = # Choices are # s3 - Amazon S3 # fs - local filesystem - backend: "fs" - stores: - # where to store user and template binary files - # - # For Amazon S3 this is the bucket name to store binary files in. - # - # For local filesystem this is the directory to store the files in. - # Must contain full path, e.g. "/var/lib/sharelatex/data". - # This path must exist, not be tmpfs and be writable to by the user sharelatex is run as. - user_files: Path.resolve(__dirname + "/../user_files") - public_files: Path.resolve(__dirname + "/../public_files") - template_files: Path.resolve(__dirname + "/../template_files") - # if you are using S3, then fill in your S3 details below - # s3: - # key: "" - # secret: "" + if process.env['AWS_KEY']? + backend: "s3" + s3: + key: process.env['AWS_KEY'] + secret: process.env['AWS_SECRET'] + stores: + user_files: process.env['AWS_S3_USER_FILES_BUCKET_NAME'] + template_files: process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME'] + public_files: process.env['AWS_S3_PUBLIC_FILES_BUCKET_NAME'] + else + backend: "fs" + stores: + # + # For local filesystem this is the directory to store the files in. + # Must contain full path, e.g. "/var/lib/sharelatex/data". + # This path must exist, not be tmpfs and be writable to by the user sharelatex is run as. + user_files: Path.resolve(__dirname + "/../user_files") + public_files: Path.resolve(__dirname + "/../public_files") + template_files: Path.resolve(__dirname + "/../template_files") + path: uploadFolder: Path.resolve(__dirname + "/../uploads") @@ -35,9 +39,15 @@ module.exports = # Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"] convertCommandPrefix: [] - # Filestore health check - # ---------------------- - # Project and file details to check in persistor when calling /health_check - # health_check: - # project_id: "" - # file_id: "" + +# Filestore health check +# ---------------------- +# Project and file details to check in persistor when calling /health_check +if process.env['HEALTH_CHECK_PROJECT_ID']? and process.env['HEALTH_CHECK_FILE_ID']? + settings.health_check = + project_id: process.env['HEALTH_CHECK_PROJECT_ID'] + file_id: process.env['HEALTH_CHECK_FILE_ID'] + +module.exports = settings + +console.log module.exports \ No newline at end of file From 73f77b5667b9a9b94b52e64b2697719f2a6213ff Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 23 Mar 2018 16:14:30 +0000 Subject: [PATCH 205/555] copy everything into docker container --- services/filestore/Dockerfile | 6 ++---- services/filestore/config/settings.defaults.coffee | 2 +- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index 82a4edd9f0..9a01b4da61 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -6,10 +6,8 @@ COPY package.json /app/ RUN npm install --quiet -COPY config /app/config -COPY test /app/test -COPY app /app/app -COPY app.coffee /app + +COPY . 
/app RUN npm run compile:all FROM node:8.9.1 diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 673c411462..37800eecc7 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -4,7 +4,7 @@ settings = internal: filestore: port: 3009 - host: "localhost" + host: process.env['LISTEN_ADDRESS'] or "localhost" filestore: # Which backend persistor to use. From 2f0cc4e64a73945913f097e92b319be85c62d3b2 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 23 Mar 2018 16:19:52 +0000 Subject: [PATCH 206/555] chown uploads --- services/filestore/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index 9a01b4da61..4ca82556fc 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -13,6 +13,7 @@ RUN npm run compile:all FROM node:8.9.1 COPY --from=0 /app /app +RUN chown app uploads # All app and node_modules will be owned by root. # The app will run as the 'app' user, and so not have write permissions # on any files it doesn't need. From 1af209cd246c713d87e5b5709a9af39d9ad95c9d Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 23 Mar 2018 16:23:04 +0000 Subject: [PATCH 207/555] chown uploads --- services/filestore/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index 4ca82556fc..108e373071 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -13,7 +13,7 @@ RUN npm run compile:all FROM node:8.9.1 COPY --from=0 /app /app -RUN chown app uploads +RUN chown app:app /app/uploads # All app and node_modules will be owned by root. # The app will run as the 'app' user, and so not have write permissions # on any files it doesn't need. From 837a8ff35f6d5b728a808d3aaa2fd32981f81967 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 23 Mar 2018 16:26:42 +0000 Subject: [PATCH 208/555] change uploads permission after creation of user --- services/filestore/Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index 108e373071..eb03c3257b 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -13,11 +13,12 @@ RUN npm run compile:all FROM node:8.9.1 COPY --from=0 /app /app -RUN chown app:app /app/uploads # All app and node_modules will be owned by root. # The app will run as the 'app' user, and so not have write permissions # on any files it doesn't need. RUN useradd --user-group --create-home --home-dir /app --shell /bin/false app +RUN chown app:app /app/uploads + USER app WORKDIR /app From 092a151d511e32d4775884517aa7d77a448b3b7a Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 23 Mar 2018 16:36:41 +0000 Subject: [PATCH 209/555] Use new build system --- services/filestore/Dockerfile | 25 +++++++++++-------------- services/filestore/install_deps.sh | 4 ++++ 2 files changed, 15 insertions(+), 14 deletions(-) create mode 100755 services/filestore/install_deps.sh diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index eb03c3257b..c3070f5a3e 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -1,27 +1,24 @@ -FROM node:8.9.1 +FROM node:8.9.1 as app + +COPY ./ /app WORKDIR /app -COPY package.json /app/ +RUN npm install -RUN npm install --quiet - - -COPY . 
/app RUN npm run compile:all FROM node:8.9.1 -COPY --from=0 /app /app -# All app and node_modules will be owned by root. -# The app will run as the 'app' user, and so not have write permissions -# on any files it doesn't need. -RUN useradd --user-group --create-home --home-dir /app --shell /bin/false app -RUN chown app:app /app/uploads +COPY --from=app /app /app -USER app WORKDIR /app -EXPOSE 3009 +# All app and node_modules will be owned by root. +# The app will run as the 'node' user, and so not have write permissions +# on any files it doesn't need. +RUN ./install_deps.sh + +USER node CMD ["node","app.js"] diff --git a/services/filestore/install_deps.sh b/services/filestore/install_deps.sh new file mode 100755 index 0000000000..dceb2a7d8a --- /dev/null +++ b/services/filestore/install_deps.sh @@ -0,0 +1,4 @@ +#!/bin/sh +apt-get install poppler-utils vim ghostscript imagemagick optipng --yes +npm rebuild +chown -R node:node /app/uploads From 801adf8e9ea728fc9bc153cf3080fa5ed8cc1249 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 23 Mar 2018 17:06:31 +0000 Subject: [PATCH 210/555] install gs manually --- services/filestore/install_deps.sh | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/services/filestore/install_deps.sh b/services/filestore/install_deps.sh index dceb2a7d8a..4861bcfd3f 100755 --- a/services/filestore/install_deps.sh +++ b/services/filestore/install_deps.sh @@ -1,4 +1,11 @@ #!/bin/sh -apt-get install poppler-utils vim ghostscript imagemagick optipng --yes +apt-get update + +apt-get install vim imagemagick optipng --yes + +wget https://s3.amazonaws.com/sl-public-dev-assets/ghostscript-9.15.tar.gz /tmp/ghostscript-9.15.tar.gz +tar -xvf /tmp/ghostscript-9.15.tar.gz +cd /tmp/ghostscript-9.15 && ./configure && make && make install + npm rebuild chown -R node:node /app/uploads From 8c782ba6b2622a4bfd924ed483b5810a9952155f Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 23 Mar 2018 17:12:37 +0000 Subject: [PATCH 211/555] debuging --- services/filestore/Dockerfile | 2 +- services/filestore/install_deps.sh | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index c3070f5a3e..efe5879b50 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -19,6 +19,6 @@ WORKDIR /app # on any files it doesn't need. RUN ./install_deps.sh -USER node +# USER node CMD ["node","app.js"] diff --git a/services/filestore/install_deps.sh b/services/filestore/install_deps.sh index 4861bcfd3f..2d2632ea19 100755 --- a/services/filestore/install_deps.sh +++ b/services/filestore/install_deps.sh @@ -9,3 +9,5 @@ cd /tmp/ghostscript-9.15 && ./configure && make && make install npm rebuild chown -R node:node /app/uploads + +echo gs --version \ No newline at end of file From b45e2a922c7375e183f73a10765a6fc4eb54abff Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 23 Mar 2018 17:21:15 +0000 Subject: [PATCH 212/555] install gs correctly --- services/filestore/Dockerfile | 2 +- services/filestore/install_deps.sh | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index efe5879b50..c3070f5a3e 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -19,6 +19,6 @@ WORKDIR /app # on any files it doesn't need. 
RUN ./install_deps.sh -# USER node +USER node CMD ["node","app.js"] diff --git a/services/filestore/install_deps.sh b/services/filestore/install_deps.sh index 2d2632ea19..193c358628 100755 --- a/services/filestore/install_deps.sh +++ b/services/filestore/install_deps.sh @@ -3,7 +3,8 @@ apt-get update apt-get install vim imagemagick optipng --yes -wget https://s3.amazonaws.com/sl-public-dev-assets/ghostscript-9.15.tar.gz /tmp/ghostscript-9.15.tar.gz +wget https://s3.amazonaws.com/sl-public-dev-assets/ghostscript-9.15.tar.gz -O /tmp/ghostscript-9.15.tar.gz +ls -al /tmp tar -xvf /tmp/ghostscript-9.15.tar.gz cd /tmp/ghostscript-9.15 && ./configure && make && make install From 46f2ff051c7d9eb3b6adc5514dd96000d46ad71b Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 23 Mar 2018 17:29:21 +0000 Subject: [PATCH 213/555] cd into tmp --- services/filestore/install_deps.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/install_deps.sh b/services/filestore/install_deps.sh index 193c358628..52b6bde59d 100755 --- a/services/filestore/install_deps.sh +++ b/services/filestore/install_deps.sh @@ -3,8 +3,8 @@ apt-get update apt-get install vim imagemagick optipng --yes -wget https://s3.amazonaws.com/sl-public-dev-assets/ghostscript-9.15.tar.gz -O /tmp/ghostscript-9.15.tar.gz -ls -al /tmp +wget -q https://s3.amazonaws.com/sl-public-dev-assets/ghostscript-9.15.tar.gz -O /tmp/ghostscript-9.15.tar.gz +cd /tmp tar -xvf /tmp/ghostscript-9.15.tar.gz cd /tmp/ghostscript-9.15 && ./configure && make && make install From de74ec5094754ea52eb8ba545542573b2f325d74 Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Fri, 18 May 2018 13:47:06 +0100 Subject: [PATCH 214/555] Change build and publish to use the GCR of one of my projects (hard code for now) --- services/filestore/Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 3ebf4c4a34..89ac367ccb 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -34,10 +34,10 @@ test_acceptance: ci_clean # clear the database before each acceptance test run $(DOCKER_COMPOSE) run --rm test_acceptance -- ${MOCHA_ARGS} build: - docker build --pull --tag quay.io/sharelatex/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . + docker build --pull --tag gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . 
publish: - docker push quay.io/sharelatex/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + docker push gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) ci: # On the CI server, we want to run our tests in the image that we @@ -59,4 +59,4 @@ ci_clean: .PHONY: all install compile clean test test_unit test_acceptance \ test_acceptance_start_service test_acceptance_stop_service \ - test_acceptance_run build publish ci ci_clean \ No newline at end of file + test_acceptance_run build publish ci ci_clean From a2165d85a0cb3f225a7d134dc625b82d02e813c9 Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Fri, 18 May 2018 13:55:33 +0100 Subject: [PATCH 215/555] Update build script to use build-target docker --- services/filestore/Dockerfile | 17 ++++----- services/filestore/Jenkinsfile | 66 ++++++++++++++------------------- services/filestore/Makefile | 62 +++++++++++-------------------- services/filestore/package.json | 16 ++++---- 4 files changed, 65 insertions(+), 96 deletions(-) diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index c3070f5a3e..cb700d7ade 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -1,24 +1,21 @@ -FROM node:8.9.1 as app - -COPY ./ /app +FROM node:6.9.5 as app WORKDIR /app -RUN npm install +COPY package.json package-lock.json /app/ + +RUN npm install --quiet + +COPY . /app RUN npm run compile:all -FROM node:8.9.1 +FROM node:6.9.5 COPY --from=app /app /app WORKDIR /app - -# All app and node_modules will be owned by root. -# The app will run as the 'node' user, and so not have write permissions -# on any files it doesn't need. RUN ./install_deps.sh - USER node CMD ["node","app.js"] diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index 03528475d6..bc9ba0142f 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -1,79 +1,67 @@ -pipeline { - - agent { - docker { - image 'node:6.9.5' - args "-v /var/lib/jenkins/.npm:/tmp/.npm" - } - } +String cron_string = BRANCH_NAME == "master" ? "@daily" : "" - environment { - HOME = "/tmp" - } +pipeline { + agent any triggers { pollSCM('* * * * *') - cron('@daily') + cron(cron_string) } stages { - stage('Set up') { + stage('Build') { steps { - // we need to disable logallrefupdates, else git clones during the npm install will require git to lookup the user id - // which does not exist in the container's /etc/passwd file, causing the clone to fail. - sh 'git config --global core.logallrefupdates false' + sh 'make build' } } - stage('Install') { + + stage('Unit Tests') { steps { - sh 'rm -fr node_modules' - sh 'npm install' - sh 'npm rebuild' - sh 'npm install --quiet grunt-cli' + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit' } } - stage('Compile') { + + stage('Acceptance Tests') { steps { - sh 'node_modules/.bin/grunt compile' + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance' } } - stage('Test') { + + stage('Package and publish build') { steps { - sh 'NODE_ENV=development node_modules/.bin/grunt test:unit' + sh 'make publish' } } - stage('Package') { - steps { - sh 'echo ${BUILD_NUMBER} > build_number.txt' - sh 'touch build.tar.gz' // Avoid tar warning about files changing during read - sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .' 
- } - } - stage('Publish') { + + stage('Publish build number') { steps { + sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt' withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { - s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz") // The deployment process uses this file to figure out the latest build s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest") } } } } - + post { + always { + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean' + } + failure { - mail(from: "${EMAIL_ALERT_FROM}", - to: "${EMAIL_ALERT_TO}", + mail(from: "${EMAIL_ALERT_FROM}", + to: "${EMAIL_ALERT_TO}", subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}", body: "Build: ${BUILD_URL}") } } - + // The options directive is for configuration that applies to the whole job. options { // we'd like to make sure remove old builds, so we don't fill up our storage! buildDiscarder(logRotator(numToKeepStr:'50')) - + // And we'd really like to be sure that this build doesn't hang forever, so let's time it out after: timeout(time: 30, unit: 'MINUTES') } diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 89ac367ccb..aa6e35142d 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,62 +1,44 @@ -NPM := docker-compose -f docker-compose.yml ${DOCKER_COMPOSE_FLAGS} run --rm npm npm +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/sharelatex/sharelatex-dev-environment +# Version: 1.1.1 + BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) PROJECT_NAME = filestore DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml -DOCKER_COMPOSE := docker-compose ${DOCKER_COMPOSE_FLAGS} +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker-compose ${DOCKER_COMPOSE_FLAGS} -all: install test - @echo "Run:" - @echo " make install to set up the project dependencies (in docker)" - @echo " make test to run all the tests for the project (in docker)" - @echo " make run to run the app (natively)" - -install: - $(NPM) install - -run: - $(NPM) run start clean: rm -f app.js rm -rf app/js rm -rf test/unit/js rm -rf test/acceptance/js - # Deletes node_modules volume - docker-compose down --volumes -test: test_unit test_acceptance +test: test_unit test_acceptance test_unit: - $(DOCKER_COMPOSE) run --rm test_unit -- ${MOCHA_ARGS} + @[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit -- ${MOCHA_ARGS} -test_acceptance: ci_clean # clear the database before each acceptance test run - $(DOCKER_COMPOSE) run --rm test_acceptance -- ${MOCHA_ARGS} +test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run + @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance npm run test:acceptance -- ${MOCHA_ARGS} +test_clean: + $(DOCKER_COMPOSE) down -v -t 0 + +test_acceptance_pre_run: + @[ ! -f test/acceptance/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run build: + docker build --pull --tag quay.io/sharelatex/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . 
docker build --pull --tag gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . publish: + docker push quay.io/sharelatex/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) docker push gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) -ci: - # On the CI server, we want to run our tests in the image that we - # have built for deployment, which is what the docker-compose.ci.yml - # override does. - PROJECT_NAME=$(PROJECT_NAME) \ - BRANCH_NAME=$(BRANCH_NAME) \ - BUILD_NUMBER=$(BUILD_NUMBER) \ - DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" \ - $(MAKE) build test publish - -ci_clean: - PROJECT_NAME=$(PROJECT_NAME) \ - BRANCH_NAME=$(BRANCH_NAME) \ - BUILD_NUMBER=$(BUILD_NUMBER) \ - DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" \ - $(DOCKER_COMPOSE) down - -.PHONY: - all install compile clean test test_unit test_acceptance \ - test_acceptance_start_service test_acceptance_stop_service \ - test_acceptance_run build publish ci ci_clean +.PHONY: clean test test_unit test_acceptance test_clean build publish diff --git a/services/filestore/package.json b/services/filestore/package.json index 75adf6aec8..a3e679ce59 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -8,15 +8,17 @@ }, "scripts": { "test:acceptance:run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", - "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:run -- $@", + "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- $@", "test:unit:run": "mocha --recursive --reporter spec $@ test/unit/js", - "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:run -- $@", - "compile:unit_tests": "coffee -o test/unit/js -c test/unit/coffee", - "compile:acceptance_tests": "coffee -o test/acceptance/js -c test/acceptance/coffee", - "compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee", + "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- $@", + "compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee", + "compile:acceptance_tests": "[ ! 
-e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee", + "compile:app": "([ -e app/coffee ] && coffee $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests", - "start": "npm run compile:app && node app.js", - "nodemon": "nodemon --config nodemon.json" + "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", + "nodemon": "nodemon --config nodemon.json", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" }, "dependencies": { "async": "~0.2.10", From 5d5cbbef7ec709582b6bc70e710bce475ca4cae8 Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Fri, 18 May 2018 14:15:02 +0100 Subject: [PATCH 216/555] Create fake package-lock.json during build tagret --- services/filestore/Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/Makefile b/services/filestore/Makefile index aa6e35142d..1d5e77efe7 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -34,6 +34,7 @@ test_clean: test_acceptance_pre_run: @[ ! -f test/acceptance/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run build: + touch package-lock.json docker build --pull --tag quay.io/sharelatex/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . docker build --pull --tag gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . From 8db861258ad7489b3330a20d16cd08d700a56bca Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Fri, 18 May 2018 14:27:46 +0100 Subject: [PATCH 217/555] Add files generated by bin/update_build_scripts --- services/filestore/.dockerignore | 3 ++ services/filestore/docker-compose.ci.yml | 31 +++++++++++++++++++ services/filestore/docker-compose.yml | 39 ++++++++++++++++++++++++ services/filestore/nodemon.json | 19 ++++++++++++ services/filestore/package-lock.json | 0 5 files changed, 92 insertions(+) create mode 100644 services/filestore/.dockerignore create mode 100644 services/filestore/docker-compose.ci.yml create mode 100644 services/filestore/docker-compose.yml create mode 100644 services/filestore/nodemon.json create mode 100644 services/filestore/package-lock.json diff --git a/services/filestore/.dockerignore b/services/filestore/.dockerignore new file mode 100644 index 0000000000..a99835353f --- /dev/null +++ b/services/filestore/.dockerignore @@ -0,0 +1,3 @@ +node_modules/* +app.js +**/js/* diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml new file mode 100644 index 0000000000..e979b7d4c5 --- /dev/null +++ b/services/filestore/docker-compose.ci.yml @@ -0,0 +1,31 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/sharelatex/sharelatex-dev-environment +# Version: 1.1.1 + +version: "2" + +services: + test_unit: + image: quay.io/sharelatex/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + user: node + command: npm run test:unit:_run + + test_acceptance: + build: . 
+ image: quay.io/sharelatex/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + environment: + REDIS_HOST: redis + MONGO_HOST: mongo + POSTGRES_HOST: postgres + depends_on: + - mongo + - redis + user: node + command: npm run test:acceptance:_run + redis: + image: redis + + mongo: + image: mongo:3.4 + diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml new file mode 100644 index 0000000000..6d11493b66 --- /dev/null +++ b/services/filestore/docker-compose.yml @@ -0,0 +1,39 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/sharelatex/sharelatex-dev-environment +# Version: 1.1.1 + +version: "2" + +services: + test_unit: + image: node:6.9.5 + volumes: + - .:/app + working_dir: /app + environment: + MOCHA_GREP: ${MOCHA_GREP} + command: npm run test:unit + user: node + + test_acceptance: + build: . + volumes: + - .:/app + working_dir: /app + environment: + REDIS_HOST: redis + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + user: node + depends_on: + - mongo + - redis + command: npm run test:acceptance + redis: + image: redis + + mongo: + image: mongo:3.4 + diff --git a/services/filestore/nodemon.json b/services/filestore/nodemon.json new file mode 100644 index 0000000000..98db38d71b --- /dev/null +++ b/services/filestore/nodemon.json @@ -0,0 +1,19 @@ +{ + "ignore": [ + ".git", + "node_modules/" + ], + "verbose": true, + "legacyWatch": true, + "execMap": { + "js": "npm run start" + }, + + "watch": [ + "app/coffee/", + "app.coffee", + "config/" + ], + "ext": "coffee" + +} diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json new file mode 100644 index 0000000000..e69de29bb2 From 7fc7252c35bd6de417fc27af725d6197abfa019c Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Fri, 18 May 2018 14:29:20 +0100 Subject: [PATCH 218/555] Remove touch package-lock.json --- services/filestore/Makefile | 1 - 1 file changed, 1 deletion(-) diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 1d5e77efe7..aa6e35142d 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -34,7 +34,6 @@ test_clean: test_acceptance_pre_run: @[ ! -f test/acceptance/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run build: - touch package-lock.json docker build --pull --tag quay.io/sharelatex/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . docker build --pull --tag gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . 
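The Makefile and docker-compose files introduced in the patches above are driven from the Jenkinsfile on CI, and the same targets can be run by hand. A minimal sketch, assuming Docker and docker-compose are installed and the commands are run from the service's directory (per the Makefile, BUILD_NUMBER defaults to "local" and BRANCH_NAME to the current git branch):

    # Build the deployable image; at this point in the series the build target
    # tags it for both quay.io/sharelatex and the temporary gcr.io/csh-gcdm-test registry.
    make build

    # Run the unit and acceptance suites in containers via docker-compose.yml;
    # passing DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" instead runs them
    # against the freshly built image, as the Jenkinsfile does.
    make test_unit
    make test_acceptance

    # Tear down the compose containers and volumes between runs.
    make test_clean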
From b32c63e16c3cb5ac069732f67c7ac2fa05bbcb85 Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Fri, 18 May 2018 14:59:34 +0100 Subject: [PATCH 219/555] Remove --exit option to support older mocha --- services/filestore/package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index a3e679ce59..9694fc6679 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -7,7 +7,7 @@ "url": "https://github.com/sharelatex/filestore-sharelatex.git" }, "scripts": { - "test:acceptance:run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance:run": "mocha --recursive --reporter spec --timeout 15000 $@ test/acceptance/js", "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- $@", "test:unit:run": "mocha --recursive --reporter spec $@ test/unit/js", "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- $@", @@ -17,7 +17,7 @@ "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests", "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", "nodemon": "nodemon --config nodemon.json", - "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 $@ test/acceptance/js", "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" }, "dependencies": { From fcb2d18f8b6c510a0dfcad1cbe871b26a628fa61 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 18 May 2018 16:06:03 +0100 Subject: [PATCH 220/555] don't set command in make test acceptence --- services/filestore/Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/Makefile b/services/filestore/Makefile index aa6e35142d..8748455b23 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -6,7 +6,7 @@ BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) PROJECT_NAME = filestore -DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.ci.yml DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ BRANCH_NAME=$(BRANCH_NAME) \ PROJECT_NAME=$(PROJECT_NAME) \ @@ -23,10 +23,10 @@ clean: test: test_unit test_acceptance test_unit: - @[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit -- ${MOCHA_ARGS} + @[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run - @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance npm run test:acceptance -- ${MOCHA_ARGS} + @[ ! 
-d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance test_clean: $(DOCKER_COMPOSE) down -v -t 0 From 088d8850babc7c139d106ac63ad35610880d439d Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 18 May 2018 16:48:59 +0100 Subject: [PATCH 221/555] don't run acceptence tests yet --- services/filestore/Jenkinsfile | 6 +----- services/filestore/Makefile | 2 +- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index bc9ba0142f..ffc7265888 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -21,11 +21,7 @@ pipeline { } } - stage('Acceptance Tests') { - steps { - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance' - } - } + stage('Package and publish build') { steps { diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 8748455b23..bb7eab8037 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -6,7 +6,7 @@ BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) PROJECT_NAME = filestore -DOCKER_COMPOSE_FLAGS ?= -f docker-compose.ci.yml +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ BRANCH_NAME=$(BRANCH_NAME) \ PROJECT_NAME=$(PROJECT_NAME) \ From bfb98050e3d75acb429dcb9ca643a737afcadb80 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 18 May 2018 16:57:53 +0100 Subject: [PATCH 222/555] don't push to quay.io --- services/filestore/Makefile | 2 -- 1 file changed, 2 deletions(-) diff --git a/services/filestore/Makefile b/services/filestore/Makefile index bb7eab8037..ef9dd9d63b 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -34,11 +34,9 @@ test_clean: test_acceptance_pre_run: @[ ! -f test/acceptance/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run build: - docker build --pull --tag quay.io/sharelatex/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . docker build --pull --tag gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . publish: - docker push quay.io/sharelatex/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) docker push gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) .PHONY: clean test test_unit test_acceptance test_clean build publish From 28fc39d6c920b9ae4265cf52bfa5f494a6b8830d Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 18 May 2018 17:08:18 +0100 Subject: [PATCH 223/555] don't use quay.io in image name --- services/filestore/docker-compose.ci.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index e979b7d4c5..a9c5d8969c 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -7,13 +7,13 @@ version: "2" services: test_unit: - image: quay.io/sharelatex/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + # image: quay.io/sharelatex/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER user: node command: npm run test:unit:_run test_acceptance: build: . 
- image: quay.io/sharelatex/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + # image: quay.io/sharelatex/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER environment: REDIS_HOST: redis MONGO_HOST: mongo @@ -23,6 +23,7 @@ services: - redis user: node command: npm run test:acceptance:_run + redis: image: redis From df6b5203a10e1867a864677428af6d73f3197b8e Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 18 May 2018 17:18:39 +0100 Subject: [PATCH 224/555] point ci to gcr.io/csh-gcdm-test/ --- services/filestore/docker-compose.ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index a9c5d8969c..a1cdf244d9 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -7,13 +7,13 @@ version: "2" services: test_unit: - # image: quay.io/sharelatex/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + image: gcr.io/csh-gcdm-test/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER user: node command: npm run test:unit:_run test_acceptance: build: . - # image: quay.io/sharelatex/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + image: gcr.io/csh-gcdm-test/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER environment: REDIS_HOST: redis MONGO_HOST: mongo From fda6cb008437483ff1724a8cebeb6cbb18e2c629 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 21 May 2018 09:37:54 +0100 Subject: [PATCH 225/555] fix acceptence tests, add them back into jenkins, upgrade express --- services/filestore/Jenkinsfile | 6 ++++- services/filestore/app.coffee | 19 ++++++++++----- .../filestore/app/coffee/FileConverter.coffee | 2 +- services/filestore/package.json | 2 +- .../acceptance/coffee/FilestoreApp.coffee | 24 +++++++++++++++++++ .../acceptance/coffee/SendingFileTest.coffee | 7 +++--- 6 files changed, 48 insertions(+), 12 deletions(-) create mode 100644 services/filestore/test/acceptance/coffee/FilestoreApp.coffee diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index ffc7265888..e1a34a5235 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -21,7 +21,11 @@ pipeline { } } - + stage('Acceptance Tests') { + steps { + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance' + } + } stage('Package and publish build') { steps { diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index bd693af655..a6d6d11119 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -110,9 +110,8 @@ app.get "/health_check", healthCheckController.check app.get '*', (req, res)-> res.send 404 -server = require('http').createServer(app) -port = settings.internal.filestore.port or 3009 -host = "0.0.0.0" + + beginShutdown = () -> if appIsOk @@ -122,14 +121,22 @@ beginShutdown = () -> process.exit 1 , 120*1000 killTimer.unref?() # prevent timer from keeping process alive - server.close () -> + app.close () -> logger.log "closed all connections" Metrics.close() process.disconnect?() logger.log "server will stop accepting connections" -server.listen port, -> - logger.info "Filestore starting up, listening on #{host}:#{port}" + +port = settings.internal.filestore.port or 3009 +host = "0.0.0.0" + +if !module.parent # Called directly + app.listen port, host, (error) -> + logger.info "Filestore starting up, listening on #{host}:#{port}" + + +module.exports = app process.on 'SIGTERM', () -> logger.log("filestore got SIGTERM, shutting down gracefully") diff --git a/services/filestore/app/coffee/FileConverter.coffee 
b/services/filestore/app/coffee/FileConverter.coffee index 429fa7fccb..894b0e89fd 100644 --- a/services/filestore/app/coffee/FileConverter.coffee +++ b/services/filestore/app/coffee/FileConverter.coffee @@ -34,11 +34,11 @@ module.exports = callback(err, destPath) thumbnail: (sourcePath, callback)-> - logger.log sourcePath:sourcePath, "thumbnail convert file" destPath = "#{sourcePath}.png" sourcePath = "#{sourcePath}[0]" width = "260x" command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", "pdf:fit-page=#{width}", sourcePath, "-resize", width, destPath] + logger.log sourcePath:sourcePath, destPath:destPath, command:command, "thumbnail convert file" command = Settings.commands.convertCommandPrefix.concat(command) safe_exec command, childProcessOpts, (err, stdout, stderr)-> if err? diff --git a/services/filestore/package.json b/services/filestore/package.json index 9694fc6679..198a6ec366 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -24,7 +24,7 @@ "async": "~0.2.10", "aws-sdk": "^2.1.39", "coffee-script": "~1.7.1", - "express": "~3.4.8", + "express": "^4.2.0", "fs-extra": "^1.0.0", "heapdump": "^0.3.2", "knox": "~0.9.1", diff --git a/services/filestore/test/acceptance/coffee/FilestoreApp.coffee b/services/filestore/test/acceptance/coffee/FilestoreApp.coffee new file mode 100644 index 0000000000..818e90ec6f --- /dev/null +++ b/services/filestore/test/acceptance/coffee/FilestoreApp.coffee @@ -0,0 +1,24 @@ +app = require('../../../app') +require("logger-sharelatex").logger.level("info") +logger = require("logger-sharelatex") +Settings = require("settings-sharelatex") + +module.exports = + running: false + initing: false + callbacks: [] + ensureRunning: (callback = (error) ->) -> + if @running + return callback() + else if @initing + @callbacks.push callback + else + @initing = true + @callbacks.push callback + app.listen Settings.internal?.filestore?.port, "localhost", (error) => + throw error if error? 
+ @running = true + logger.log("filestore running in dev mode") + + for callback in @callbacks + callback() \ No newline at end of file diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee index 2731e25565..1b20e6b44f 100644 --- a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee @@ -1,4 +1,3 @@ - assert = require("chai").assert sinon = require('sinon') chai = require('chai') @@ -9,6 +8,7 @@ SandboxedModule = require('sandboxed-module') fs = require("fs") request = require("request") settings = require("settings-sharelatex") +FilestoreApp = require "./FilestoreApp" describe "Filestore", -> @@ -26,8 +26,9 @@ describe "Filestore", -> @filestoreUrl = "http://localhost:#{settings.internal.filestore.port}" beforeEach (done)-> - fs.unlink @localFileWritePath, => - done() + FilestoreApp.ensureRunning => + fs.unlink @localFileWritePath, -> + done() From d9eac8dd8b3044d8429ffe6f5b58b0fa86ea835f Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 21 May 2018 09:41:14 +0100 Subject: [PATCH 226/555] upgrade mocha --- services/filestore/package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index 198a6ec366..85ea112dd6 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -40,7 +40,8 @@ "rimraf": "2.2.8", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", "stream-buffers": "~0.2.5", - "underscore": "~1.5.2" + "underscore": "~1.5.2", + "mocha": "^4.0.1" }, "devDependencies": { "sinon": "", From ac990f2aba8761f0227a1357d343789b4f461950 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 21 May 2018 09:49:32 +0100 Subject: [PATCH 227/555] remove app.configure --- services/filestore/app.coffee | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index a6d6d11119..f7ef31e687 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -16,16 +16,7 @@ Metrics.open_sockets.monitor(logger) Metrics.event_loop?.monitor(logger) Metrics.memory.monitor(logger) -app.configure -> - app.use Metrics.http.monitor(logger) - -app.configure 'development', -> - console.log "Development Enviroment" - app.use express.errorHandler({ dumpExceptions: true, showStack: true }) - -app.configure 'production', -> - console.log "Production Enviroment" - app.use express.errorHandler() +app.use Metrics.http.monitor(logger) Metrics.inc "startup" From 3cf2e1d7c34bdd93b6ce89c60f337be6723d4577 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 21 May 2018 10:11:47 +0100 Subject: [PATCH 228/555] add null checks and new body parser --- services/filestore/app.coffee | 5 +++-- services/filestore/app/coffee/PersistorManager.coffee | 2 +- services/filestore/package.json | 3 ++- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index f7ef31e687..426996eccf 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -1,4 +1,5 @@ express = require('express') +bodyParser = require "body-parser" logger = require('logger-sharelatex') logger.initialize("filestore") settings = require("settings-sharelatex") @@ -61,7 +62,7 @@ app.use (req, res, next) -> app.get "/project/:project_id/file/:file_id", 
keyBuilder.userFileKey, fileController.getFile app.post "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.insertFile -app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, express.bodyParser(), fileController.copyFile +app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, bodyParser.json(), fileController.copyFile app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile app.get "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFile @@ -72,7 +73,7 @@ app.post "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey app.get "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFile app.post "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.insertFile -app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, express.bodyParser(), fileController.copyFile +app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, bodyParser.json(), fileController.copyFile app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile app.get "/project/:project_id/size", keyBuilder.publicProjectKey, fileController.directorySize diff --git a/services/filestore/app/coffee/PersistorManager.coffee b/services/filestore/app/coffee/PersistorManager.coffee index aa5c80599d..a3b08420a6 100644 --- a/services/filestore/app/coffee/PersistorManager.coffee +++ b/services/filestore/app/coffee/PersistorManager.coffee @@ -2,7 +2,7 @@ settings = require("settings-sharelatex") logger = require("logger-sharelatex") # assume s3 if none specified -settings.filestore.backend ||= "s3" +settings?.filestore?.backend ||= "s3" logger.log backend:settings.filestore.backend, "Loading backend" diff --git a/services/filestore/package.json b/services/filestore/package.json index 85ea112dd6..897d195b63 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -41,7 +41,8 @@ "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", "stream-buffers": "~0.2.5", "underscore": "~1.5.2", - "mocha": "^4.0.1" + "mocha": "^4.0.1", + "body-parser": "^1.2.0" }, "devDependencies": { "sinon": "", From 09527d804e54ef89c06fd130999e344175b430b5 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 21 May 2018 10:21:13 +0100 Subject: [PATCH 229/555] ignore js acceptence tests --- services/filestore/.gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/.gitignore b/services/filestore/.gitignore index d51931c976..9cb1da9a39 100644 --- a/services/filestore/.gitignore +++ b/services/filestore/.gitignore @@ -56,6 +56,7 @@ public/minjs/ test/unit/js/ test/acceptence/js cluster.js +test/acceptance/js/ user_files/* template_files/* From 768a4cc1adf10442b582a219ab7c74f85e8e3d72 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 21 May 2018 10:34:16 +0100 Subject: [PATCH 230/555] remove console.log from settings --- services/filestore/config/settings.defaults.coffee | 2 -- 1 file changed, 2 deletions(-) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 37800eecc7..e9e24cd355 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -49,5 +49,3 @@ if process.env['HEALTH_CHECK_PROJECT_ID']? 
and process.env['HEALTH_CHECK_FILE_ID file_id: process.env['HEALTH_CHECK_FILE_ID'] module.exports = settings - -console.log module.exports \ No newline at end of file From e3514e84cd9b846bb3d8014ac8512e72995f93f6 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 21 May 2018 11:02:53 +0100 Subject: [PATCH 231/555] fix up mocha migration tests --- .../app/coffee/PersistorManager.coffee | 5 ++--- .../app/coffee/S3PersistorManager.coffee | 2 +- .../coffee/FSPersistorManagerTests.coffee | 5 ++--- .../unit/coffee/PersistorManagerTests.coffee | 4 ++-- .../coffee/S3PersistorManagerTests.coffee | 20 +++++++------------ 5 files changed, 14 insertions(+), 22 deletions(-) diff --git a/services/filestore/app/coffee/PersistorManager.coffee b/services/filestore/app/coffee/PersistorManager.coffee index a3b08420a6..85c9dd83cd 100644 --- a/services/filestore/app/coffee/PersistorManager.coffee +++ b/services/filestore/app/coffee/PersistorManager.coffee @@ -4,9 +4,8 @@ logger = require("logger-sharelatex") # assume s3 if none specified settings?.filestore?.backend ||= "s3" - -logger.log backend:settings.filestore.backend, "Loading backend" -module.exports = switch settings.filestore.backend +logger.log backend:settings?.filestore?.backend, "Loading backend" +module.exports = switch settings?.filestore?.backend when "aws-sdk" require "./AWSSDKPersistorManager" when "s3" diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index b1a03fb4f4..0fae57df96 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -77,7 +77,7 @@ module.exports = if res.statusCode == 404 logger.log bucketName:bucketName, key:key, "file not found in s3" return callback new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}"), null - if res.statusCode not in [200, 206] + else if res.statusCode not in [200, 206] logger.log bucketName:bucketName, key:key, "error getting file from s3: #{res.statusCode}" return callback new Error("Got non-200 response from S3: #{res.statusCode}"), null callback null, res diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index 46def22a05..da59f859b9 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -33,8 +33,6 @@ describe "FSPersistorManagerTests", -> err:-> "response":response "rimraf":@Rimraf - "./Errors": @Errors = - NotFoundError: sinon.stub() @location = "/tmp" @name1 = "530f2407e7ef165704000007/530f838b46d9a9e859000008" @name1Filtered ="530f2407e7ef165704000007_530f838b46d9a9e859000008" @@ -102,6 +100,7 @@ describe "FSPersistorManagerTests", -> @Fs.createReadStream.returns( on: (key, callback) => err = new Error() + err.message = "this is from a test" err.code = @fakeCode callback(err, null) ) @@ -115,7 +114,7 @@ describe "FSPersistorManagerTests", -> @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> expect(res).to.equal null expect(err).to.not.equal null - expect(err instanceof @Errors.NotFoundError).to.equal true + expect(err.name == "NotFoundError").to.equal true done() describe "when some other error happens", -> diff --git a/services/filestore/test/unit/coffee/PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/PersistorManagerTests.coffee index ca7a82cbaa..3c4ca329e1 100644 --- 
a/services/filestore/test/unit/coffee/PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/PersistorManagerTests.coffee @@ -66,14 +66,14 @@ describe "PersistorManagerTests", -> describe "test unspecified mixins", -> it "should load s3 when no wrapper specified", (done) -> - @settings = + @settings = {filestore:{}} @requires = "./S3PersistorManager": @S3PersistorManager "settings-sharelatex": @settings "logger-sharelatex": log:-> err:-> - @PersistorManager=SandboxedModule.require modulePath, requires: @requires + @PersistorManager = SandboxedModule.require modulePath, requires: @requires @PersistorManager.should.respondTo("getFileStream") @PersistorManager.getFileStream() @S3PersistorManager.getFileStream.calledOnce.should.equal true diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 3a3e7b0d86..abc303d131 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -35,8 +35,6 @@ describe "S3PersistorManagerTests", -> "logger-sharelatex": log:-> err:-> - "./Errors": @Errors = - NotFoundError: sinon.stub() @key = "my/key" @bucketName = "my-bucket" @error = "my errror" @@ -84,35 +82,31 @@ describe "S3PersistorManagerTests", -> describe "when the file doesn't exist", -> beforeEach -> - @fakeResponse = - statusCode: 404 + @bucketName = "mybucket" + @key = "somekey" + @fakeResponse.statusCode = 404 it "should produce a NotFoundError", (done) -> @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback expect(stream).to.equal null expect(err).to.not.equal null - expect(err instanceof @Errors.NotFoundError).to.equal true + expect(err.name == "NotFoundError").to.equal true done() it "should have bucket and key in the Error message", (done) -> @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback - error_message = @Errors.NotFoundError.lastCall.args[0] - expect(error_message).to.not.equal null - error_message.should.match(new RegExp(".*#{@bucketName}.*")) - error_message.should.match(new RegExp(".*#{@key}.*")) + expect(err.message).to.not.equal null + err.message.should.match(new RegExp(".*#{@bucketName}.*")) + err.message.should.match(new RegExp(".*#{@key}.*")) done() describe "when the S3 service produces an error", -> - beforeEach -> - @fakeResponse = - statusCode: 500 it "should produce an error", (done) -> @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback expect(stream).to.equal null expect(err).to.not.equal null expect(err instanceof Error).to.equal true - @Errors.NotFoundError.called.should.equal false done() describe "sendFile", -> From 6657b6c1dc9f6ac11fbd2c57fc7b9c951f4379d7 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 21 May 2018 11:13:52 +0100 Subject: [PATCH 232/555] make all needed directories in install deps --- services/filestore/install_deps.sh | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/services/filestore/install_deps.sh b/services/filestore/install_deps.sh index 52b6bde59d..2bf275b1cb 100755 --- a/services/filestore/install_deps.sh +++ b/services/filestore/install_deps.sh @@ -7,8 +7,9 @@ wget -q https://s3.amazonaws.com/sl-public-dev-assets/ghostscript-9.15.tar.gz -O cd /tmp tar -xvf /tmp/ghostscript-9.15.tar.gz cd /tmp/ghostscript-9.15 && ./configure && make && make install - npm rebuild +mkdir /app/user_files/ 
/app/uploads/ /app/template_files/ +chown -R node:node /app/user_files chown -R node:node /app/uploads - -echo gs --version \ No newline at end of file +chown -R node:node /app/template_files +ls -al /app \ No newline at end of file From 8efd562eb3a5f5af9df9dfed6a7cbd21226e7d1c Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 21 May 2018 13:40:03 +0100 Subject: [PATCH 233/555] fix broken acceptence tests, bad quotes in file path --- services/filestore/app/coffee/FileHandler.coffee | 2 ++ .../filestore/test/acceptance/coffee/SendingFileTest.coffee | 6 +++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 93cad984dd..442ac13cab 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -76,6 +76,8 @@ module.exports = LocalFileWriter.deleteFile originalFsPath, -> callback(err, destPath, originalFsPath) + logger.log opts:opts, "converting file depending on opts" + if opts.format? FileConverter.convert originalFsPath, opts.format, done else if opts.style == "thumbnail" diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee index 1b20e6b44f..cee06e82a3 100644 --- a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee @@ -136,18 +136,18 @@ describe "Filestore", -> describe "getting the preview image", -> beforeEach -> - @fileUrl = @fileUrl + '?style=preview' + @previewFileUrl = "#{@fileUrl}?style=preview" it "should not time out", (done) -> @timeout(1000 * 20) - request.get @fileUrl, (err, response, body) => + request.get @previewFileUrl, (err, response, body) => expect(response).to.not.equal null done() it "should respond with image data", (done) -> # note: this test relies of the imagemagick conversion working @timeout(1000 * 20) - request.get @fileUrl, (err, response, body) => + request.get @previewFileUrl, (err, response, body) => expect(response.statusCode).to.equal 200 expect(body.length).to.be.greaterThan 400 done() From ee81a6af00ecf888f70170ae8cbc81030497a242 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 21 May 2018 15:00:32 +0100 Subject: [PATCH 234/555] updated mocha and added --exit back in --- services/filestore/npm-shrinkwrap.json | 2268 +++++++++++++++--------- services/filestore/package.json | 6 +- 2 files changed, 1433 insertions(+), 841 deletions(-) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index a0bcfff9ff..6f8d9e3f17 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -5,1021 +5,1396 @@ "requires": true, "dependencies": { "abbrev": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", - "dev": true + "version": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha1-+PLIh60Qv2f2NPAFtph/7TF5qsg=" }, "accept-encoding": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz", + "version": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz", "integrity": "sha1-XdiLjfcfHcLlzGuVZezOHjmaMz4=" }, + "ajv": { + "version": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", + 
"integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=", + "dev": true, + "requires": { + "co": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "fast-deep-equal": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", + "fast-json-stable-stringify": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", + "json-schema-traverse": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz" + } + }, "ansi-styles": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.0.0.tgz", + "version": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.0.0.tgz", "integrity": "sha1-yxAt8cVvUSPquLZ817mAJ6AnkXg=", "dev": true }, "argparse": { - "version": "0.1.16", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-0.1.16.tgz", + "version": "https://registry.npmjs.org/argparse/-/argparse-0.1.16.tgz", "integrity": "sha1-z9AeD7uj1srtBJ+9dY1A9lGW9Xw=", "dev": true, "requires": { - "underscore": "1.5.2", - "underscore.string": "2.4.0" + "underscore": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", + "underscore.string": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.4.0.tgz" }, "dependencies": { + "underscore": { + "version": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", + "integrity": "sha1-a7rwh3UA02vjTsqlhODbn+8DUgk=", + "dev": true + }, "underscore.string": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.4.0.tgz", + "version": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.4.0.tgz", "integrity": "sha1-jN2PusTi0uoefi6Al8QvRCKA+Fs=", "dev": true } } }, + "asn1": { + "version": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", + "integrity": "sha1-2sh4dxPJlmhJ/IGAd36+nB3fO4Y=", + "dev": true + }, + "assert-plus": { + "version": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "dev": true + }, "assertion-error": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", - "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==" + "version": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha1-5gtrDo8wG9l+U3UhW9pAbIURjAs=" }, "async": { - "version": "0.2.10", - "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "version": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", "integrity": "sha1-trvgsGdLnXGXCMo43owjfLUmw9E=" }, + "asynckit": { + "version": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "dev": true + }, "aws-sdk": { - "version": "2.116.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.116.0.tgz", + "version": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.116.0.tgz", "integrity": "sha1-2UpsZnvuY++PvZRBzMIitTn3y+w=", "requires": { - "buffer": "4.9.1", - "crypto-browserify": "1.0.9", - "events": "1.1.1", - "jmespath": "0.15.0", - "querystring": "0.2.0", - "sax": "1.2.1", - "url": "0.10.3", - "uuid": "3.0.1", - "xml2js": "0.4.17", - "xmlbuilder": "4.2.1" + "buffer": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", + "crypto-browserify": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-1.0.9.tgz", + "events": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", + "jmespath": 
"https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", + "querystring": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "sax": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "url": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "uuid": "https://registry.npmjs.org/uuid/-/uuid-3.0.1.tgz", + "xml2js": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.17.tgz", + "xmlbuilder": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-4.2.1.tgz" }, "dependencies": { "uuid": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.1.tgz", + "version": "https://registry.npmjs.org/uuid/-/uuid-3.0.1.tgz", "integrity": "sha1-ZUS7ot/ajBzxfmKaOjBeK7H+5sE=" } } }, "aws-sign": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.0.tgz", + "version": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.0.tgz", "integrity": "sha1-xVAThWyBlOyFSgy+yQqrWgTOOsU=" }, + "aws-sign2": { + "version": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", + "dev": true + }, + "aws4": { + "version": "https://registry.npmjs.org/aws4/-/aws4-1.7.0.tgz", + "integrity": "sha1-1NDpudv8p3vwjusKikcVUP454ok=", + "dev": true + }, "balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "version": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" }, "base64-js": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.2.1.tgz", - "integrity": "sha512-dwVUVIXsBZXwTuwnXI9RK8sBmgq09NDHzyR9SAph9eqk76gKK2JSQmZARC2zRC81JC2QTtxD0ARU5qTS25gIGw==" + "version": "https://registry.npmjs.org/base64-js/-/base64-js-1.2.1.tgz", + "integrity": "sha1-qRlH2h9KUW6jjltOwOw3c2deCIY=" }, "batch": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/batch/-/batch-0.5.0.tgz", + "version": "https://registry.npmjs.org/batch/-/batch-0.5.0.tgz", "integrity": "sha1-/S4Fp6XWlrTbkxQBPihdj/NVfsM=" }, + "bcrypt-pbkdf": { + "version": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz", + "integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=", + "dev": true, + "optional": true, + "requires": { + "tweetnacl": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz" + } + }, "best-encoding": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz", + "version": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz", "integrity": "sha1-GVIT2rysBFgYuAe3ox+Dn63cl04=", "requires": { - "accept-encoding": "0.1.0" + "accept-encoding": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz" } }, "bl": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz", + "version": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz", "integrity": "sha1-P7BnBgKsKHjrdw3CA58YNr5irls=", "requires": { - "readable-stream": "1.0.34" + "readable-stream": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz" }, "dependencies": { "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "version": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" }, "readable-stream": { - "version": "1.0.34", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "version": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", "requires": { - "core-util-is": "1.0.2", - "inherits": "2.0.3", - "isarray": "0.0.1", - "string_decoder": "0.10.31" + "core-util-is": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "isarray": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "string_decoder": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + } + } + } + }, + "body-parser": { + "version": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.3.tgz", + "integrity": "sha1-WykhmP/dVTs6DyDe0FkrlWlVyLQ=", + "requires": { + "bytes": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "content-type": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", + "debug": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "depd": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "http-errors": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", + "iconv-lite": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz", + "on-finished": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "qs": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "raw-body": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.3.tgz", + "type-is": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz" + }, + "dependencies": { + "bytes": { + "version": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" + }, + "debug": { + "version": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha1-XRKFFd8TT/Mn6QpMk/Tgd6U2NB8=", + "requires": { + "ms": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" + } + }, + "iconv-lite": { + "version": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz", + "integrity": "sha1-KXhx9jvlB63Pv8pxXQzQ7thOmmM=", + "requires": { + "safer-buffer": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" + } + }, + "qs": { + "version": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha1-yzroBuh0BERYTvFUzo7pjUA/PjY=" + }, + "raw-body": { + "version": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.3.tgz", + "integrity": "sha1-GzJOzmtXBuFThVvBFIxlu39uoMM=", + "requires": { + "bytes": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "http-errors": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", + "iconv-lite": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz", + "unpipe": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" } } } }, "boom": { - "version": "0.3.8", - "resolved": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz", + "version": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz", "integrity": "sha1-yM2wQUNZEnQWKMBE7Mcy0dF8Ceo=", "requires": { - "hoek": "0.7.6" + "hoek": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz" } }, "brace-expansion": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz", + "version": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz", "integrity": "sha1-wHshHHyVLsH479Uad+8NHTmQopI=", "requires": { - "balanced-match": "1.0.0", - "concat-map": "0.0.1" + "balanced-match": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "concat-map": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" } }, "broadway": { - "version": "0.3.6", - 
"resolved": "https://registry.npmjs.org/broadway/-/broadway-0.3.6.tgz", + "version": "https://registry.npmjs.org/broadway/-/broadway-0.3.6.tgz", "integrity": "sha1-fb7waLlUt5B5Jf1USWO1eKkCuno=", "dev": true, "requires": { - "cliff": "0.1.9", - "eventemitter2": "0.4.14", - "nconf": "0.6.9", - "utile": "0.2.1", - "winston": "0.8.0" + "cliff": "https://registry.npmjs.org/cliff/-/cliff-0.1.9.tgz", + "eventemitter2": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz", + "nconf": "https://registry.npmjs.org/nconf/-/nconf-0.6.9.tgz", + "utile": "https://registry.npmjs.org/utile/-/utile-0.2.1.tgz", + "winston": "https://registry.npmjs.org/winston/-/winston-0.8.0.tgz" }, "dependencies": { "cliff": { - "version": "0.1.9", - "resolved": "https://registry.npmjs.org/cliff/-/cliff-0.1.9.tgz", + "version": "https://registry.npmjs.org/cliff/-/cliff-0.1.9.tgz", "integrity": "sha1-ohHgnGo947oa8n0EnTASUNGIErw=", "dev": true, "requires": { - "colors": "0.6.2", - "eyes": "0.1.8", - "winston": "0.8.0" + "colors": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", + "eyes": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", + "winston": "https://registry.npmjs.org/winston/-/winston-0.8.0.tgz" } }, "winston": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/winston/-/winston-0.8.0.tgz", + "version": "https://registry.npmjs.org/winston/-/winston-0.8.0.tgz", "integrity": "sha1-YdCDD6aZcGISIGsKK1ymmpMENmg=", "dev": true, "requires": { - "async": "0.2.10", - "colors": "0.6.2", - "cycle": "1.0.3", - "eyes": "0.1.8", - "pkginfo": "0.3.1", - "stack-trace": "0.0.7" + "async": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "colors": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", + "cycle": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz", + "eyes": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", + "pkginfo": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", + "stack-trace": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.7.tgz" } } } }, + "browser-stdout": { + "version": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=" + }, "buffer": { - "version": "4.9.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", + "version": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", "integrity": "sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=", "requires": { - "base64-js": "1.2.1", - "ieee754": "1.1.8", - "isarray": "1.0.0" + "base64-js": "https://registry.npmjs.org/base64-js/-/base64-js-1.2.1.tgz", + "ieee754": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz", + "isarray": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" } }, "buffer-crc32": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.1.tgz", + "version": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.1.tgz", "integrity": "sha1-vj5TgvwCttYySVasGvmKqYsIU0w=" }, "bunyan": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.3.6.tgz", + "version": "https://registry.npmjs.org/bunyan/-/bunyan-1.3.6.tgz", "integrity": "sha1-xDM3p7Dc5mPYIjNnurBRlbkLR2o=", "requires": { - "dtrace-provider": "0.4.0", - "mv": "2.1.1", - "safe-json-stringify": "1.0.4" + "dtrace-provider": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.4.0.tgz", + "mv": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "safe-json-stringify": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.4.tgz" } }, 
"bytes": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-0.2.1.tgz", + "version": "https://registry.npmjs.org/bytes/-/bytes-0.2.1.tgz", "integrity": "sha1-VVsIq8sGP4l1kFMCUj5M1P/f3zE=" }, "caseless": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz", + "version": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz", "integrity": "sha1-U06XkWOH07cGtk/eu6xGQ4RQk08=" }, "chai": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chai/-/chai-4.1.2.tgz", + "version": "https://registry.npmjs.org/chai/-/chai-4.1.2.tgz", "integrity": "sha1-D2RYS6ZC8PKs4oBiefTwbKI61zw=", "requires": { - "assertion-error": "1.1.0", - "check-error": "1.0.2", - "deep-eql": "3.0.1", - "get-func-name": "2.0.0", - "pathval": "1.1.0", - "type-detect": "4.0.5" + "assertion-error": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "check-error": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "deep-eql": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "get-func-name": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "pathval": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz", + "type-detect": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.5.tgz" } }, "chalk": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-0.4.0.tgz", + "version": "https://registry.npmjs.org/chalk/-/chalk-0.4.0.tgz", "integrity": "sha1-UZmj3c0MHv4jvAjBsCewYXbgxk8=", "dev": true, "requires": { - "ansi-styles": "1.0.0", - "has-color": "0.1.7", - "strip-ansi": "0.1.1" + "ansi-styles": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.0.0.tgz", + "has-color": "https://registry.npmjs.org/has-color/-/has-color-0.1.7.tgz", + "strip-ansi": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.1.1.tgz" } }, "check-error": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "version": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=" }, "cliff": { - "version": "0.1.10", - "resolved": "https://registry.npmjs.org/cliff/-/cliff-0.1.10.tgz", + "version": "https://registry.npmjs.org/cliff/-/cliff-0.1.10.tgz", "integrity": "sha1-U74z6p9ZvshWCe4wCsQgdgPlIBM=", "dev": true, "requires": { - "colors": "1.0.3", - "eyes": "0.1.8", - "winston": "0.8.3" + "colors": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", + "eyes": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", + "winston": "https://registry.npmjs.org/winston/-/winston-0.8.3.tgz" }, "dependencies": { "colors": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", + "version": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", "integrity": "sha1-BDP0TYCWgP3rYO0mDxsMJi6CpAs=", "dev": true } } }, + "co": { + "version": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=", + "dev": true + }, "coffee-script": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz", + "version": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz", "integrity": "sha1-YplqhheAx15tUGnROCJyO3NAS/w=", "requires": { - "mkdirp": "0.3.5" + "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" } }, "colors": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", + "version": 
"https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", "integrity": "sha1-JCP+ZnisDF2uiFLl0OW+CMmXq8w=", "dev": true }, "combined-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", + "version": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", "integrity": "sha1-ATfmV7qlp1QcV6w3rF/AfXO03B8=", "requires": { - "delayed-stream": "0.0.5" + "delayed-stream": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" } }, "commander": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/commander/-/commander-1.3.2.tgz", + "version": "https://registry.npmjs.org/commander/-/commander-1.3.2.tgz", "integrity": "sha1-io8w7GcKb91kr1LxkUuQfXnq1bU=", "requires": { - "keypress": "0.1.0" + "keypress": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz" } }, "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "version": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, "configstore": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/configstore/-/configstore-0.3.2.tgz", + "version": "https://registry.npmjs.org/configstore/-/configstore-0.3.2.tgz", "integrity": "sha1-JeTBbDdoq/dcWmW8YXYfSVBVtFk=", "dev": true, "requires": { - "graceful-fs": "4.1.11", - "js-yaml": "3.10.0", - "mkdirp": "0.3.5", - "object-assign": "2.1.1", - "osenv": "0.1.4", - "user-home": "1.1.1", - "uuid": "2.0.3", - "xdg-basedir": "1.0.1" + "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.11.tgz", + "js-yaml": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz", + "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "object-assign": "https://registry.npmjs.org/object-assign/-/object-assign-2.1.1.tgz", + "osenv": "https://registry.npmjs.org/osenv/-/osenv-0.1.4.tgz", + "user-home": "https://registry.npmjs.org/user-home/-/user-home-1.1.1.tgz", + "uuid": "https://registry.npmjs.org/uuid/-/uuid-2.0.3.tgz", + "xdg-basedir": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-1.0.1.tgz" }, "dependencies": { "argparse": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", + "version": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", "integrity": "sha1-c9g7wmP4bpf4zE9rrhsOkKfSLIY=", "dev": true, "requires": { - "sprintf-js": "1.0.3" + "sprintf-js": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" } }, "esprima": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz", - "integrity": "sha512-oftTcaMu/EGrEIu904mWteKIv8vMuOgGYo7EhVJJN00R/EED9DCua/xxHRdYnKtcECzVg7xOWhflvJMnqcFZjw==", + "version": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz", + "integrity": "sha1-RJnt3NERDgshi6zy+n9/WfVcqAQ=", "dev": true }, - "js-yaml": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz", - "integrity": "sha512-O2v52ffjLa9VeM43J4XocZE//WT9N0IiwDa3KSHH7Tu8CtH+1qM8SIZvnsTh6v+4yFy5KUY3BHUVwjpfAWsjIA==", + "graceful-fs": { + "version": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.11.tgz", + "integrity": "sha1-dhPHeKGv6mLyXGMKCG1/Osu92Bg=", "dev": true, "requires": { - "argparse": "1.0.9", - "esprima": "4.0.0" + "natives": "https://registry.npmjs.org/natives/-/natives-1.1.3.tgz" + } + }, + "js-yaml": { + "version": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz", + 
"integrity": "sha1-LnhEFka9RoLpY/IrbpKCPDCcYtw=", + "dev": true, + "requires": { + "argparse": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz", + "esprima": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz" + } + }, + "mkdirp": { + "version": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "dev": true, + "requires": { + "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" } } } }, "connect": { - "version": "2.12.0", - "resolved": "https://registry.npmjs.org/connect/-/connect-2.12.0.tgz", + "version": "https://registry.npmjs.org/connect/-/connect-2.12.0.tgz", "integrity": "sha1-Mdj6DcrN8ZCNgivSkjvootKn7Zo=", "requires": { - "batch": "0.5.0", - "buffer-crc32": "0.2.1", - "bytes": "0.2.1", - "cookie": "0.1.0", - "cookie-signature": "1.0.1", - "debug": "0.8.1", - "fresh": "0.2.0", - "methods": "0.1.0", - "multiparty": "2.2.0", - "negotiator": "0.3.0", - "pause": "0.0.1", - "qs": "0.6.6", - "raw-body": "1.1.2", - "send": "0.1.4", - "uid2": "0.0.3" + "batch": "https://registry.npmjs.org/batch/-/batch-0.5.0.tgz", + "buffer-crc32": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.1.tgz", + "bytes": "https://registry.npmjs.org/bytes/-/bytes-0.2.1.tgz", + "cookie": "https://registry.npmjs.org/cookie/-/cookie-0.1.0.tgz", + "cookie-signature": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.1.tgz", + "debug": "https://registry.npmjs.org/debug/-/debug-0.8.1.tgz", + "fresh": "https://registry.npmjs.org/fresh/-/fresh-0.2.0.tgz", + "methods": "https://registry.npmjs.org/methods/-/methods-0.1.0.tgz", + "multiparty": "https://registry.npmjs.org/multiparty/-/multiparty-2.2.0.tgz", + "negotiator": "https://registry.npmjs.org/negotiator/-/negotiator-0.3.0.tgz", + "pause": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz", + "qs": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", + "raw-body": "https://registry.npmjs.org/raw-body/-/raw-body-1.1.2.tgz", + "send": "https://registry.npmjs.org/send/-/send-0.1.4.tgz", + "uid2": "https://registry.npmjs.org/uid2/-/uid2-0.0.3.tgz" } }, + "content-type": { + "version": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", + "integrity": "sha1-4TjMdeBAxyexlm/l5fjJruJW/js=" + }, "cookie": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.1.0.tgz", + "version": "https://registry.npmjs.org/cookie/-/cookie-0.1.0.tgz", "integrity": "sha1-kOtGndzpBchm3mh+/EMTHYgB+dA=" }, "cookie-jar": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz", + "version": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz", "integrity": "sha1-ZOzAasl423leS1KQy+SLo3gUAPo=" }, "cookie-signature": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.1.tgz", + "version": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.1.tgz", "integrity": "sha1-ROByFIrwHm6OJK+/EmkNaK5pjss=" }, "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "version": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, "cryptiles": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz", + "version": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz", "integrity": "sha1-GlVnNPBtJLo0hirpy55wmjr7/xw=", "requires": { - "boom": "0.3.8" + 
"boom": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz" } }, "crypto-browserify": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-1.0.9.tgz", + "version": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-1.0.9.tgz", "integrity": "sha1-zFRJaF37hesRyYKKzHy4erW7/MA=" }, "cycle": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz", + "version": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz", "integrity": "sha1-IegLK+hYD5i0aPN5QwZisEbDStI=", "dev": true }, + "dashdash": { + "version": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "dev": true, + "requires": { + "assert-plus": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + } + }, "dateformat": { - "version": "1.0.2-1.2.3", - "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz", + "version": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz", "integrity": "sha1-sCIMAt6YYXQztyhRz0fePfLNvuk=", "dev": true }, "debug": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-0.8.1.tgz", + "version": "https://registry.npmjs.org/debug/-/debug-0.8.1.tgz", "integrity": "sha1-IP9NJvXkIstoobrLu2EDmtjBwTA=" }, "deep-eql": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", - "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "version": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha1-38lARACtHI/gI+faHfHBR8S0RN8=", "requires": { - "type-detect": "4.0.5" + "type-detect": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.5.tgz" } }, "deep-equal": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.0.1.tgz", + "version": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.0.1.tgz", "integrity": "sha1-9dJgKStmDghO/0zbyfCK0yR0SLU=", "dev": true }, "delayed-stream": { - "version": "0.0.5", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", + "version": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", "integrity": "sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8=" }, + "depd": { + "version": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" + }, "diff": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/diff/-/diff-1.0.7.tgz", - "integrity": "sha1-JLuwAcSn1VIhaefKvbLCgU7ZHPQ=", - "dev": true + "version": "https://registry.npmjs.org/diff/-/diff-1.0.7.tgz", + "integrity": "sha1-JLuwAcSn1VIhaefKvbLCgU7ZHPQ=" }, "director": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/director/-/director-1.2.7.tgz", + "version": "https://registry.npmjs.org/director/-/director-1.2.7.tgz", "integrity": "sha1-v9N0EHX9f7GlsuE2WMX0vsd3NvM=", "dev": true }, "dtrace-provider": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.4.0.tgz", + "version": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.4.0.tgz", "integrity": "sha1-C2e8HMd+eb+IuHrSBmT0p1POPyY=", "optional": true, "requires": { - "nan": "1.5.3" + "nan": "https://registry.npmjs.org/nan/-/nan-1.5.3.tgz" } }, + "ecc-jsbn": { + "version": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", + "integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=", + "dev": true, + "optional": true, 
+ "requires": { + "jsbn": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz" + } + }, + "ee-first": { + "version": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" + }, + "escape-string-regexp": { + "version": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" + }, "esprima": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz", + "version": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz", "integrity": "sha1-n1V+CPw7TSbs6d00+Pv0drYlha0=", "dev": true }, "event-stream": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/event-stream/-/event-stream-0.5.3.tgz", + "version": "https://registry.npmjs.org/event-stream/-/event-stream-0.5.3.tgz", "integrity": "sha1-t3uTCfcQet3+q2PwwOr9jbC9jBw=", "dev": true, "requires": { - "optimist": "0.2.8" + "optimist": "https://registry.npmjs.org/optimist/-/optimist-0.2.8.tgz" }, "dependencies": { "optimist": { - "version": "0.2.8", - "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.2.8.tgz", + "version": "https://registry.npmjs.org/optimist/-/optimist-0.2.8.tgz", "integrity": "sha1-6YGrfiaLRXlIWTtVZ0wJmoFcrDE=", "dev": true, "requires": { - "wordwrap": "0.0.3" + "wordwrap": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" } } } }, "eventemitter2": { - "version": "0.4.14", - "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz", + "version": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz", "integrity": "sha1-j2G3XN4BKy6esoTUVFWDtWQ7Yas=", "dev": true }, "events": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", + "version": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=" }, "express": { - "version": "3.4.8", - "resolved": "https://registry.npmjs.org/express/-/express-3.4.8.tgz", + "version": "https://registry.npmjs.org/express/-/express-3.4.8.tgz", "integrity": "sha1-qnqJht4HBTM39Lxe2aZFPZzI4uE=", "requires": { - "buffer-crc32": "0.2.1", - "commander": "1.3.2", - "connect": "2.12.0", - "cookie": "0.1.0", - "cookie-signature": "1.0.1", - "debug": "0.8.1", - "fresh": "0.2.0", - "merge-descriptors": "0.0.1", - "methods": "0.1.0", - "mkdirp": "0.3.5", - "range-parser": "0.0.4", - "send": "0.1.4" + "buffer-crc32": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.1.tgz", + "commander": "https://registry.npmjs.org/commander/-/commander-1.3.2.tgz", + "connect": "https://registry.npmjs.org/connect/-/connect-2.12.0.tgz", + "cookie": "https://registry.npmjs.org/cookie/-/cookie-0.1.0.tgz", + "cookie-signature": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.1.tgz", + "debug": "https://registry.npmjs.org/debug/-/debug-0.8.1.tgz", + "fresh": "https://registry.npmjs.org/fresh/-/fresh-0.2.0.tgz", + "merge-descriptors": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-0.0.1.tgz", + "methods": "https://registry.npmjs.org/methods/-/methods-0.1.0.tgz", + "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", + "range-parser": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz", + "send": "https://registry.npmjs.org/send/-/send-0.1.4.tgz" }, "dependencies": { "range-parser": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz", + "version": 
"https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz", "integrity": "sha1-wEJ//vUcEKy6B4KkbJYC50T/Ygs=" } } }, + "extend": { + "version": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz", + "integrity": "sha1-p1Xqe8Gt/MWjHOfnYtuq3F5jZEQ=", + "dev": true + }, + "extsprintf": { + "version": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", + "dev": true + }, "eyes": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", + "version": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", "integrity": "sha1-Ys8SAjTGg3hdkCNIqADvPgzCC8A=", "dev": true }, + "fast-deep-equal": { + "version": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", + "integrity": "sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ=", + "dev": true + }, + "fast-json-stable-stringify": { + "version": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", + "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=", + "dev": true + }, "faye-websocket": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.4.4.tgz", + "version": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.4.4.tgz", "integrity": "sha1-wUxbO/FNdBf/v9mQwKdJXNnzN7w=", "dev": true }, "findup-sync": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz", + "version": "https://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz", "integrity": "sha1-fz56l7gjksZTvwZYm9hRkOk8NoM=", "dev": true, "requires": { - "glob": "6.0.4", - "lodash": "4.17.4" + "glob": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz", + "lodash": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + }, + "dependencies": { + "glob": { + "version": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz", + "integrity": "sha1-Spc/Y1uRkPcV0QmH1cAP0oFevj0=", + "dev": true, + "requires": { + "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz" + } + }, + "lodash": { + "version": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz", + "integrity": "sha1-+t2DS5aDBz2hebPq5tnA0VBT9z4=", + "dev": true + }, + "minimatch": { + "version": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz", + "integrity": "sha1-J12O2qxPG7MyZHIInnlJyDlGmd0=", + "dev": true, + "requires": { + "lru-cache": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "sigmund": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" + }, + "dependencies": { + "lru-cache": { + "version": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "integrity": "sha1-bUUk6LlV+V1PW1iFHOId1y+06VI=", + "dev": true + }, + "sigmund": { + "version": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", + "integrity": "sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA=", + "dev": true + } + } + } } }, "flatiron": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/flatiron/-/flatiron-0.4.3.tgz", + "version": "https://registry.npmjs.org/flatiron/-/flatiron-0.4.3.tgz", "integrity": "sha1-JIz3mj2n19w3nioRySonGcu1QPY=", "dev": true, "requires": { - "broadway": "0.3.6", - "director": "1.2.7", - "optimist": "0.6.0", - "prompt": "0.2.14" + "broadway": "https://registry.npmjs.org/broadway/-/broadway-0.3.6.tgz", + "director": "https://registry.npmjs.org/director/-/director-1.2.7.tgz", + "optimist": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz", + "prompt": 
"https://registry.npmjs.org/prompt/-/prompt-0.2.14.tgz" }, "dependencies": { "optimist": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz", + "version": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz", "integrity": "sha1-aUJIJvNAX3nxQub8PZrljU27kgA=", "dev": true, "requires": { - "minimist": "0.0.8", - "wordwrap": "0.0.3" + "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "wordwrap": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" } } } }, "forever": { - "version": "0.14.2", - "resolved": "https://registry.npmjs.org/forever/-/forever-0.14.2.tgz", + "version": "https://registry.npmjs.org/forever/-/forever-0.14.2.tgz", "integrity": "sha1-6Tsr2UxXBavBmxXlTDEz1puinGs=", "dev": true, "requires": { - "cliff": "0.1.10", - "colors": "0.6.2", - "flatiron": "0.4.3", - "forever-monitor": "1.5.2", - "nconf": "0.6.9", - "nssocket": "0.5.3", - "optimist": "0.6.1", - "timespan": "2.3.0", - "utile": "0.2.1", - "winston": "0.8.3" + "cliff": "https://registry.npmjs.org/cliff/-/cliff-0.1.10.tgz", + "colors": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", + "flatiron": "https://registry.npmjs.org/flatiron/-/flatiron-0.4.3.tgz", + "forever-monitor": "https://registry.npmjs.org/forever-monitor/-/forever-monitor-1.5.2.tgz", + "nconf": "https://registry.npmjs.org/nconf/-/nconf-0.6.9.tgz", + "nssocket": "https://registry.npmjs.org/nssocket/-/nssocket-0.5.3.tgz", + "optimist": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "timespan": "https://registry.npmjs.org/timespan/-/timespan-2.3.0.tgz", + "utile": "https://registry.npmjs.org/utile/-/utile-0.2.1.tgz", + "winston": "https://registry.npmjs.org/winston/-/winston-0.8.3.tgz" } }, "forever-agent": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz", + "version": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz", "integrity": "sha1-4cJcetROCcOPIzh2x2/MJP+EOx8=" }, "forever-monitor": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/forever-monitor/-/forever-monitor-1.5.2.tgz", + "version": "https://registry.npmjs.org/forever-monitor/-/forever-monitor-1.5.2.tgz", "integrity": "sha1-J5OI36k7CFNj1rKKgj7wpq7rNdc=", "dev": true, "requires": { - "broadway": "0.3.6", - "minimatch": "3.0.4", - "ps-tree": "0.0.3", - "utile": "0.2.1", - "watch": "0.13.0" + "broadway": "https://registry.npmjs.org/broadway/-/broadway-0.3.6.tgz", + "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-1.0.0.tgz", + "ps-tree": "https://registry.npmjs.org/ps-tree/-/ps-tree-0.0.3.tgz", + "utile": "https://registry.npmjs.org/utile/-/utile-0.2.1.tgz", + "watch": "https://registry.npmjs.org/watch/-/watch-0.13.0.tgz" + }, + "dependencies": { + "minimatch": { + "version": "https://registry.npmjs.org/minimatch/-/minimatch-1.0.0.tgz", + "integrity": "sha1-4N0hILSeG3JM6NcUxSCCKpQ4V20=", + "dev": true, + "requires": { + "lru-cache": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "sigmund": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" + }, + "dependencies": { + "lru-cache": { + "version": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "integrity": "sha1-bUUk6LlV+V1PW1iFHOId1y+06VI=", + "dev": true + }, + "sigmund": { + "version": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", + "integrity": "sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA=", + "dev": true + } + } + } } }, "form-data": { - "version": "0.0.10", - "resolved": 
"https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz", + "version": "https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz", "integrity": "sha1-2zRaU3jYau6x7V1VO4aawZLS9e0=", "requires": { - "async": "0.2.10", - "combined-stream": "0.0.7", - "mime": "1.2.11" + "async": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "combined-stream": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", + "mime": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" } }, "formatio": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/formatio/-/formatio-1.2.0.tgz", + "version": "https://registry.npmjs.org/formatio/-/formatio-1.2.0.tgz", "integrity": "sha1-87IWfZBoxGmKjVH092CjmlTYGOs=", "requires": { - "samsam": "1.3.0" + "samsam": "https://registry.npmjs.org/samsam/-/samsam-1.3.0.tgz" } }, "fresh": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.2.0.tgz", + "version": "https://registry.npmjs.org/fresh/-/fresh-0.2.0.tgz", "integrity": "sha1-v9lALPPfEsSkwxDHn5mj3eE9NKc=" }, "fs-extra": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-1.0.0.tgz", + "version": "https://registry.npmjs.org/fs-extra/-/fs-extra-1.0.0.tgz", "integrity": "sha1-zTzl9+fLYUWIP8rjGR6Yd/hYeVA=", "requires": { - "graceful-fs": "4.1.11", - "jsonfile": "2.4.0", - "klaw": "1.3.1" + "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", + "jsonfile": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", + "klaw": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz" } }, + "fs.realpath": { + "version": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + }, "gaze": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/gaze/-/gaze-0.4.3.tgz", + "version": "https://registry.npmjs.org/gaze/-/gaze-0.4.3.tgz", "integrity": "sha1-5Tj0/15P5kj0c6l+HrslPS3hJ7U=", "dev": true, "requires": { - "globule": "0.1.0" + "globule": "https://registry.npmjs.org/globule/-/globule-0.1.0.tgz" } }, "get-func-name": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "version": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=" }, + "getpass": { + "version": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "dev": true, + "requires": { + "assert-plus": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + } + }, "gettemporaryfilepath": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/gettemporaryfilepath/-/gettemporaryfilepath-0.0.1.tgz", + "version": "https://registry.npmjs.org/gettemporaryfilepath/-/gettemporaryfilepath-0.0.1.tgz", "integrity": "sha1-uKLHAUu1zUFTTpg7XKFgo3RwhGk=" }, "glob": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "version": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", + "optional": true, "requires": { - "inflight": "1.0.6", - "inherits": "2.0.3", - "minimatch": "3.0.4", - "once": "1.4.0", - "path-is-absolute": "1.0.1" + "inflight": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "once": 
"https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "path-is-absolute": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" } }, "globule": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/globule/-/globule-0.1.0.tgz", + "version": "https://registry.npmjs.org/globule/-/globule-0.1.0.tgz", "integrity": "sha1-2cjt3h2nnRJaFRt5UzuXhnY0auU=", "dev": true, "requires": { - "glob": "6.0.4", - "lodash": "4.17.4", - "minimatch": "3.0.4" + "glob": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz", + "lodash": "https://registry.npmjs.org/lodash/-/lodash-1.0.2.tgz", + "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" + }, + "dependencies": { + "glob": { + "version": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz", + "integrity": "sha1-0p4KBV3qUTj00H7UDomC6DwgZs0=", + "dev": true, + "requires": { + "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz", + "inherits": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz", + "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" + } + }, + "graceful-fs": { + "version": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz", + "integrity": "sha1-FaSAaldUfLLS2/J/QuiajDRRs2Q=", + "dev": true + }, + "inherits": { + "version": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz", + "integrity": "sha1-ykMJ2t7mtUzAuNJH6NfHoJdb3Js=", + "dev": true + }, + "lodash": { + "version": "https://registry.npmjs.org/lodash/-/lodash-1.0.2.tgz", + "integrity": "sha1-j1dWDIO1n8JwvT1WG2kAQ0MOJVE=", + "dev": true + }, + "minimatch": { + "version": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", + "integrity": "sha1-x054BXT2PG+aCQ6Q775u9TpqdWo=", + "dev": true, + "requires": { + "lru-cache": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "sigmund": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" + }, + "dependencies": { + "lru-cache": { + "version": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "integrity": "sha1-bUUk6LlV+V1PW1iFHOId1y+06VI=", + "dev": true + }, + "sigmund": { + "version": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", + "integrity": "sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA=", + "dev": true + } + } + } } }, "graceful-fs": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", + "version": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", "integrity": "sha1-Dovf5NHduIVNZOBOp8AOKgJuVlg=" }, "growl": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/growl/-/growl-1.7.0.tgz", - "integrity": "sha1-3i1mE20ALhErpw8/EMMc98NQsto=", - "dev": true + "version": "https://registry.npmjs.org/growl/-/growl-1.7.0.tgz", + "integrity": "sha1-3i1mE20ALhErpw8/EMMc98NQsto=" }, "grunt": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/grunt/-/grunt-0.4.1.tgz", + "version": "https://registry.npmjs.org/grunt/-/grunt-0.4.1.tgz", "integrity": "sha1-1YkuVoCt2e0b796apjXPRrj0lyk=", "dev": true, "requires": { - "async": "0.2.10", - "coffee-script": "1.7.1", - "colors": "0.6.2", - "dateformat": "1.0.2-1.2.3", - "eventemitter2": "0.4.14", - "findup-sync": "0.1.3", - "glob": "6.0.4", - "hooker": "0.2.3", - "iconv-lite": "0.2.11", - "js-yaml": "2.0.5", - "lodash": "4.17.4", - "minimatch": "3.0.4", - "nopt": "1.0.10", - "rimraf": "2.2.8", - "underscore.string": "2.2.1", - "which": "1.0.9" + "async": "https://registry.npmjs.org/async/-/async-0.1.22.tgz", + "coffee-script": 
"https://registry.npmjs.org/coffee-script/-/coffee-script-1.3.3.tgz", + "colors": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", + "dateformat": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz", + "eventemitter2": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz", + "findup-sync": "https://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz", + "glob": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz", + "hooker": "https://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz", + "iconv-lite": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz", + "js-yaml": "https://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz", + "lodash": "https://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz", + "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", + "nopt": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", + "rimraf": "https://registry.npmjs.org/rimraf/-/rimraf-2.0.3.tgz", + "underscore.string": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz", + "which": "https://registry.npmjs.org/which/-/which-1.0.9.tgz" + }, + "dependencies": { + "async": { + "version": "https://registry.npmjs.org/async/-/async-0.1.22.tgz", + "integrity": "sha1-D8GqoIig4+8Ovi2IMbqw3PiEUGE=", + "dev": true + }, + "coffee-script": { + "version": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.3.3.tgz", + "integrity": "sha1-FQ1rTLUiiUNp7+1qIQHCC8f0pPQ=", + "dev": true + }, + "glob": { + "version": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz", + "integrity": "sha1-0p4KBV3qUTj00H7UDomC6DwgZs0=", + "dev": true, + "requires": { + "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz", + "inherits": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz", + "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" + } + }, + "graceful-fs": { + "version": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz", + "integrity": "sha1-FaSAaldUfLLS2/J/QuiajDRRs2Q=", + "dev": true + }, + "inherits": { + "version": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz", + "integrity": "sha1-ykMJ2t7mtUzAuNJH6NfHoJdb3Js=", + "dev": true + }, + "lodash": { + "version": "https://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz", + "integrity": "sha1-jzSZxSRdNG1oLlsNO0B2fgnxqSw=", + "dev": true + }, + "minimatch": { + "version": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", + "integrity": "sha1-x054BXT2PG+aCQ6Q775u9TpqdWo=", + "dev": true, + "requires": { + "lru-cache": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "sigmund": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" + }, + "dependencies": { + "lru-cache": { + "version": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "integrity": "sha1-bUUk6LlV+V1PW1iFHOId1y+06VI=", + "dev": true + }, + "sigmund": { + "version": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", + "integrity": "sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA=", + "dev": true + } + } + }, + "rimraf": { + "version": "https://registry.npmjs.org/rimraf/-/rimraf-2.0.3.tgz", + "integrity": "sha1-9QopZecUTpr9mYmC8V33BnMPVqk=", + "dev": true, + "requires": { + "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.1.14.tgz" + }, + "dependencies": { + "graceful-fs": { + "version": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.1.14.tgz", + "integrity": "sha1-BweNtfY3f2Mh/Oqu30l94STclGU=", + "dev": true, + "optional": true + } + } + } } }, "grunt-bunyan": { - "version": "0.5.0", - 
"resolved": "https://registry.npmjs.org/grunt-bunyan/-/grunt-bunyan-0.5.0.tgz", + "version": "https://registry.npmjs.org/grunt-bunyan/-/grunt-bunyan-0.5.0.tgz", "integrity": "sha1-aCnXbgGZQ9owQTk2MaNuKsgpsWw=", "dev": true, "requires": { - "lodash": "4.17.4" + "lodash": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + }, + "dependencies": { + "lodash": { + "version": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz", + "integrity": "sha1-+t2DS5aDBz2hebPq5tnA0VBT9z4=", + "dev": true + } } }, "grunt-concurrent": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/grunt-concurrent/-/grunt-concurrent-0.4.2.tgz", + "version": "https://registry.npmjs.org/grunt-concurrent/-/grunt-concurrent-0.4.2.tgz", "integrity": "sha1-Mf2Qbm4X2oTXgLLOZNn4QGX3PgY=", "dev": true, "requires": { - "async": "0.2.10", - "lpad": "0.1.0" + "async": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "lpad": "https://registry.npmjs.org/lpad/-/lpad-0.1.0.tgz" } }, "grunt-contrib-clean": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/grunt-contrib-clean/-/grunt-contrib-clean-0.5.0.tgz", + "version": "https://registry.npmjs.org/grunt-contrib-clean/-/grunt-contrib-clean-0.5.0.tgz", "integrity": "sha1-9T397ghJsce0Dp67umn0jExgecU=", "dev": true, "requires": { - "rimraf": "2.2.8" + "rimraf": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" } }, "grunt-contrib-coffee": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/grunt-contrib-coffee/-/grunt-contrib-coffee-0.7.0.tgz", + "version": "https://registry.npmjs.org/grunt-contrib-coffee/-/grunt-contrib-coffee-0.7.0.tgz", "integrity": "sha1-ixIme3TnM4sfKcW4txj7n4mYLxM=", "dev": true, "requires": { - "coffee-script": "1.7.1" + "coffee-script": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.3.tgz" + }, + "dependencies": { + "coffee-script": { + "version": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.3.tgz", + "integrity": "sha1-Y1XTLPGwTN/2tITl5xF4Ky8MOb4=", + "dev": true + } } }, "grunt-contrib-requirejs": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/grunt-contrib-requirejs/-/grunt-contrib-requirejs-0.4.1.tgz", + "version": "https://registry.npmjs.org/grunt-contrib-requirejs/-/grunt-contrib-requirejs-0.4.1.tgz", "integrity": "sha1-hiuhZxQbio82r1RE/qsycruM9L0=", "dev": true, "requires": { - "requirejs": "2.1.22" + "requirejs": "https://registry.npmjs.org/requirejs/-/requirejs-2.1.22.tgz" } }, "grunt-contrib-watch": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/grunt-contrib-watch/-/grunt-contrib-watch-0.5.3.tgz", + "version": "https://registry.npmjs.org/grunt-contrib-watch/-/grunt-contrib-watch-0.5.3.tgz", "integrity": "sha1-fZ61Rl1Qb6FPqspH5uh5CoLBye4=", "dev": true, "requires": { - "gaze": "0.4.3", - "tiny-lr": "0.0.4" + "gaze": "https://registry.npmjs.org/gaze/-/gaze-0.4.3.tgz", + "tiny-lr": "https://registry.npmjs.org/tiny-lr/-/tiny-lr-0.0.4.tgz" } }, "grunt-execute": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/grunt-execute/-/grunt-execute-0.2.2.tgz", + "version": "https://registry.npmjs.org/grunt-execute/-/grunt-execute-0.2.2.tgz", "integrity": "sha1-TpRf5XlZzA3neZCDtrQq7ZYWNQo=", "dev": true }, "grunt-forever": { - "version": "0.4.7", - "resolved": "https://registry.npmjs.org/grunt-forever/-/grunt-forever-0.4.7.tgz", + "version": "https://registry.npmjs.org/grunt-forever/-/grunt-forever-0.4.7.tgz", "integrity": "sha1-dHDb4a2hFFAhZKTCoAOHXfj+EzA=", "dev": true, "requires": { - "forever": "0.14.2" + "forever": 
"https://registry.npmjs.org/forever/-/forever-0.14.2.tgz" } }, "grunt-mocha-test": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/grunt-mocha-test/-/grunt-mocha-test-0.8.2.tgz", + "version": "https://registry.npmjs.org/grunt-mocha-test/-/grunt-mocha-test-0.8.2.tgz", "integrity": "sha1-emGEuYhg0Phb3qrWvqob199bvus=", - "dev": true, "requires": { - "mocha": "1.14.0" + "mocha": "https://registry.npmjs.org/mocha/-/mocha-1.14.0.tgz" + }, + "dependencies": { + "commander": { + "version": "https://registry.npmjs.org/commander/-/commander-2.0.0.tgz", + "integrity": "sha1-0bhvkB+LZL2UG96tr5JFMDk76Sg=" + }, + "glob": { + "version": "https://registry.npmjs.org/glob/-/glob-3.2.3.tgz", + "integrity": "sha1-4xPusknHr/qlxHUoaw4RW1mDlGc=", + "requires": { + "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-2.0.3.tgz", + "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" + } + }, + "graceful-fs": { + "version": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-2.0.3.tgz", + "integrity": "sha1-fNLNsiiko/Nule+mzBQt59GhNtA=" + }, + "minimatch": { + "version": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", + "integrity": "sha1-x054BXT2PG+aCQ6Q775u9TpqdWo=", + "requires": { + "lru-cache": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "sigmund": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" + } + }, + "mocha": { + "version": "https://registry.npmjs.org/mocha/-/mocha-1.14.0.tgz", + "integrity": "sha1-cT223FAAGRqdA1gZXQkIeQ7LYVc=", + "requires": { + "commander": "https://registry.npmjs.org/commander/-/commander-2.0.0.tgz", + "debug": "https://registry.npmjs.org/debug/-/debug-0.8.1.tgz", + "diff": "https://registry.npmjs.org/diff/-/diff-1.0.7.tgz", + "glob": "https://registry.npmjs.org/glob/-/glob-3.2.3.tgz", + "growl": "https://registry.npmjs.org/growl/-/growl-1.7.0.tgz", + "jade": "https://registry.npmjs.org/jade/-/jade-0.26.3.tgz", + "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" + } + } } }, "grunt-nodemon": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/grunt-nodemon/-/grunt-nodemon-0.2.1.tgz", + "version": "https://registry.npmjs.org/grunt-nodemon/-/grunt-nodemon-0.2.1.tgz", "integrity": "sha1-G48kiVKSCX3IFNFgOpfo/sHJJPM=", "dev": true, "requires": { - "nodemon": "1.0.20" + "nodemon": "https://registry.npmjs.org/nodemon/-/nodemon-1.0.20.tgz" + } + }, + "har-schema": { + "version": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", + "dev": true + }, + "har-validator": { + "version": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz", + "integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=", + "dev": true, + "requires": { + "ajv": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", + "har-schema": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz" } }, "has-color": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/has-color/-/has-color-0.1.7.tgz", + "version": "https://registry.npmjs.org/has-color/-/has-color-0.1.7.tgz", "integrity": "sha1-ZxRKUmDDT8PMpnfQQdr1L+e3iy8=", "dev": true }, "has-flag": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz", + "version": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz", "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=" }, "hawk": { - "version": "0.10.2", - "resolved": 
"https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz", + "version": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz", "integrity": "sha1-mzYd7pWpMWQObVBOBWCaj8OsRdI=", "requires": { - "boom": "0.3.8", - "cryptiles": "0.1.3", - "hoek": "0.7.6", - "sntp": "0.1.4" + "boom": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz", + "cryptiles": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz", + "hoek": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz", + "sntp": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz" } }, + "he": { + "version": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", + "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=" + }, "heapdump": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.9.tgz", + "version": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.9.tgz", "integrity": "sha1-A8dOsN9dZ74Jgug0KbqcnSs7f3g=" }, "hoek": { - "version": "0.7.6", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz", + "version": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz", "integrity": "sha1-YPvZBFV1Qc0rh5Wr8wihs3cOFVo=" }, "hooker": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz", + "version": "https://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz", "integrity": "sha1-uDT3I8xKJCqmWWNFnfbZhMXT2Vk=", "dev": true }, + "http-errors": { + "version": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", + "integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=", + "requires": { + "depd": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "setprototypeof": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", + "statuses": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" + } + }, + "http-signature": { + "version": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "dev": true, + "requires": { + "assert-plus": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "jsprim": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", + "sshpk": "https://registry.npmjs.org/sshpk/-/sshpk-1.14.1.tgz" + } + }, "i": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/i/-/i-0.3.6.tgz", + "version": "https://registry.npmjs.org/i/-/i-0.3.6.tgz", "integrity": "sha1-2WyScyB28HJxG2sQ/X1PZa2O4j0=", "dev": true }, "iconv-lite": { - "version": "0.2.11", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz", + "version": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz", "integrity": "sha1-HOYKOleGSiktEyH/RgnKS7llrcg=", "dev": true }, "ieee754": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz", + "version": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz", "integrity": "sha1-vjPUCsEO8ZJnAfbwii2G+/0a0+Q=" }, "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "version": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", "requires": { - "once": "1.4.0", - "wrappy": "1.0.2" + "once": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "wrappy": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" } }, "inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "version": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", 
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" }, "ini": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", - "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==", + "version": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", + "integrity": "sha1-7uJfVtscnsYIXgwid4CD9Zar+Sc=", + "dev": true + }, + "is-typedarray": { + "version": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", "dev": true }, "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "version": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" }, "isstream": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "version": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", "dev": true }, "jade": { - "version": "0.26.3", - "resolved": "https://registry.npmjs.org/jade/-/jade-0.26.3.tgz", + "version": "https://registry.npmjs.org/jade/-/jade-0.26.3.tgz", "integrity": "sha1-jxDXl32NefL2/4YqgbBRPMslaGw=", - "dev": true, "requires": { - "commander": "1.3.2", - "mkdirp": "0.3.5" + "commander": "https://registry.npmjs.org/commander/-/commander-0.6.1.tgz", + "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.0.tgz" + }, + "dependencies": { + "commander": { + "version": "https://registry.npmjs.org/commander/-/commander-0.6.1.tgz", + "integrity": "sha1-+mihT2qUXVTbvlDYzbMyDp47GgY=" + }, + "mkdirp": { + "version": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.0.tgz", + "integrity": "sha1-G79asbqCevI1dRQ0kEJkVfSB/h4=" + } } }, "jmespath": { - "version": "0.15.0", - "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", + "version": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", "integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=" }, "js-yaml": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz", + "version": "https://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz", "integrity": "sha1-olrmUJmZ6X3yeMZxnaEb0Gh3Q6g=", "dev": true, "requires": { - "argparse": "0.1.16", - "esprima": "1.0.4" + "argparse": "https://registry.npmjs.org/argparse/-/argparse-0.1.16.tgz", + "esprima": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz" } }, + "jsbn": { + "version": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", + "dev": true, + "optional": true + }, + "json-schema": { + "version": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", + "dev": true + }, + "json-schema-traverse": { + "version": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", + "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A=", + "dev": true + }, "json-stringify-safe": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz", + "version": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz", "integrity": "sha1-nbew5TDH8onF6MhDKvGRwv91pbM=" }, "jsonfile": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", + "version": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", "requires": { - 
"graceful-fs": "4.1.11" + "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz" + } + }, + "jsprim": { + "version": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", + "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "dev": true, + "requires": { + "assert-plus": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "extsprintf": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "json-schema": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "verror": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz" } }, "just-extend": { - "version": "1.1.27", - "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-1.1.27.tgz", - "integrity": "sha512-mJVp13Ix6gFo3SBAy9U/kL+oeZqzlYYYLQBwXVBlVzIsZwBqGREnOro24oC/8s8aox+rJhtZ2DiQof++IrkA+g==" + "version": "https://registry.npmjs.org/just-extend/-/just-extend-1.1.27.tgz", + "integrity": "sha1-7G55QQ/5FORyZSq/oOYDwD1g6QU=" }, "keypress": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz", + "version": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz", "integrity": "sha1-SjGI1CkbZrT2XtuZ+AaqmuKTWSo=" }, "klaw": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz", + "version": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz", "integrity": "sha1-QIhDO0azsbolnXh4XY6W9zugJDk=", "requires": { - "graceful-fs": "4.1.11" + "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz" } }, "knox": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/knox/-/knox-0.9.2.tgz", + "version": "https://registry.npmjs.org/knox/-/knox-0.9.2.tgz", "integrity": "sha1-NzZZNmniTwJP2vcjtqHcSv2DmnE=", "requires": { - "debug": "1.0.4", - "mime": "1.3.4", - "once": "1.4.0", - "stream-counter": "1.0.0", - "xml2js": "0.4.17" + "debug": "https://registry.npmjs.org/debug/-/debug-1.0.4.tgz", + "mime": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz", + "once": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "stream-counter": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz", + "xml2js": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.17.tgz" }, "dependencies": { "debug": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.4.tgz", + "version": "https://registry.npmjs.org/debug/-/debug-1.0.4.tgz", "integrity": "sha1-W5wla9VLbsAigxdvqKDt5tFUy/g=", "requires": { - "ms": "0.6.2" + "ms": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz" }, "dependencies": { "ms": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", + "version": "https://registry.npmjs.org/ms/-/ms-0.6.2.tgz", "integrity": "sha1-2JwhJMb9wTU9Zai3e/GqxLGTcIw=" } } }, "mime": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz", + "version": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz", "integrity": "sha1-EV+eO2s9rylZmDyzjxSaLUDrXVM=" }, "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "version": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", "requires": { - "wrappy": "1.0.2" + "wrappy": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" }, "dependencies": { "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "version": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" 
} } }, "stream-counter": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz", + "version": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz", "integrity": "sha1-kc8lac5NxQYf6816yyY5SloRR1E=" }, "xml2js": { - "version": "0.4.17", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.17.tgz", + "version": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.17.tgz", "integrity": "sha1-F76T6q4/O3eTWceVtBlwWogX6Gg=", "requires": { - "sax": "1.2.2", - "xmlbuilder": "4.2.1" + "sax": "https://registry.npmjs.org/sax/-/sax-1.2.2.tgz", + "xmlbuilder": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-4.2.1.tgz" }, "dependencies": { "sax": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.2.tgz", + "version": "https://registry.npmjs.org/sax/-/sax-1.2.2.tgz", "integrity": "sha1-/YYxojvHgmvvXYcb24c3jJVkeCg=" }, "xmlbuilder": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-4.2.1.tgz", + "version": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-4.2.1.tgz", "integrity": "sha1-qlijBBoGb5DqoWwvU4n/GfP0YaU=", "requires": { - "lodash": "4.17.4" + "lodash": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz" }, "dependencies": { "lodash": { - "version": "4.17.4", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", + "version": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", "integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4=" } } @@ -1029,555 +1404,607 @@ } }, "lazy": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/lazy/-/lazy-1.0.11.tgz", + "version": "https://registry.npmjs.org/lazy/-/lazy-1.0.11.tgz", "integrity": "sha1-2qBoIGKCVCwIgojpdcKXwa53tpA=", "dev": true }, "lodash": { - "version": "4.17.4", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", + "version": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", "integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4=" }, "lodash.get": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "version": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=" }, "logger-sharelatex": { "version": "git+https://github.com/sharelatex/logger-sharelatex.git#5a3ea8e655f23e76a77bbc207c012d3fc944c8d8", + "integrity": "sha1-n2Pr/oN5cipmntO9qeX7OR5Ezy4=", "requires": { - "bunyan": "1.3.6", - "coffee-script": "1.4.0", - "raven": "0.8.1" + "bunyan": "https://registry.npmjs.org/bunyan/-/bunyan-1.3.6.tgz", + "coffee-script": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.4.0.tgz", + "raven": "https://registry.npmjs.org/raven/-/raven-0.8.1.tgz" }, "dependencies": { "coffee-script": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.4.0.tgz", + "version": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.4.0.tgz", "integrity": "sha1-XjvIqsJsAajie/EHcixWVfWtfTY=" } } }, "lolex": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/lolex/-/lolex-2.3.1.tgz", - "integrity": "sha512-mQuW55GhduF3ppo+ZRUTz1PRjEh1hS5BbqU7d8D0ez2OKxHDod7StPPeAVKisZR5aLkHZjdGWSL42LSONUJsZw==" + "version": "https://registry.npmjs.org/lolex/-/lolex-2.3.1.tgz", + "integrity": "sha1-PSMZiURx6glQ72RpLq0qUxjP82I=" }, "lpad": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/lpad/-/lpad-0.1.0.tgz", + "version": "https://registry.npmjs.org/lpad/-/lpad-0.1.0.tgz", "integrity": 
"sha1-5MYMKROTIcWXDeSTtJauDXdM0qc=", "dev": true }, + "lru-cache": { + "version": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "integrity": "sha1-bUUk6LlV+V1PW1iFHOId1y+06VI=" + }, "lsmod": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-0.0.3.tgz", + "version": "https://registry.npmjs.org/lsmod/-/lsmod-0.0.3.tgz", "integrity": "sha1-F+E9ThrpF1DqVlNUjNiecUetAkQ=" }, "lynx": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", + "version": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", "integrity": "sha1-Mxjc7xaQi4KG6Bisz9sxzXQkj50=", "requires": { - "mersenne": "0.0.4", - "statsd-parser": "0.0.4" + "mersenne": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", + "statsd-parser": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz" } }, + "media-typer": { + "version": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" + }, "merge-descriptors": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-0.0.1.tgz", + "version": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-0.0.1.tgz", "integrity": "sha1-L/CYDJJM+B0LXR+2ARd8uLtWwNA=" }, "mersenne": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", + "version": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" }, "methods": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/methods/-/methods-0.1.0.tgz", + "version": "https://registry.npmjs.org/methods/-/methods-0.1.0.tgz", "integrity": "sha1-M11Cnu/SG3us8unJIqjSvRSjDk8=" }, "metrics-sharelatex": { "version": "git+https://github.com/sharelatex/metrics-sharelatex.git#080c4aeb696edcd5d6d86f202f2c528f0661d7a6", + "integrity": "sha1-t7F6ccpirqby9zwCId81YpksE1M=", "requires": { - "coffee-script": "1.6.0", - "lynx": "0.1.1", - "underscore": "1.6.0" + "coffee-script": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", + "lynx": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", + "underscore": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz" }, "dependencies": { "coffee-script": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", + "version": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" }, "underscore": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", + "version": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" } } }, "mime": { - "version": "1.2.11", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "version": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" }, - "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "mime-db": { + "version": "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz", + "integrity": "sha1-o0kgUKXLm2NFBUHjnZeI0icng9s=" + }, + "mime-types": { + "version": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz", + "integrity": "sha1-bzI/YKg9ERRvgx/xH9ZuL+VQO7g=", "requires": { - 
"brace-expansion": "1.1.8" + "mime-db": "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz" + } + }, + "minimatch": { + "version": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", + "requires": { + "brace-expansion": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz" } }, "minimist": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "version": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" }, "mkdirp": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", + "version": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", "integrity": "sha1-3j5fiWHIjHh+4TaN+EmsRBPsqNc=" }, "mocha": { - "version": "1.14.0", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-1.14.0.tgz", - "integrity": "sha1-cT223FAAGRqdA1gZXQkIeQ7LYVc=", - "dev": true, + "version": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", + "integrity": "sha1-bYrlCPWRZ/lA8rWzxKYSrlDJCuY=", "requires": { - "commander": "1.3.2", - "debug": "0.8.1", - "diff": "1.0.7", - "glob": "6.0.4", - "growl": "1.7.0", - "jade": "0.26.3", - "mkdirp": "0.3.5" + "browser-stdout": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "commander": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", + "debug": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "diff": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "escape-string-regexp": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "glob": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", + "growl": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "he": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", + "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "supports-color": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz" + }, + "dependencies": { + "commander": { + "version": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", + "integrity": "sha1-30boZ9D8Kuxmo0ZitAapzK//Ww8=" + }, + "debug": { + "version": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha1-W7WgZyYotkFJVmuhaBnmFRjGcmE=", + "requires": { + "ms": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" + } + }, + "diff": { + "version": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=" + }, + "glob": { + "version": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", + "integrity": "sha1-wZyd+aAocC1nhhI4SmVSQExjbRU=", + "requires": { + "fs.realpath": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "inflight": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "once": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "path-is-absolute": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + } + }, + "growl": { + "version": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha1-8nNdwig2dPpnR4sQGBBZNVw2nl4=" + }, + "has-flag": { + "version": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" + }, + "mkdirp": { + "version": 
"https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "requires": { + "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + } + }, + "supports-color": { + "version": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", + "integrity": "sha1-HGszdALCE3YF7+GfEP7DkPb6q1Q=", + "requires": { + "has-flag": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" + } + } } }, + "ms": { + "version": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, "multiparty": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/multiparty/-/multiparty-2.2.0.tgz", + "version": "https://registry.npmjs.org/multiparty/-/multiparty-2.2.0.tgz", "integrity": "sha1-pWfCrwAK0i3I8qZT2Rl4rh9TFvQ=", "requires": { - "readable-stream": "1.1.14", - "stream-counter": "0.2.0" + "readable-stream": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "stream-counter": "https://registry.npmjs.org/stream-counter/-/stream-counter-0.2.0.tgz" } }, "mute-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "version": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", "dev": true }, "mv": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "version": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=", "optional": true, "requires": { - "mkdirp": "0.5.1", - "ncp": "2.0.0", - "rimraf": "2.4.5" + "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "ncp": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", + "rimraf": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz" }, "dependencies": { "mkdirp": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "version": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", "optional": true, "requires": { - "minimist": "0.0.8" + "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" } }, "rimraf": { - "version": "2.4.5", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "version": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=", "optional": true, "requires": { - "glob": "6.0.4" + "glob": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz" } } } }, "nan": { - "version": "1.5.3", - "resolved": "https://registry.npmjs.org/nan/-/nan-1.5.3.tgz", + "version": "https://registry.npmjs.org/nan/-/nan-1.5.3.tgz", "integrity": "sha1-TNDswTO3sHAKSSpkat1CeuijGOs=", "optional": true }, + "natives": { + "version": "https://registry.npmjs.org/natives/-/natives-1.1.3.tgz", + "integrity": "sha1-RKV5vmRQfqLW7RygSpQVkVz3VVg=", + "dev": true + }, "nconf": { - "version": "0.6.9", - "resolved": "https://registry.npmjs.org/nconf/-/nconf-0.6.9.tgz", + "version": "https://registry.npmjs.org/nconf/-/nconf-0.6.9.tgz", "integrity": "sha1-lXDvFe1vmuays8jV5xtm0xk81mE=", "dev": true, "requires": { - "async": "0.2.10", - "ini": "1.3.5", - "optimist": "0.6.0" + "async": "https://registry.npmjs.org/async/-/async-0.2.9.tgz", + "ini": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", + "optimist": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz" }, "dependencies": { + "async": { + "version": 
"https://registry.npmjs.org/async/-/async-0.2.9.tgz", + "integrity": "sha1-32MGD789Myhqdqr21Vophtn/hhk=", + "dev": true + }, "optimist": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz", + "version": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz", "integrity": "sha1-aUJIJvNAX3nxQub8PZrljU27kgA=", "dev": true, "requires": { - "minimist": "0.0.8", - "wordwrap": "0.0.3" + "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "wordwrap": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" } } } }, "ncp": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", - "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=" + "version": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", + "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=", + "optional": true }, "negotiator": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.3.0.tgz", + "version": "https://registry.npmjs.org/negotiator/-/negotiator-0.3.0.tgz", "integrity": "sha1-cG1pLv7d9XTVfqn7GriaT6fuj2A=" }, "nise": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/nise/-/nise-1.2.0.tgz", - "integrity": "sha512-q9jXh3UNsMV28KeqI43ILz5+c3l+RiNW8mhurEwCKckuHQbL+hTJIKKTiUlCPKlgQ/OukFvSnKB/Jk3+sFbkGA==", + "version": "https://registry.npmjs.org/nise/-/nise-1.2.0.tgz", + "integrity": "sha1-B51srbvLErow448cmZ82rU1rqlM=", "requires": { - "formatio": "1.2.0", - "just-extend": "1.1.27", - "lolex": "1.6.0", - "path-to-regexp": "1.7.0", - "text-encoding": "0.6.4" + "formatio": "https://registry.npmjs.org/formatio/-/formatio-1.2.0.tgz", + "just-extend": "https://registry.npmjs.org/just-extend/-/just-extend-1.1.27.tgz", + "lolex": "https://registry.npmjs.org/lolex/-/lolex-1.6.0.tgz", + "path-to-regexp": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", + "text-encoding": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz" }, "dependencies": { "lolex": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/lolex/-/lolex-1.6.0.tgz", + "version": "https://registry.npmjs.org/lolex/-/lolex-1.6.0.tgz", "integrity": "sha1-OpoCg0UqR9dDnnJzG54H1zhuSfY=" } } }, "node-transloadit": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/node-transloadit/-/node-transloadit-0.0.4.tgz", + "version": "https://registry.npmjs.org/node-transloadit/-/node-transloadit-0.0.4.tgz", "integrity": "sha1-4ZoHheON94NblO2AANHjXmg7zsE=", "requires": { - "request": "2.16.6", - "underscore": "1.2.1" + "request": "https://registry.npmjs.org/request/-/request-2.16.6.tgz", + "underscore": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz" }, "dependencies": { "qs": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz", + "version": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz", "integrity": "sha1-MbGtBYVnZRxSaSFQa5qHk5EaA4Q=" }, "request": { - "version": "2.16.6", - "resolved": "https://registry.npmjs.org/request/-/request-2.16.6.tgz", + "version": "https://registry.npmjs.org/request/-/request-2.16.6.tgz", "integrity": "sha1-hy/kRa5y3iZrN4edatfclI+gHK0=", "requires": { - "aws-sign": "0.2.0", - "cookie-jar": "0.2.0", - "forever-agent": "0.2.0", - "form-data": "0.0.10", - "hawk": "0.10.2", - "json-stringify-safe": "3.0.0", - "mime": "1.2.11", - "node-uuid": "1.4.8", - "oauth-sign": "0.2.0", - "qs": "0.5.6", - "tunnel-agent": "0.2.0" + "aws-sign": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.0.tgz", + "cookie-jar": 
"https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz", + "forever-agent": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz", + "form-data": "https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz", + "hawk": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz", + "json-stringify-safe": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz", + "mime": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "node-uuid": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", + "oauth-sign": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz", + "qs": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz", + "tunnel-agent": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz" } }, "underscore": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz", + "version": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz", "integrity": "sha1-/FxrB2VnPZKi1KyLTcCqiHAuK9Q=" } } }, "node-uuid": { - "version": "1.4.8", - "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", + "version": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", "integrity": "sha1-sEDrCSOWivq/jTL7HxfxFn/auQc=" }, "nodemon": { - "version": "1.0.20", - "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-1.0.20.tgz", + "version": "https://registry.npmjs.org/nodemon/-/nodemon-1.0.20.tgz", "integrity": "sha1-vBOKNwaMt426UIhbYkl6/f7u3aQ=", "dev": true, "requires": { - "minimatch": "3.0.4", - "ps-tree": "0.0.3", - "update-notifier": "0.1.10" + "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", + "ps-tree": "https://registry.npmjs.org/ps-tree/-/ps-tree-0.0.3.tgz", + "update-notifier": "https://registry.npmjs.org/update-notifier/-/update-notifier-0.1.10.tgz" + }, + "dependencies": { + "minimatch": { + "version": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", + "integrity": "sha1-x054BXT2PG+aCQ6Q775u9TpqdWo=", + "dev": true, + "requires": { + "lru-cache": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "sigmund": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" + }, + "dependencies": { + "lru-cache": { + "version": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "integrity": "sha1-bUUk6LlV+V1PW1iFHOId1y+06VI=", + "dev": true + }, + "sigmund": { + "version": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", + "integrity": "sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA=", + "dev": true + } + } + } } }, "nopt": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", + "version": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", "integrity": "sha1-bd0hvSoxQXuScn3Vhfim83YI6+4=", "dev": true, "requires": { - "abbrev": "1.1.1" + "abbrev": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz" } }, "noptify": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/noptify/-/noptify-0.0.3.tgz", + "version": "https://registry.npmjs.org/noptify/-/noptify-0.0.3.tgz", "integrity": "sha1-WPZUpz2XU98MUdlobckhBKZ/S7s=", - "dev": true, "requires": { - "nopt": "2.0.0" + "nopt": "https://registry.npmjs.org/nopt/-/nopt-2.0.0.tgz" }, "dependencies": { "nopt": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-2.0.0.tgz", + "version": "https://registry.npmjs.org/nopt/-/nopt-2.0.0.tgz", "integrity": "sha1-ynQW8gpeP5w7hhgPlilfo9C1Lg0=", - "dev": true, "requires": { - "abbrev": "1.1.1" + "abbrev": 
"https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz" } } } }, "nssocket": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/nssocket/-/nssocket-0.5.3.tgz", + "version": "https://registry.npmjs.org/nssocket/-/nssocket-0.5.3.tgz", "integrity": "sha1-iDyi7GBfXtZKTVGQsmJUAZKPj40=", "dev": true, "requires": { - "eventemitter2": "0.4.14", - "lazy": "1.0.11" + "eventemitter2": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz", + "lazy": "https://registry.npmjs.org/lazy/-/lazy-1.0.11.tgz" } }, "oauth-sign": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz", + "version": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz", "integrity": "sha1-oOahcV2u0GLzIrYit/5a/RA1tuI=" }, "object-assign": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-2.1.1.tgz", + "version": "https://registry.npmjs.org/object-assign/-/object-assign-2.1.1.tgz", "integrity": "sha1-Q8NuXVaf+OSBbE76i+AtJpZ8GKo=", "dev": true }, + "on-finished": { + "version": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", + "requires": { + "ee-first": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz" + } + }, "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "version": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", "requires": { - "wrappy": "1.0.2" + "wrappy": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" } }, "optimist": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "version": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", "dev": true, "requires": { - "minimist": "0.0.8", - "wordwrap": "0.0.3" + "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "wordwrap": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" } }, "os-homedir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "version": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", "dev": true }, "os-tmpdir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "version": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", "dev": true }, "osenv": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.4.tgz", + "version": "https://registry.npmjs.org/osenv/-/osenv-0.1.4.tgz", "integrity": "sha1-Qv5tWVPfBsgGS+bxdsPQWqqjRkQ=", "dev": true, "requires": { - "os-homedir": "1.0.2", - "os-tmpdir": "1.0.2" + "os-homedir": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "os-tmpdir": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz" } }, "path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "version": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, "path-to-regexp": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", + "version": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", "integrity": 
"sha1-Wf3g9DW62suhA6hOnTvGTpa5k30=", "requires": { - "isarray": "0.0.1" + "isarray": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" }, "dependencies": { "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "version": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" } } }, "pathval": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz", + "version": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz", "integrity": "sha1-uULm1L3mUwBe9rcTYd74cn0GReA=" }, "pause": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz", + "version": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz", "integrity": "sha1-HUCLP9t2kjuVQ9lvtMnf1TXZy10=" }, + "performance-now": { + "version": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", + "dev": true + }, "pkginfo": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", + "version": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", "integrity": "sha1-Wyn2qB9wcXFC4J52W76rl7T4HiE=", "dev": true }, "pngcrush": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/pngcrush/-/pngcrush-0.0.3.tgz", + "version": "https://registry.npmjs.org/pngcrush/-/pngcrush-0.0.3.tgz", "integrity": "sha1-v2dW6s2h+rNJwHdo6AXMEA0o+Tc=", "requires": { - "gettemporaryfilepath": "0.0.1" + "gettemporaryfilepath": "https://registry.npmjs.org/gettemporaryfilepath/-/gettemporaryfilepath-0.0.1.tgz" } }, "prompt": { - "version": "0.2.14", - "resolved": "https://registry.npmjs.org/prompt/-/prompt-0.2.14.tgz", + "version": "https://registry.npmjs.org/prompt/-/prompt-0.2.14.tgz", "integrity": "sha1-V3VPZPVD/XsIRXB8gY7OYY8F/9w=", "dev": true, "requires": { - "pkginfo": "0.3.1", - "read": "1.0.7", - "revalidator": "0.1.8", - "utile": "0.2.1", - "winston": "0.8.3" + "pkginfo": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", + "read": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", + "revalidator": "https://registry.npmjs.org/revalidator/-/revalidator-0.1.8.tgz", + "utile": "https://registry.npmjs.org/utile/-/utile-0.2.1.tgz", + "winston": "https://registry.npmjs.org/winston/-/winston-0.8.3.tgz" } }, "ps-tree": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/ps-tree/-/ps-tree-0.0.3.tgz", + "version": "https://registry.npmjs.org/ps-tree/-/ps-tree-0.0.3.tgz", "integrity": "sha1-2/jXUqf+Ivp9WGNWiUmWEOknbdw=", "dev": true, "requires": { - "event-stream": "0.5.3" + "event-stream": "https://registry.npmjs.org/event-stream/-/event-stream-0.5.3.tgz" } }, "punycode": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "version": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" }, "qs": { - "version": "0.6.6", - "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", + "version": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", "integrity": "sha1-bgFQmP9RlouKPIGQAdXyyJvEsQc=" }, "querystring": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "version": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" }, "range-parser": { - "version": "1.2.0", - "resolved": 
"https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", + "version": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=" }, "raven": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/raven/-/raven-0.8.1.tgz", + "version": "https://registry.npmjs.org/raven/-/raven-0.8.1.tgz", "integrity": "sha1-UVk7tlnHcnjc00gitlq+d7dRuvU=", "requires": { - "cookie": "0.1.0", - "lsmod": "0.0.3", - "node-uuid": "1.4.8", - "stack-trace": "0.0.7" + "cookie": "https://registry.npmjs.org/cookie/-/cookie-0.1.0.tgz", + "lsmod": "https://registry.npmjs.org/lsmod/-/lsmod-0.0.3.tgz", + "node-uuid": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", + "stack-trace": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.7.tgz" } }, "raw-body": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-1.1.2.tgz", + "version": "https://registry.npmjs.org/raw-body/-/raw-body-1.1.2.tgz", "integrity": "sha1-x0swBN6l3v0WlhcRBqx0DsMdYr4=", "requires": { - "bytes": "0.2.1" + "bytes": "https://registry.npmjs.org/bytes/-/bytes-0.2.1.tgz" } }, "read": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", + "version": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", "integrity": "sha1-s9oZvQUkMal2cdRKQmNK33ELQMQ=", "dev": true, "requires": { - "mute-stream": "0.0.7" + "mute-stream": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz" } }, "readable-stream": { - "version": "1.1.14", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "version": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", "requires": { - "core-util-is": "1.0.2", - "inherits": "2.0.3", - "isarray": "0.0.1", - "string_decoder": "0.10.31" + "core-util-is": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "isarray": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "string_decoder": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" }, "dependencies": { "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "version": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" } } }, "recluster": { - "version": "0.3.7", - "resolved": "https://registry.npmjs.org/recluster/-/recluster-0.3.7.tgz", + "version": "https://registry.npmjs.org/recluster/-/recluster-0.3.7.tgz", "integrity": "sha1-aKRx3ZC2obl3ZjTPdpZAWutWeJU=" }, "request": { - "version": "2.14.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.14.0.tgz", + "version": "https://registry.npmjs.org/request/-/request-2.14.0.tgz", "integrity": "sha1-DYrLsLFMGrguAAt9OB+oyA0afYg=", "requires": { - "form-data": "0.0.7", - "mime": "1.2.9" + "form-data": "https://registry.npmjs.org/form-data/-/form-data-0.0.7.tgz", + "mime": "https://registry.npmjs.org/mime/-/mime-1.2.9.tgz" }, "dependencies": { "form-data": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.0.7.tgz", + "version": "https://registry.npmjs.org/form-data/-/form-data-0.0.7.tgz", "integrity": "sha1-chEYKiaiZs45cQ3IvEqBtwQIWb4=", "requires": { - "async": "0.1.22", - "combined-stream": "0.0.4", - "mime": "1.2.9" + "async": "https://registry.npmjs.org/async/-/async-0.1.22.tgz", + 
"combined-stream": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.4.tgz", + "mime": "https://registry.npmjs.org/mime/-/mime-1.2.9.tgz" }, "dependencies": { "async": { - "version": "0.1.22", - "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz", + "version": "https://registry.npmjs.org/async/-/async-0.1.22.tgz", "integrity": "sha1-D8GqoIig4+8Ovi2IMbqw3PiEUGE=" }, "combined-stream": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.4.tgz", + "version": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.4.tgz", "integrity": "sha1-LRpDNH2+lRWkonlnMuW4hHOECyI=", "requires": { - "delayed-stream": "0.0.5" + "delayed-stream": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" }, "dependencies": { "delayed-stream": { - "version": "0.0.5", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", + "version": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", "integrity": "sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8=" } } @@ -1585,352 +2012,517 @@ } }, "mime": { - "version": "1.2.9", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.9.tgz", + "version": "https://registry.npmjs.org/mime/-/mime-1.2.9.tgz", "integrity": "sha1-AJzUCGe9Nd5SGzuWbwTi+NTRPQk=" } } }, "require-like": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", + "version": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=" }, "requirejs": { - "version": "2.1.22", - "resolved": "https://registry.npmjs.org/requirejs/-/requirejs-2.1.22.tgz", + "version": "https://registry.npmjs.org/requirejs/-/requirejs-2.1.22.tgz", "integrity": "sha1-3Xj9LTQYDA1ixyS1uK68BmTgNm8=", "dev": true }, "response": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/response/-/response-0.14.0.tgz", + "version": "https://registry.npmjs.org/response/-/response-0.14.0.tgz", "integrity": "sha1-BmNS/z5rAm0EdYCUB2Y7Rob9JpY=", "requires": { - "best-encoding": "0.1.1", - "bl": "0.7.0", - "caseless": "0.3.0", - "mime": "1.2.11" + "best-encoding": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz", + "bl": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz", + "caseless": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz", + "mime": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" } }, "revalidator": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/revalidator/-/revalidator-0.1.8.tgz", + "version": "https://registry.npmjs.org/revalidator/-/revalidator-0.1.8.tgz", "integrity": "sha1-/s5hv6DBtSoga9axgZgYS91SOjs=", "dev": true }, "rimraf": { - "version": "2.2.8", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz", + "version": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz", "integrity": "sha1-5Dm+Kq7jJzIZUnMPmaiSnk/FBYI=" }, + "safe-buffer": { + "version": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha1-mR7GnSluAxN0fVm9/St0XDX4go0=", + "dev": true + }, "safe-json-stringify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.4.tgz", + "version": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.4.tgz", "integrity": "sha1-gaCY9Efku8P/MxKiQ1IbwGDvWRE=", "optional": true }, + "safer-buffer": { + "version": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": 
"sha1-RPoWGwGHuVSd2Eu5GAL5vYOFzWo=" + }, "samsam": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/samsam/-/samsam-1.3.0.tgz", - "integrity": "sha512-1HwIYD/8UlOtFS3QO3w7ey+SdSDFE4HRNLZoZRYVQefrOY3l17epswImeB1ijgJFQJodIaHcwkp3r/myBjFVbg==" + "version": "https://registry.npmjs.org/samsam/-/samsam-1.3.0.tgz", + "integrity": "sha1-jR2TUOJWItow3j5EumkrUiGrfFA=" }, "sandboxed-module": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz", + "version": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz", "integrity": "sha1-x+VFkzm7y6KMUwPusz9ug4e/upY=", "requires": { - "require-like": "0.1.2", - "stack-trace": "0.0.9" + "require-like": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", + "stack-trace": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" }, "dependencies": { "stack-trace": { - "version": "0.0.9", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", + "version": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" } } }, "sax": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "version": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" }, "semver": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-2.3.2.tgz", + "version": "https://registry.npmjs.org/semver/-/semver-2.3.2.tgz", "integrity": "sha1-uYSPJdbPNjMwc+ye+IVtQvEjPlI=", "dev": true }, "send": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/send/-/send-0.1.4.tgz", + "version": "https://registry.npmjs.org/send/-/send-0.1.4.tgz", "integrity": "sha1-vnDY0b4B3mGCGvE3gLUDRaT3Gr0=", "requires": { - "debug": "0.8.1", - "fresh": "0.2.0", - "mime": "1.2.11", - "range-parser": "0.0.4" + "debug": "https://registry.npmjs.org/debug/-/debug-0.8.1.tgz", + "fresh": "https://registry.npmjs.org/fresh/-/fresh-0.2.0.tgz", + "mime": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "range-parser": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz" }, "dependencies": { "range-parser": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz", + "version": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz", "integrity": "sha1-wEJ//vUcEKy6B4KkbJYC50T/Ygs=" } } }, + "setprototypeof": { + "version": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", + "integrity": "sha1-0L2FU2iHtv58DYGMuWLZ2RxU5lY=" + }, "settings-sharelatex": { "version": "git+https://github.com/sharelatex/settings-sharelatex.git#cbc5e41c1dbe6789721a14b3fdae05bf22546559", + "integrity": "sha1-2Bak3mkaxAvXjqsUtpTJhW6yIgQ=", "requires": { - "coffee-script": "1.6.0" + "coffee-script": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" }, "dependencies": { "coffee-script": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", + "version": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" } } }, + "sigmund": { + "version": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", + "integrity": "sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA=" + }, "sinon": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/sinon/-/sinon-4.1.4.tgz", - "integrity": 
"sha512-ISJZDPf8RS2z4/LAgy1gIimAvF9zg9C9ClQhLTWYWm4HBZjo1WELXlVfkudjdYeN+GtQ2uVBe52m0npIV0gDow==", + "version": "https://registry.npmjs.org/sinon/-/sinon-4.1.4.tgz", + "integrity": "sha1-Nrsje6443fnMktzBsWxR53hbvJw=", "requires": { - "diff": "3.4.0", - "formatio": "1.2.0", - "lodash.get": "4.4.2", - "lolex": "2.3.1", - "nise": "1.2.0", - "supports-color": "4.5.0", - "type-detect": "4.0.5" + "diff": "https://registry.npmjs.org/diff/-/diff-3.4.0.tgz", + "formatio": "https://registry.npmjs.org/formatio/-/formatio-1.2.0.tgz", + "lodash.get": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "lolex": "https://registry.npmjs.org/lolex/-/lolex-2.3.1.tgz", + "nise": "https://registry.npmjs.org/nise/-/nise-1.2.0.tgz", + "supports-color": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz", + "type-detect": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.5.tgz" }, "dependencies": { "diff": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-3.4.0.tgz", - "integrity": "sha512-QpVuMTEoJMF7cKzi6bvWhRulU1fZqZnvyVQgNhPaxxuTYwyjn/j1v9falseQ/uXWwPnO56RBfwtg4h/EQXmucA==" + "version": "https://registry.npmjs.org/diff/-/diff-3.4.0.tgz", + "integrity": "sha1-sdhVB9rzlkgo3lSzfQ1zumfdpWw=" } } }, "sntp": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz", + "version": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz", "integrity": "sha1-XvSBuVGnspr/30r9fyaDj8ESD4Q=", "requires": { - "hoek": "0.7.6" + "hoek": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz" } }, "sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "version": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", "dev": true }, + "sshpk": { + "version": "https://registry.npmjs.org/sshpk/-/sshpk-1.14.1.tgz", + "integrity": "sha1-Ew9Zde3a2WPx1W+SuaxsUfqfg+s=", + "dev": true, + "requires": { + "asn1": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", + "assert-plus": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "bcrypt-pbkdf": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz", + "dashdash": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "ecc-jsbn": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", + "getpass": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "jsbn": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "tweetnacl": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz" + } + }, "stack-trace": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.7.tgz", + "version": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.7.tgz", "integrity": "sha1-xy4Il0T8Nln1CM3ONiGvVjTsD/8=" }, "statsd-parser": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", + "version": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" }, + "statuses": { + "version": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" + }, "stream-buffers": { - "version": "0.2.6", - "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz", + "version": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz", "integrity": "sha1-GBwI1bs2kARfaUAbmuanoM8zE/w=" }, "stream-counter": { - "version": 
"0.2.0", - "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-0.2.0.tgz", + "version": "https://registry.npmjs.org/stream-counter/-/stream-counter-0.2.0.tgz", "integrity": "sha1-3tJmVWMZyLDiIoErnPOyb6fZR94=", "requires": { - "readable-stream": "1.1.14" + "readable-stream": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz" } }, "string_decoder": { - "version": "0.10.31", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "version": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" }, "strip-ansi": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.1.1.tgz", + "version": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.1.1.tgz", "integrity": "sha1-OeipjQRNFQZgq+SmgIrPcLt7yZE=", "dev": true }, "supports-color": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz", + "version": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz", "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=", "requires": { - "has-flag": "2.0.0" + "has-flag": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz" } }, "text-encoding": { - "version": "0.6.4", - "resolved": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz", + "version": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz", "integrity": "sha1-45mpgiV6J22uQou5KEXLcb3CbRk=" }, "timespan": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/timespan/-/timespan-2.3.0.tgz", + "version": "https://registry.npmjs.org/timespan/-/timespan-2.3.0.tgz", "integrity": "sha1-SQLOBAvRPYRcj1myfp1ZutbzmSk=", "dev": true }, "tiny-lr": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/tiny-lr/-/tiny-lr-0.0.4.tgz", + "version": "https://registry.npmjs.org/tiny-lr/-/tiny-lr-0.0.4.tgz", "integrity": "sha1-gGGFR/Y/aX0Fy0DEwsSwg1Ia77Y=", "dev": true, "requires": { - "debug": "0.8.1", - "faye-websocket": "0.4.4", - "noptify": "0.0.3", - "qs": "0.6.6" + "debug": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz", + "faye-websocket": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.4.4.tgz", + "noptify": "https://registry.npmjs.org/noptify/-/noptify-0.0.3.tgz", + "qs": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz" + }, + "dependencies": { + "debug": { + "version": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz", + "integrity": "sha1-BuHqgILCyxTjmAbiLi9vdX+Srzk=", + "dev": true + }, + "nopt": { + "version": "https://registry.npmjs.org/nopt/-/nopt-2.0.0.tgz", + "integrity": "sha1-ynQW8gpeP5w7hhgPlilfo9C1Lg0=", + "dev": true, + "requires": { + "abbrev": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz" + } + }, + "noptify": { + "version": "https://registry.npmjs.org/noptify/-/noptify-0.0.3.tgz", + "integrity": "sha1-WPZUpz2XU98MUdlobckhBKZ/S7s=", + "dev": true, + "requires": { + "nopt": "https://registry.npmjs.org/nopt/-/nopt-2.0.0.tgz" + } + }, + "qs": { + "version": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz", + "integrity": "sha1-MbGtBYVnZRxSaSFQa5qHk5EaA4Q=", + "dev": true + } + } + }, + "tough-cookie": { + "version": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.4.tgz", + "integrity": "sha1-7GDO44rGdQY//JelwYlwV47oNlU=", + "dev": true, + "requires": { + "punycode": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" + }, + "dependencies": { + "punycode": { + "version": 
"https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", + "dev": true + } } }, "tunnel-agent": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz", + "version": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz", "integrity": "sha1-aFPCr7GyEJ5FYp5JK9419Fnqaeg=" }, + "tweetnacl": { + "version": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", + "dev": true, + "optional": true + }, "type-detect": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.5.tgz", - "integrity": "sha512-N9IvkQslUGYGC24RkJk1ba99foK6TkwC2FHAEBlQFBP0RxQZS8ZpJuAZcwiY/w9ZJHFQb1aOXBI60OdxhTrwEQ==" + "version": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.5.tgz", + "integrity": "sha1-1w5byB223io4G8rKDG4MvcdjXeI=" + }, + "type-is": { + "version": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz", + "integrity": "sha1-+JzjQVQcZysl7nrjxz3uOyvlAZQ=", + "requires": { + "media-typer": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "mime-types": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz" + } }, "uid2": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.3.tgz", + "version": "https://registry.npmjs.org/uid2/-/uid2-0.0.3.tgz", "integrity": "sha1-SDEm4Rd03y9xuLY53NeZw3YWK4I=" }, "underscore": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz", + "version": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz", "integrity": "sha1-EzXF5PXm0zu7SwBrqMhqAPVW3gg=" }, "underscore.string": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz", + "version": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz", "integrity": "sha1-18D6KvXVoaZ/QlPa7pgTLnM/Dxk=", "dev": true }, + "unpipe": { + "version": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" + }, "update-notifier": { - "version": "0.1.10", - "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-0.1.10.tgz", + "version": "https://registry.npmjs.org/update-notifier/-/update-notifier-0.1.10.tgz", "integrity": "sha1-IVy+EFM2nw1KRPhLUeuny4BIRpU=", "dev": true, "requires": { - "chalk": "0.4.0", - "configstore": "0.3.2", - "request": "2.14.0", - "semver": "2.3.2" + "chalk": "https://registry.npmjs.org/chalk/-/chalk-0.4.0.tgz", + "configstore": "https://registry.npmjs.org/configstore/-/configstore-0.3.2.tgz", + "request": "https://registry.npmjs.org/request/-/request-2.87.0.tgz", + "semver": "https://registry.npmjs.org/semver/-/semver-2.3.2.tgz" + }, + "dependencies": { + "caseless": { + "version": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", + "dev": true + }, + "combined-stream": { + "version": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", + "integrity": "sha1-cj599ugBrFYTETp+RFqbactjKBg=", + "dev": true, + "requires": { + "delayed-stream": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + } + }, + "delayed-stream": { + "version": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "dev": true + }, + "forever-agent": { + "version": 
"https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", + "dev": true + }, + "form-data": { + "version": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz", + "integrity": "sha1-SXBJi+YEwgwAXU9cI67NIda0kJk=", + "dev": true, + "requires": { + "asynckit": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "combined-stream": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", + "mime-types": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz" + } + }, + "json-stringify-safe": { + "version": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", + "dev": true + }, + "oauth-sign": { + "version": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", + "integrity": "sha1-Rqarfwrq2N6unsBWV4C31O/rnUM=", + "dev": true + }, + "qs": { + "version": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha1-yzroBuh0BERYTvFUzo7pjUA/PjY=", + "dev": true + }, + "request": { + "version": "https://registry.npmjs.org/request/-/request-2.87.0.tgz", + "integrity": "sha1-MvACNc0I1IK00NaNuTqCnA7VdW4=", + "dev": true, + "requires": { + "aws-sign2": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "aws4": "https://registry.npmjs.org/aws4/-/aws4-1.7.0.tgz", + "caseless": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "combined-stream": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", + "extend": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz", + "forever-agent": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "form-data": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz", + "har-validator": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz", + "http-signature": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "is-typedarray": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "isstream": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "json-stringify-safe": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "mime-types": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz", + "oauth-sign": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", + "performance-now": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "qs": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "safe-buffer": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "tough-cookie": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.4.tgz", + "tunnel-agent": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "uuid": "https://registry.npmjs.org/uuid/-/uuid-3.2.1.tgz" + } + }, + "tunnel-agent": { + "version": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "dev": true, + "requires": { + "safe-buffer": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" + } + }, + "uuid": { + "version": "https://registry.npmjs.org/uuid/-/uuid-3.2.1.tgz", + "integrity": "sha1-EsUou51Y0LkmXZovbw/ovhf/HxQ=", + "dev": true + } } }, "url": { - "version": "0.10.3", - "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "version": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", "requires": { - "punycode": "1.3.2", - 
"querystring": "0.2.0" + "punycode": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "querystring": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz" } }, "user-home": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/user-home/-/user-home-1.1.1.tgz", + "version": "https://registry.npmjs.org/user-home/-/user-home-1.1.1.tgz", "integrity": "sha1-K1viOjK2Onyd640PKNSFcko98ZA=", "dev": true }, "utile": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/utile/-/utile-0.2.1.tgz", + "version": "https://registry.npmjs.org/utile/-/utile-0.2.1.tgz", "integrity": "sha1-kwyI6ZCY1iIINMNWy9mncFItkNc=", "dev": true, "requires": { - "async": "0.2.10", - "deep-equal": "1.0.1", - "i": "0.3.6", - "mkdirp": "0.3.5", - "ncp": "2.0.0", - "rimraf": "2.2.8" + "async": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "deep-equal": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.0.1.tgz", + "i": "https://registry.npmjs.org/i/-/i-0.3.6.tgz", + "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", + "ncp": "https://registry.npmjs.org/ncp/-/ncp-0.4.2.tgz", + "rimraf": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" + }, + "dependencies": { + "ncp": { + "version": "https://registry.npmjs.org/ncp/-/ncp-0.4.2.tgz", + "integrity": "sha1-q8xsvT7C7Spyn/bnwfqPAXhKhXQ=", + "dev": true + } } }, "uuid": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-2.0.3.tgz", + "version": "https://registry.npmjs.org/uuid/-/uuid-2.0.3.tgz", "integrity": "sha1-Z+LoY3lyFVMN/zGOW/nc6/1Hsho=", "dev": true }, + "verror": { + "version": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "dev": true, + "requires": { + "assert-plus": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "core-util-is": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "extsprintf": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz" + } + }, "watch": { - "version": "0.13.0", - "resolved": "https://registry.npmjs.org/watch/-/watch-0.13.0.tgz", + "version": "https://registry.npmjs.org/watch/-/watch-0.13.0.tgz", "integrity": "sha1-/MbSs/DoxzSC61Qjmhn9W8+adTw=", "dev": true, "requires": { - "minimist": "0.0.8" + "minimist": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz" + }, + "dependencies": { + "minimist": { + "version": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", + "dev": true + } } }, "which": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/which/-/which-1.0.9.tgz", + "version": "https://registry.npmjs.org/which/-/which-1.0.9.tgz", "integrity": "sha1-RgwdoPgQED0DIam2M6+eV15kSG8=", "dev": true }, "winston": { - "version": "0.8.3", - "resolved": "https://registry.npmjs.org/winston/-/winston-0.8.3.tgz", + "version": "https://registry.npmjs.org/winston/-/winston-0.8.3.tgz", "integrity": "sha1-ZLar9M0Brcrv1QCTk7HY6L7BnbA=", "dev": true, "requires": { - "async": "0.2.10", - "colors": "0.6.2", - "cycle": "1.0.3", - "eyes": "0.1.8", - "isstream": "0.1.2", - "pkginfo": "0.3.1", - "stack-trace": "0.0.7" + "async": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "colors": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", + "cycle": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz", + "eyes": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", + "isstream": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "pkginfo": 
"https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", + "stack-trace": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.7.tgz" } }, "wordwrap": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "version": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", "dev": true }, "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "version": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" }, "xdg-basedir": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-1.0.1.tgz", + "version": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-1.0.1.tgz", "integrity": "sha1-FP+PY6T9vLBdW27qIrNvMDO58E4=", "dev": true, "requires": { - "user-home": "1.1.1" + "user-home": "https://registry.npmjs.org/user-home/-/user-home-1.1.1.tgz" } }, "xml2js": { - "version": "0.4.17", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.17.tgz", + "version": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.17.tgz", "integrity": "sha1-F76T6q4/O3eTWceVtBlwWogX6Gg=", "requires": { - "sax": "1.2.1", - "xmlbuilder": "4.2.1" + "sax": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "xmlbuilder": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-4.2.1.tgz" } }, "xmlbuilder": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-4.2.1.tgz", + "version": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-4.2.1.tgz", "integrity": "sha1-qlijBBoGb5DqoWwvU4n/GfP0YaU=", "requires": { - "lodash": "4.17.4" + "lodash": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz" } } } diff --git a/services/filestore/package.json b/services/filestore/package.json index 897d195b63..f290f4598f 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -17,8 +17,8 @@ "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests", "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", "nodemon": "nodemon --config nodemon.json", - "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 $@ test/acceptance/js", - "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:unit:_run": "mocha --recursive --exit --reporter spec $@ test/unit/js" }, "dependencies": { "async": "~0.2.10", @@ -41,7 +41,7 @@ "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", "stream-buffers": "~0.2.5", "underscore": "~1.5.2", - "mocha": "^4.0.1", + "mocha": "5.2.0", "body-parser": "^1.2.0" }, "devDependencies": { From 58ab4c72dcd9eae45c8279fd0828010739d3e1f1 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 21 May 2018 15:14:24 +0100 Subject: [PATCH 235/555] update build scripts to 1.1.1 --- services/filestore/.dockerignore | 6 ++++++ services/filestore/Jenkinsfile | 6 +++--- services/filestore/Makefile | 10 +++++----- services/filestore/package.json | 6 +++--- 4 files changed, 17 insertions(+), 11 deletions(-) diff --git a/services/filestore/.dockerignore b/services/filestore/.dockerignore index a99835353f..386f26df30 100644 --- a/services/filestore/.dockerignore +++ b/services/filestore/.dockerignore @@ -1,3 +1,9 @@ node_modules/* +gitrev +.git +.gitignore +.npm +.nvmrc +nodemon.json app.js **/js/* 
diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index e1a34a5235..bc9ba0142f 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -22,9 +22,9 @@ pipeline { } stage('Acceptance Tests') { - steps { - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance' - } + steps { + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance' + } } stage('Package and publish build') { diff --git a/services/filestore/Makefile b/services/filestore/Makefile index ef9dd9d63b..dbd5ce92a6 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -20,13 +20,13 @@ clean: rm -rf test/unit/js rm -rf test/acceptance/js -test: test_unit test_acceptance +test: test_unit test_acceptance -test_unit: - @[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit +test_unit: + @[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit -- ${MOCHA_ARGS} -test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run - @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance +test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run + @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance npm run test:acceptance -- ${MOCHA_ARGS} test_clean: $(DOCKER_COMPOSE) down -v -t 0 diff --git a/services/filestore/package.json b/services/filestore/package.json index f290f4598f..c0971b67f3 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -17,14 +17,14 @@ "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests", "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", "nodemon": "nodemon --config nodemon.json", - "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", - "test:unit:_run": "mocha --recursive --exit --reporter spec $@ test/unit/js" + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" }, "dependencies": { "async": "~0.2.10", "aws-sdk": "^2.1.39", "coffee-script": "~1.7.1", - "express": "^4.2.0", + "express": "^4.2.0", "fs-extra": "^1.0.0", "heapdump": "^0.3.2", "knox": "~0.9.1", From ba8a5cf3e13b8cc9a68314142e981409e19457bd Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 22 May 2018 13:45:01 +0100 Subject: [PATCH 236/555] bump to 1.1.3 scripts --- services/filestore/Dockerfile | 5 +++-- services/filestore/Makefile | 10 +++++----- services/filestore/docker-compose.ci.yml | 4 ++-- services/filestore/docker-compose.yml | 4 ++-- services/filestore/package.json | 4 ++-- 5 files changed, 14 insertions(+), 13 deletions(-) diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index cb700d7ade..f359ac3d6c 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -2,7 +2,8 @@ FROM node:6.9.5 as app WORKDIR /app -COPY package.json package-lock.json /app/ +#wildcard as some files may not be in all repos +COPY package*.json npm-shrink*.json /app/ RUN npm install --quiet @@ -15,7 +16,7 @@ FROM node:6.9.5 COPY --from=app /app /app WORKDIR /app -RUN ./install_deps.sh +RUN chmod 0755 ./install_deps.sh && 
./install_deps.sh USER node CMD ["node","app.js"] diff --git a/services/filestore/Makefile b/services/filestore/Makefile index dbd5ce92a6..f940429d87 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.1 +# Version: 1.1.3 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -22,11 +22,11 @@ clean: test: test_unit test_acceptance -test_unit: - @[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit -- ${MOCHA_ARGS} +test_unit: + @[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit -test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run - @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance npm run test:acceptance -- ${MOCHA_ARGS} +test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run + @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance test_clean: $(DOCKER_COMPOSE) down -v -t 0 diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index a1cdf244d9..21c006641e 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.1 +# Version: 1.1.3 version: "2" @@ -23,7 +23,7 @@ services: - redis user: node command: npm run test:acceptance:_run - + redis: image: redis diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 6d11493b66..f24caa8883 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,13 +1,13 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.1 +# Version: 1.1.3 version: "2" services: test_unit: - image: node:6.9.5 + build: . volumes: - .:/app working_dir: /app diff --git a/services/filestore/package.json b/services/filestore/package.json index c0971b67f3..0f624c5462 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -8,9 +8,9 @@ }, "scripts": { "test:acceptance:run": "mocha --recursive --reporter spec --timeout 15000 $@ test/acceptance/js", - "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- $@", + "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP", "test:unit:run": "mocha --recursive --reporter spec $@ test/unit/js", - "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- $@", + "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP", "compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee", "compile:acceptance_tests": "[ ! 
-e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee", "compile:app": "([ -e app/coffee ] && coffee $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", From 17637150044137e0f9f0fe8b3e9acd76a31de227 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 22 May 2018 13:45:32 +0100 Subject: [PATCH 237/555] remove black package-lock.json --- services/filestore/package-lock.json | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 services/filestore/package-lock.json diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json deleted file mode 100644 index e69de29bb2..0000000000 From 46c61aa78ce19e2fe0ae34bbb5fa4fe1e095d002 Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Wed, 13 Jun 2018 16:21:32 +0100 Subject: [PATCH 238/555] Update build scripts to 1.1.7, add csh-staging GCR --- services/filestore/Dockerfile | 1 + services/filestore/Jenkinsfile | 15 ++++++++++++++- services/filestore/Makefile | 13 ++++++++++--- services/filestore/docker-compose.ci.yml | 9 +++++---- services/filestore/docker-compose.yml | 4 +++- services/filestore/package.json | 7 ++++--- 6 files changed, 37 insertions(+), 12 deletions(-) diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index f359ac3d6c..3ced888df3 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -9,6 +9,7 @@ RUN npm install --quiet COPY . /app + RUN npm run compile:all FROM node:6.9.5 diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index bc9ba0142f..efe844839b 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -29,7 +29,19 @@ pipeline { stage('Package and publish build') { steps { - sh 'make publish' + + withCredentials([file(credentialsId: 'gcr.io_csh-gcdm-test', variable: 'DOCKER_REPO_KEY_PATH')]) { + sh 'docker login -u _json_key --password-stdin https://gcr.io/csh-gcdm-test < ${DOCKER_REPO_KEY_PATH}' + } + sh 'DOCKER_REPO=gcr.io/csh-gcdm-test make publish' + sh 'docker logout https://gcr.io/csh-gcdm-test' + + withCredentials([file(credentialsId: 'gcr.io_csh-staging', variable: 'DOCKER_REPO_KEY_PATH')]) { + sh 'docker login -u _json_key --password-stdin https://gcr.io/csh-staging < ${DOCKER_REPO_KEY_PATH}' + } + sh 'DOCKER_REPO=gcr.io/csh-staging make publish' + sh 'docker logout https://gcr.io/csh-staging' + } } @@ -47,6 +59,7 @@ pipeline { post { always { sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean' + sh 'make clean' } failure { diff --git a/services/filestore/Makefile b/services/filestore/Makefile index f940429d87..76d583b35e 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.3 +# Version: 1.1.7 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -15,6 +15,9 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ clean: + docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + docker rmi gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + docker rmi gcr.io/csh-staging/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) rm -f app.js rm -rf app/js rm -rf test/unit/js @@ -34,9 +37,13 @@ test_clean: test_acceptance_pre_run: @[ ! -f test/acceptance/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run build: - docker build --pull --tag gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) . + docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag gcr.io/csh-staging/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + . publish: - docker push gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + + docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) .PHONY: clean test test_unit test_acceptance test_clean build publish diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 21c006641e..57299e0d2f 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,23 +1,25 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.3 +# Version: 1.1.7 version: "2" services: test_unit: - image: gcr.io/csh-gcdm-test/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER user: node command: npm run test:unit:_run test_acceptance: build: . - image: gcr.io/csh-gcdm-test/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER environment: + ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} depends_on: - mongo - redis @@ -29,4 +31,3 @@ services: mongo: image: mongo:3.4 - diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index f24caa8883..659bbab129 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.3 +# Version: 1.1.7 version: "2" @@ -22,6 +22,7 @@ services: - .:/app working_dir: /app environment: + ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres @@ -31,6 +32,7 @@ services: - mongo - redis command: npm run test:acceptance + redis: image: redis diff --git a/services/filestore/package.json b/services/filestore/package.json index 0f624c5462..ee4ac0dbd4 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -14,11 +14,12 @@ "compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee", "compile:acceptance_tests": "[ ! 
-e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee", "compile:app": "([ -e app/coffee ] && coffee $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", - "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests", + "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:smoke_tests", "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", "nodemon": "nodemon --config nodemon.json", - "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", - "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js", + "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js", + "compile:smoke_tests": "[ ! -e test/smoke/coffee] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee" }, "dependencies": { "async": "~0.2.10", From f0cf8e662259d6350944d1af1363a8aa892083f8 Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Thu, 14 Jun 2018 11:37:18 +0100 Subject: [PATCH 239/555] Fix typo in buildscript --- services/filestore/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index ee4ac0dbd4..0f455df13f 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -19,7 +19,7 @@ "nodemon": "nodemon --config nodemon.json", "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js", "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js", - "compile:smoke_tests": "[ ! -e test/smoke/coffee] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee" + "compile:smoke_tests": "[ ! -e test/smoke/coffee ] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee" }, "dependencies": { "async": "~0.2.10", From feca8933f1ac4241242691bc74c05cac685f565a Mon Sep 17 00:00:00 2001 From: Michael Mazour Date: Wed, 4 Jul 2018 11:18:55 +0100 Subject: [PATCH 240/555] Add endpoint for arbitrary bucket fetch Add `/bucket/:bucket/key/*`, which fetches the file from the given bucket at the given path. Uses auth stored at `settings.filestore.s3.{{bucketName}}` if present, and otherwise default auth. 
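As a hedged illustration (not part of the patch), a minimal CoffeeScript client for the new route; the host, port, bucket name and key below are assumed values chosen only for this example:

	# Illustration only: the host/port, bucket name and key are assumptions,
	# not values taken from the patch.
	fs = require "fs"
	request = require "request"

	url = "http://localhost:3009/bucket/my_bucket/key/some/dir/file.pdf"
	readStream = request.get url
	readStream.on "response", (response) ->
		# 200 streams the object back; the controller sends 404 if the key is
		# missing and 500 on other errors
		console.log "bucket fetch status:", response.statusCode
	readStream.pipe fs.createWriteStream "./file.pdf"
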
--- services/filestore/app.coffee | 7 +- .../app/coffee/BucketController.coffee | 36 ++++++++++ .../app/coffee/S3PersistorManager.coffee | 4 +- .../unit/coffee/BucketControllerTests.coffee | 68 +++++++++++++++++++ .../coffee/S3PersistorManagerTests.coffee | 35 ++++++++++ 5 files changed, 145 insertions(+), 5 deletions(-) create mode 100644 services/filestore/app/coffee/BucketController.coffee create mode 100644 services/filestore/test/unit/coffee/BucketControllerTests.coffee diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index eb97ad48dd..ce10d2c91e 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -4,6 +4,7 @@ logger.initialize("filestore") settings = require("settings-sharelatex") request = require("request") fileController = require("./app/js/FileController") +bucketController = require("./app/js/BucketController") keyBuilder = require("./app/js/KeyBuilder") healthCheckController = require("./app/js/HealthCheckController") domain = require("domain") @@ -18,7 +19,7 @@ Metrics.memory.monitor(logger) app.configure -> app.use Metrics.http.monitor(logger) - + app.configure 'development', -> console.log "Development Enviroment" app.use express.errorHandler({ dumpExceptions: true, showStack: true }) @@ -86,6 +87,8 @@ app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, app.get "/project/:project_id/size", keyBuilder.publicProjectKey, fileController.directorySize +app.get "/bucket/:bucket/key/*", bucketController.getFile + app.get "/heapdump", (req, res)-> require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename)-> res.send filename @@ -103,8 +106,6 @@ app.get '/status', (req, res)-> app.get "/health_check", healthCheckController.check - - app.get '*', (req, res)-> diff --git a/services/filestore/app/coffee/BucketController.coffee b/services/filestore/app/coffee/BucketController.coffee new file mode 100644 index 0000000000..cc1ff03c45 --- /dev/null +++ b/services/filestore/app/coffee/BucketController.coffee @@ -0,0 +1,36 @@ +PersistorManager = require("./PersistorManager") +settings = require("settings-sharelatex") +logger = require("logger-sharelatex") +FileHandler = require("./FileHandler") +metrics = require("metrics-sharelatex") +parseRange = require('range-parser') +Errors = require('./Errors') + +oneDayInSeconds = 60 * 60 * 24 +maxSizeInBytes = 1024 * 1024 * 1024 # 1GB + +module.exports = BucketController = + + getFile: (req, res)-> + {bucket} = req + key = req[0] + {format, style} = req.query + credentials = settings.filestore.s3&[bucket] + options = { + key: key, + bucket: bucket, + credentials: credentials + } + metrics.inc "getFile" + logger.log key:key, bucket:bucket, "receiving request to get file from bucket" + FileHandler.getFile bucket, key, options, (err, fileStream)-> + if err? 
+ logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file from bucket" + if err instanceof Errors.NotFoundError + return res.send 404 + else + return res.send 500 + else + logger.log key:key, bucket:bucket, format:format, style:style, "sending bucket file to response" + fileStream.pipe res + diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index b1a03fb4f4..2bd6eb0e9b 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -68,8 +68,8 @@ module.exports = callback = _.once callback logger.log bucketName:bucketName, key:key, "getting file from s3" s3Client = knox.createClient - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret + key: opts.credentials?.auth_key || settings.filestore.s3.key + secret: opts.credentials?.auth_secret || settings.filestore.s3.secret bucket: bucketName s3Stream = s3Client.get(key, headers) s3Stream.end() diff --git a/services/filestore/test/unit/coffee/BucketControllerTests.coffee b/services/filestore/test/unit/coffee/BucketControllerTests.coffee new file mode 100644 index 0000000000..fc91d08793 --- /dev/null +++ b/services/filestore/test/unit/coffee/BucketControllerTests.coffee @@ -0,0 +1,68 @@ +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/BucketController.js" +SandboxedModule = require('sandboxed-module') + +describe "BucketController", -> + + beforeEach -> + @PersistorManager = + sendStream: sinon.stub() + copyFile: sinon.stub() + deleteFile:sinon.stub() + + @settings = + s3: + buckets: + user_files:"user_files" + filestore: + backend: "s3" + s3: + secret: "secret" + key: "this_key" + + @FileHandler = + getFile: sinon.stub() + deleteFile: sinon.stub() + insertFile: sinon.stub() + getDirectorySize: sinon.stub() + @LocalFileWriter = {} + @controller = SandboxedModule.require modulePath, requires: + "./LocalFileWriter":@LocalFileWriter + "./FileHandler": @FileHandler + "./PersistorManager":@PersistorManager + "settings-sharelatex": @settings + "logger-sharelatex": + log:-> + err:-> + @project_id = "project_id" + @file_id = "file_id" + @bucket = "user_files" + @key = "#{@project_id}/#{@file_id}" + @req = + bucket:@bucket + 0:@key + query:{} + headers: {} + @res = + setHeader: -> + @fileStream = {} + + describe "getFile", -> + + it "should pipe the stream", (done)-> + @FileHandler.getFile.callsArgWith(3, null, @fileStream) + @fileStream.pipe = (res)=> + res.should.equal @res + done() + @controller.getFile @req, @res + + it "should send a 500 if there is a problem", (done)-> + @FileHandler.getFile.callsArgWith(3, "error") + @res.send = (code)=> + code.should.equal 500 + done() + @controller.getFile @req, @res diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 3a3e7b0d86..b48fde7820 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -55,6 +55,41 @@ describe "S3PersistorManagerTests", -> @stubbedKnoxClient.get.calledWith(@key).should.equal true done() + it "should use default auth", (done)-> + @stubbedKnoxClient.get.returns( + on:-> + end:-> + ) + @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback + 
clientParams = + key: @settings.filestore.s3.key + secret: @settings.filestore.s3.secret + bucket: @bucketName + @knox.createClient.calledWith(clientParams).should.equal true + done() + + describe "with supplied auth", -> + beforeEach -> + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @credentials = + auth_key: "that_key" + auth_secret: "that_secret" + @opts = + credentials: @credentials + + it "should use supplied auth", (done)-> + @stubbedKnoxClient.get.returns( + on:-> + end:-> + ) + @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback + clientParams = + key: @credentials.auth_key + secret: @credentials.auth_secret + bucket: @bucketName + @knox.createClient.calledWith(clientParams).should.equal true + done() + describe "with start and end options", -> beforeEach -> @opts = From ece650741a4017e4a960eef556b67d15e410e61c Mon Sep 17 00:00:00 2001 From: Michael Mazour Date: Wed, 4 Jul 2018 12:02:09 +0100 Subject: [PATCH 241/555] Amend per several review comments - Removed unused vars - Label the metric with the bucket name --- services/filestore/app/coffee/BucketController.coffee | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/services/filestore/app/coffee/BucketController.coffee b/services/filestore/app/coffee/BucketController.coffee index cc1ff03c45..9192bd147e 100644 --- a/services/filestore/app/coffee/BucketController.coffee +++ b/services/filestore/app/coffee/BucketController.coffee @@ -1,14 +1,9 @@ -PersistorManager = require("./PersistorManager") settings = require("settings-sharelatex") logger = require("logger-sharelatex") FileHandler = require("./FileHandler") metrics = require("metrics-sharelatex") -parseRange = require('range-parser') Errors = require('./Errors') -oneDayInSeconds = 60 * 60 * 24 -maxSizeInBytes = 1024 * 1024 * 1024 # 1GB - module.exports = BucketController = getFile: (req, res)-> @@ -21,7 +16,7 @@ module.exports = BucketController = bucket: bucket, credentials: credentials } - metrics.inc "getFile" + metrics.inc "#{bucket}.getFile" logger.log key:key, bucket:bucket, "receiving request to get file from bucket" FileHandler.getFile bucket, key, options, (err, fileStream)-> if err? 
@@ -33,4 +28,3 @@ module.exports = BucketController = else logger.log key:key, bucket:bucket, format:format, style:style, "sending bucket file to response" fileStream.pipe res - From cfbf0d81bab33e59d2596589ed45d0b41ca967d1 Mon Sep 17 00:00:00 2001 From: Michael Mazour Date: Wed, 4 Jul 2018 12:11:09 +0100 Subject: [PATCH 242/555] Amend: fix params retrieval --- services/filestore/app/coffee/BucketController.coffee | 4 ++-- .../filestore/test/unit/coffee/BucketControllerTests.coffee | 5 +++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/coffee/BucketController.coffee b/services/filestore/app/coffee/BucketController.coffee index 9192bd147e..9392090e83 100644 --- a/services/filestore/app/coffee/BucketController.coffee +++ b/services/filestore/app/coffee/BucketController.coffee @@ -7,8 +7,8 @@ Errors = require('./Errors') module.exports = BucketController = getFile: (req, res)-> - {bucket} = req - key = req[0] + {bucket} = req.params + key = req.params[0] {format, style} = req.query credentials = settings.filestore.s3&[bucket] options = { diff --git a/services/filestore/test/unit/coffee/BucketControllerTests.coffee b/services/filestore/test/unit/coffee/BucketControllerTests.coffee index fc91d08793..461f3f03d6 100644 --- a/services/filestore/test/unit/coffee/BucketControllerTests.coffee +++ b/services/filestore/test/unit/coffee/BucketControllerTests.coffee @@ -43,9 +43,10 @@ describe "BucketController", -> @bucket = "user_files" @key = "#{@project_id}/#{@file_id}" @req = - bucket:@bucket - 0:@key query:{} + params: + bucket: @bucket + 0: @key headers: {} @res = setHeader: -> From 600ab3ce67f5635cc7e95f05bb348abfae19d863 Mon Sep 17 00:00:00 2001 From: Michael Mazour Date: Wed, 4 Jul 2018 16:39:41 +0100 Subject: [PATCH 243/555] Amend: remove problematic ampersand --- services/filestore/app/coffee/BucketController.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/BucketController.coffee b/services/filestore/app/coffee/BucketController.coffee index 9392090e83..2b983e77a3 100644 --- a/services/filestore/app/coffee/BucketController.coffee +++ b/services/filestore/app/coffee/BucketController.coffee @@ -10,7 +10,7 @@ module.exports = BucketController = {bucket} = req.params key = req.params[0] {format, style} = req.query - credentials = settings.filestore.s3&[bucket] + credentials = settings.filestore.s3[bucket] options = { key: key, bucket: bucket, From 336a38ec1e435d3bc504c1d08dc8f855b9a673dd Mon Sep 17 00:00:00 2001 From: Michael Mazour Date: Wed, 4 Jul 2018 16:41:31 +0100 Subject: [PATCH 244/555] Amend: scrub secrets from logs Calls to `getFile` can now include S3 credentials in `opts`, so sanitize before writing to opts to log. 
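For illustration (the `_scrubSecrets` helper itself is added in the FileHandler diff below), a small sketch of the intended effect; the option values here are invented:

	# Illustration only: the option values are made up.
	opts =
		start: 0
		end: 99
		credentials: {auth_key: "AKIA...", auth_secret: "..."}

	safeOpts = FileHandler._scrubSecrets opts
	# safeOpts is a shallow copy without the credentials key, e.g. {start: 0, end: 99},
	# so only the scrubbed object ever reaches the logger.
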
--- services/filestore/app/coffee/FileHandler.coffee | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 93cad984dd..2eeb09bc74 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -7,7 +7,7 @@ KeyBuilder = require("./KeyBuilder") async = require("async") ImageOptimiser = require("./ImageOptimiser") -module.exports = +module.exports = FileHandler = insertFile: (bucket, key, stream, callback)-> convertedKey = KeyBuilder.getConvertedFolderKey key @@ -23,7 +23,8 @@ module.exports = ], callback getFile: (bucket, key, opts = {}, callback)-> - logger.log bucket:bucket, key:key, opts:opts, "getting file" + # In this call, opts can contain credentials + logger.log bucket:bucket, key:key, opts:@_scrubSecrets(opts), "getting file" if !opts.format? and !opts.style? @_getStandardFile bucket, key, opts, callback else @@ -32,7 +33,7 @@ module.exports = _getStandardFile: (bucket, key, opts, callback)-> PersistorManager.getFileStream bucket, key, opts, (err, fileStream)-> if err? - logger.err bucket:bucket, key:key, opts:opts, "error getting fileStream" + logger.err bucket:bucket, key:key, opts:FileHandler._scrubSecrets(opts), "error getting fileStream" callback err, fileStream _getConvertedFile: (bucket, key, opts, callback)-> @@ -71,7 +72,7 @@ module.exports = return callback(err) done = (err, destPath)-> if err? - logger.err err:err, bucket:bucket, originalKey:originalKey, opts:opts, "error converting file" + logger.err err:err, bucket:bucket, originalKey:originalKey, opts:FileHandler._scrubSecrets(opts), "error converting file" return callback(err) LocalFileWriter.deleteFile originalFsPath, -> callback(err, destPath, originalFsPath) @@ -98,3 +99,8 @@ module.exports = if err? logger.err bucket:bucket, project_id:project_id, "error getting size" callback err, size + + _scrubSecrets: (opts)-> + safe = Object.assign {}, opts + delete safe.credentials + safe From 03033409c6811720e1f3f2673a5c5fcef71f2b54 Mon Sep 17 00:00:00 2001 From: Michael Mazour Date: Fri, 6 Jul 2018 09:12:59 +0100 Subject: [PATCH 245/555] Amend: remove unused params --- services/filestore/app/coffee/BucketController.coffee | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/coffee/BucketController.coffee b/services/filestore/app/coffee/BucketController.coffee index 2b983e77a3..ed7781fdda 100644 --- a/services/filestore/app/coffee/BucketController.coffee +++ b/services/filestore/app/coffee/BucketController.coffee @@ -9,7 +9,6 @@ module.exports = BucketController = getFile: (req, res)-> {bucket} = req.params key = req.params[0] - {format, style} = req.query credentials = settings.filestore.s3[bucket] options = { key: key, @@ -20,11 +19,11 @@ module.exports = BucketController = logger.log key:key, bucket:bucket, "receiving request to get file from bucket" FileHandler.getFile bucket, key, options, (err, fileStream)-> if err? 
- logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file from bucket" + logger.err err:err, key:key, bucket:bucket, "problem getting file from bucket" if err instanceof Errors.NotFoundError return res.send 404 else return res.send 500 else - logger.log key:key, bucket:bucket, format:format, style:style, "sending bucket file to response" + logger.log key:key, bucket:bucket, "sending bucket file to response" fileStream.pipe res From 7feafccf31b830a3cf39a0d3c3413623178c45bf Mon Sep 17 00:00:00 2001 From: Michael Mazour Date: Fri, 6 Jul 2018 09:28:09 +0100 Subject: [PATCH 246/555] Amend: safely navigate to bucket credentials --- services/filestore/app/coffee/BucketController.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/BucketController.coffee b/services/filestore/app/coffee/BucketController.coffee index ed7781fdda..81c660ec69 100644 --- a/services/filestore/app/coffee/BucketController.coffee +++ b/services/filestore/app/coffee/BucketController.coffee @@ -9,7 +9,7 @@ module.exports = BucketController = getFile: (req, res)-> {bucket} = req.params key = req.params[0] - credentials = settings.filestore.s3[bucket] + credentials = settings.filestore.s3?[bucket] options = { key: key, bucket: bucket, From 2da15f2eb301002a656d53ad8fa3520f1da018c0 Mon Sep 17 00:00:00 2001 From: Michael Mazour Date: Fri, 6 Jul 2018 10:28:02 +0100 Subject: [PATCH 247/555] Amend - improve documentation of settings --- services/filestore/config/settings.defaults.coffee | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 92c4a7ec8d..ef44bbb604 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -25,12 +25,16 @@ module.exports = template_files: Path.resolve(__dirname + "/../template_files") # if you are using S3, then fill in your S3 details below # s3: - # key: "" - # secret: "" + # key: "" # default + # secret: "" # default + # bucketname1: # secrets for bucketname1 + # auth_key: "" + # auth_secret: "" + # bucketname2: # secrets for bucketname2... path: uploadFolder: Path.resolve(__dirname + "/../uploads") - + commands: # Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"] convertCommandPrefix: [] From 3e1ef3af635bad5d9e40352426a4257b3aa2c4d5 Mon Sep 17 00:00:00 2001 From: Michael Mazour Date: Fri, 6 Jul 2018 11:05:19 +0100 Subject: [PATCH 248/555] Populate S3 settings from environment variable --- services/filestore/config/settings.defaults.coffee | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index ef44bbb604..62d42cf9e3 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -23,7 +23,8 @@ module.exports = user_files: Path.resolve(__dirname + "/../user_files") public_files: Path.resolve(__dirname + "/../public_files") template_files: Path.resolve(__dirname + "/../template_files") - # if you are using S3, then fill in your S3 details below + # if you are using S3, then fill in your S3 details below, + # or use env var with the same structure. 
# s3: # key: "" # default # secret: "" # default @@ -31,6 +32,7 @@ module.exports = # auth_key: "" # auth_secret: "" # bucketname2: # secrets for bucketname2... + s3: JSON.parse process.env['S3_CREDENTIALS'] if process.env['S3_CREDENTIALS'] path: uploadFolder: Path.resolve(__dirname + "/../uploads") From c4e3f9eb02924d53d3a32e58324c2b5ac34aad49 Mon Sep 17 00:00:00 2001 From: Michael Mazour Date: Fri, 6 Jul 2018 11:05:40 +0100 Subject: [PATCH 249/555] Amend: tests for populating S3 settings from environment variable --- .../test/unit/coffee/SettingsTests.coffee | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 services/filestore/test/unit/coffee/SettingsTests.coffee diff --git a/services/filestore/test/unit/coffee/SettingsTests.coffee b/services/filestore/test/unit/coffee/SettingsTests.coffee new file mode 100644 index 0000000000..95278622ca --- /dev/null +++ b/services/filestore/test/unit/coffee/SettingsTests.coffee @@ -0,0 +1,22 @@ +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/BucketController.js" +SandboxedModule = require('sandboxed-module') + +describe "Settings", -> + describe "s3", -> + it "should use JSONified env var if present", (done)-> + s3_settings = + key: 'default_key' + secret: 'default_secret' + bucket1: + auth_key: 'bucket1_key' + auth_secret: 'bucket1_secret' + process.env['S3_CREDENTIALS'] = JSON.stringify s3_settings + + settings =require('settings-sharelatex') + expect(settings.filestore.s3).to.deep.equal s3_settings + done() From c14476c0c1e346a721cb3ba7ea514e9f061e430d Mon Sep 17 00:00:00 2001 From: Michael Mazour Date: Wed, 4 Jul 2018 11:18:55 +0100 Subject: [PATCH 250/555] Add endpoint for arbitrary bucket fetch Add `/bucket/:bucket/key/*`, which fetches the file from the given bucket at the given path. Uses auth stored at `settings.filestore.s3.{{bucketName}}` if present, and otherwise default auth. 
--- services/filestore/app.coffee | 7 +- .../app/coffee/BucketController.coffee | 29 ++++++++ .../filestore/app/coffee/FileHandler.coffee | 14 ++-- .../app/coffee/S3PersistorManager.coffee | 4 +- .../filestore/config/settings.defaults.coffee | 10 ++- .../unit/coffee/BucketControllerTests.coffee | 69 +++++++++++++++++++ .../coffee/S3PersistorManagerTests.coffee | 35 ++++++++++ 7 files changed, 156 insertions(+), 12 deletions(-) create mode 100644 services/filestore/app/coffee/BucketController.coffee create mode 100644 services/filestore/test/unit/coffee/BucketControllerTests.coffee diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index eb97ad48dd..ce10d2c91e 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -4,6 +4,7 @@ logger.initialize("filestore") settings = require("settings-sharelatex") request = require("request") fileController = require("./app/js/FileController") +bucketController = require("./app/js/BucketController") keyBuilder = require("./app/js/KeyBuilder") healthCheckController = require("./app/js/HealthCheckController") domain = require("domain") @@ -18,7 +19,7 @@ Metrics.memory.monitor(logger) app.configure -> app.use Metrics.http.monitor(logger) - + app.configure 'development', -> console.log "Development Enviroment" app.use express.errorHandler({ dumpExceptions: true, showStack: true }) @@ -86,6 +87,8 @@ app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, app.get "/project/:project_id/size", keyBuilder.publicProjectKey, fileController.directorySize +app.get "/bucket/:bucket/key/*", bucketController.getFile + app.get "/heapdump", (req, res)-> require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename)-> res.send filename @@ -103,8 +106,6 @@ app.get '/status', (req, res)-> app.get "/health_check", healthCheckController.check - - app.get '*', (req, res)-> diff --git a/services/filestore/app/coffee/BucketController.coffee b/services/filestore/app/coffee/BucketController.coffee new file mode 100644 index 0000000000..81c660ec69 --- /dev/null +++ b/services/filestore/app/coffee/BucketController.coffee @@ -0,0 +1,29 @@ +settings = require("settings-sharelatex") +logger = require("logger-sharelatex") +FileHandler = require("./FileHandler") +metrics = require("metrics-sharelatex") +Errors = require('./Errors') + +module.exports = BucketController = + + getFile: (req, res)-> + {bucket} = req.params + key = req.params[0] + credentials = settings.filestore.s3?[bucket] + options = { + key: key, + bucket: bucket, + credentials: credentials + } + metrics.inc "#{bucket}.getFile" + logger.log key:key, bucket:bucket, "receiving request to get file from bucket" + FileHandler.getFile bucket, key, options, (err, fileStream)-> + if err? 
+ logger.err err:err, key:key, bucket:bucket, "problem getting file from bucket" + if err instanceof Errors.NotFoundError + return res.send 404 + else + return res.send 500 + else + logger.log key:key, bucket:bucket, "sending bucket file to response" + fileStream.pipe res diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 93cad984dd..2eeb09bc74 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -7,7 +7,7 @@ KeyBuilder = require("./KeyBuilder") async = require("async") ImageOptimiser = require("./ImageOptimiser") -module.exports = +module.exports = FileHandler = insertFile: (bucket, key, stream, callback)-> convertedKey = KeyBuilder.getConvertedFolderKey key @@ -23,7 +23,8 @@ module.exports = ], callback getFile: (bucket, key, opts = {}, callback)-> - logger.log bucket:bucket, key:key, opts:opts, "getting file" + # In this call, opts can contain credentials + logger.log bucket:bucket, key:key, opts:@_scrubSecrets(opts), "getting file" if !opts.format? and !opts.style? @_getStandardFile bucket, key, opts, callback else @@ -32,7 +33,7 @@ module.exports = _getStandardFile: (bucket, key, opts, callback)-> PersistorManager.getFileStream bucket, key, opts, (err, fileStream)-> if err? - logger.err bucket:bucket, key:key, opts:opts, "error getting fileStream" + logger.err bucket:bucket, key:key, opts:FileHandler._scrubSecrets(opts), "error getting fileStream" callback err, fileStream _getConvertedFile: (bucket, key, opts, callback)-> @@ -71,7 +72,7 @@ module.exports = return callback(err) done = (err, destPath)-> if err? - logger.err err:err, bucket:bucket, originalKey:originalKey, opts:opts, "error converting file" + logger.err err:err, bucket:bucket, originalKey:originalKey, opts:FileHandler._scrubSecrets(opts), "error converting file" return callback(err) LocalFileWriter.deleteFile originalFsPath, -> callback(err, destPath, originalFsPath) @@ -98,3 +99,8 @@ module.exports = if err? 
logger.err bucket:bucket, project_id:project_id, "error getting size" callback err, size + + _scrubSecrets: (opts)-> + safe = Object.assign {}, opts + delete safe.credentials + safe diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index b1a03fb4f4..2bd6eb0e9b 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -68,8 +68,8 @@ module.exports = callback = _.once callback logger.log bucketName:bucketName, key:key, "getting file from s3" s3Client = knox.createClient - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret + key: opts.credentials?.auth_key || settings.filestore.s3.key + secret: opts.credentials?.auth_secret || settings.filestore.s3.secret bucket: bucketName s3Stream = s3Client.get(key, headers) s3Stream.end() diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 92c4a7ec8d..ef44bbb604 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -25,12 +25,16 @@ module.exports = template_files: Path.resolve(__dirname + "/../template_files") # if you are using S3, then fill in your S3 details below # s3: - # key: "" - # secret: "" + # key: "" # default + # secret: "" # default + # bucketname1: # secrets for bucketname1 + # auth_key: "" + # auth_secret: "" + # bucketname2: # secrets for bucketname2... path: uploadFolder: Path.resolve(__dirname + "/../uploads") - + commands: # Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"] convertCommandPrefix: [] diff --git a/services/filestore/test/unit/coffee/BucketControllerTests.coffee b/services/filestore/test/unit/coffee/BucketControllerTests.coffee new file mode 100644 index 0000000000..461f3f03d6 --- /dev/null +++ b/services/filestore/test/unit/coffee/BucketControllerTests.coffee @@ -0,0 +1,69 @@ +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/BucketController.js" +SandboxedModule = require('sandboxed-module') + +describe "BucketController", -> + + beforeEach -> + @PersistorManager = + sendStream: sinon.stub() + copyFile: sinon.stub() + deleteFile:sinon.stub() + + @settings = + s3: + buckets: + user_files:"user_files" + filestore: + backend: "s3" + s3: + secret: "secret" + key: "this_key" + + @FileHandler = + getFile: sinon.stub() + deleteFile: sinon.stub() + insertFile: sinon.stub() + getDirectorySize: sinon.stub() + @LocalFileWriter = {} + @controller = SandboxedModule.require modulePath, requires: + "./LocalFileWriter":@LocalFileWriter + "./FileHandler": @FileHandler + "./PersistorManager":@PersistorManager + "settings-sharelatex": @settings + "logger-sharelatex": + log:-> + err:-> + @project_id = "project_id" + @file_id = "file_id" + @bucket = "user_files" + @key = "#{@project_id}/#{@file_id}" + @req = + query:{} + params: + bucket: @bucket + 0: @key + headers: {} + @res = + setHeader: -> + @fileStream = {} + + describe "getFile", -> + + it "should pipe the stream", (done)-> + @FileHandler.getFile.callsArgWith(3, null, @fileStream) + @fileStream.pipe = (res)=> + res.should.equal @res + done() + @controller.getFile @req, @res + + it "should send a 500 if there is a problem", (done)-> + @FileHandler.getFile.callsArgWith(3, "error") + @res.send = 
(code)=> + code.should.equal 500 + done() + @controller.getFile @req, @res diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 3a3e7b0d86..b48fde7820 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -55,6 +55,41 @@ describe "S3PersistorManagerTests", -> @stubbedKnoxClient.get.calledWith(@key).should.equal true done() + it "should use default auth", (done)-> + @stubbedKnoxClient.get.returns( + on:-> + end:-> + ) + @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback + clientParams = + key: @settings.filestore.s3.key + secret: @settings.filestore.s3.secret + bucket: @bucketName + @knox.createClient.calledWith(clientParams).should.equal true + done() + + describe "with supplied auth", -> + beforeEach -> + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @credentials = + auth_key: "that_key" + auth_secret: "that_secret" + @opts = + credentials: @credentials + + it "should use supplied auth", (done)-> + @stubbedKnoxClient.get.returns( + on:-> + end:-> + ) + @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback + clientParams = + key: @credentials.auth_key + secret: @credentials.auth_secret + bucket: @bucketName + @knox.createClient.calledWith(clientParams).should.equal true + done() + describe "with start and end options", -> beforeEach -> @opts = From 836ff145b863c28b55130cd12f9b449847e46faa Mon Sep 17 00:00:00 2001 From: Michael Mazour Date: Fri, 6 Jul 2018 11:05:19 +0100 Subject: [PATCH 251/555] Populate S3 settings from environment variable --- .../filestore/config/settings.defaults.coffee | 4 +++- .../test/unit/coffee/SettingsTests.coffee | 22 +++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) create mode 100644 services/filestore/test/unit/coffee/SettingsTests.coffee diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index ef44bbb604..62d42cf9e3 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -23,7 +23,8 @@ module.exports = user_files: Path.resolve(__dirname + "/../user_files") public_files: Path.resolve(__dirname + "/../public_files") template_files: Path.resolve(__dirname + "/../template_files") - # if you are using S3, then fill in your S3 details below + # if you are using S3, then fill in your S3 details below, + # or use env var with the same structure. # s3: # key: "" # default # secret: "" # default @@ -31,6 +32,7 @@ module.exports = # auth_key: "" # auth_secret: "" # bucketname2: # secrets for bucketname2... 
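The `getFileStream` unit tests above pin down the credential selection order: credentials supplied on the request win, and the global key pair from settings is the fallback. A small sketch of that selection, reusing the expressions from the S3PersistorManager diff; `buildClientOptions` is an illustrative name, not a function in this repository:

  # Sketch of the credential fallback used when creating the knox client
  buildClientOptions = (bucketName, opts, settings) ->
    params =
      key:    opts.credentials?.auth_key || settings.filestore.s3.key
      secret: opts.credentials?.auth_secret || settings.filestore.s3.secret
      bucket: bucketName
    params

  # buildClientOptions("user_files", {}, settings)                         -> global key/secret
  # buildClientOptions("user_files", {credentials: bucketCreds}, settings) -> per-bucket pair
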
+ s3: JSON.parse process.env['S3_CREDENTIALS'] if process.env['S3_CREDENTIALS'] path: uploadFolder: Path.resolve(__dirname + "/../uploads") diff --git a/services/filestore/test/unit/coffee/SettingsTests.coffee b/services/filestore/test/unit/coffee/SettingsTests.coffee new file mode 100644 index 0000000000..95278622ca --- /dev/null +++ b/services/filestore/test/unit/coffee/SettingsTests.coffee @@ -0,0 +1,22 @@ +assert = require("chai").assert +sinon = require('sinon') +chai = require('chai') +should = chai.should() +expect = chai.expect +modulePath = "../../../app/js/BucketController.js" +SandboxedModule = require('sandboxed-module') + +describe "Settings", -> + describe "s3", -> + it "should use JSONified env var if present", (done)-> + s3_settings = + key: 'default_key' + secret: 'default_secret' + bucket1: + auth_key: 'bucket1_key' + auth_secret: 'bucket1_secret' + process.env['S3_CREDENTIALS'] = JSON.stringify s3_settings + + settings =require('settings-sharelatex') + expect(settings.filestore.s3).to.deep.equal s3_settings + done() From 089bf3e0846bc2195300d762ae90a8499177718e Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 10 Jul 2018 14:17:37 +0100 Subject: [PATCH 252/555] seperate the standard s3 creds from the bucket s3 creds --- services/filestore/app/coffee/BucketController.coffee | 2 +- services/filestore/config/settings.defaults.coffee | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/coffee/BucketController.coffee b/services/filestore/app/coffee/BucketController.coffee index 81c660ec69..bb7bd4544b 100644 --- a/services/filestore/app/coffee/BucketController.coffee +++ b/services/filestore/app/coffee/BucketController.coffee @@ -9,7 +9,7 @@ module.exports = BucketController = getFile: (req, res)-> {bucket} = req.params key = req.params[0] - credentials = settings.filestore.s3?[bucket] + credentials = settings.filestore.s3BucketCreds?[bucket] options = { key: key, bucket: bucket, diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 62d42cf9e3..59f5d8a0a6 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -28,11 +28,14 @@ module.exports = # s3: # key: "" # default # secret: "" # default + # + # s3BucketCreds: # bucketname1: # secrets for bucketname1 # auth_key: "" # auth_secret: "" # bucketname2: # secrets for bucketname2... 
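For the per-bucket lookup that BucketController performs via `settings.filestore.s3BucketCreds?[bucket]`, the `S3_BUCKET_CREDENTIALS` environment variable introduced just below is expected to hold a JSON object keyed by bucket name. A sketch of that shape, with invented bucket names and values (compare the fixed settings test later in this series):

  # Illustrative only: the object S3_BUCKET_CREDENTIALS should parse into.
  # Bucket names and key values here are invented.
  s3BucketCreds =
    user_files:
      auth_key: "AKIA-USER-FILES"
      auth_secret: "secret-for-user-files"
    template_files:
      auth_key: "AKIA-TEMPLATES"
      auth_secret: "secret-for-templates"

  # In a test or startup script the env var would carry the serialised form:
  process.env['S3_BUCKET_CREDENTIALS'] = JSON.stringify s3BucketCreds

  # BucketController then resolves the block for the requested bucket:
  credentials = s3BucketCreds?['user_files']   # undefined for buckets without overrides
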
- s3: JSON.parse process.env['S3_CREDENTIALS'] if process.env['S3_CREDENTIALS'] + + s3BucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS'] path: uploadFolder: Path.resolve(__dirname + "/../uploads") From 1e4e68c64ab024b5af2e8c345f6c875736bb8e17 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 10 Jul 2018 14:55:40 +0100 Subject: [PATCH 253/555] fix settings test --- services/filestore/test/unit/coffee/SettingsTests.coffee | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/services/filestore/test/unit/coffee/SettingsTests.coffee b/services/filestore/test/unit/coffee/SettingsTests.coffee index 95278622ca..efb690511e 100644 --- a/services/filestore/test/unit/coffee/SettingsTests.coffee +++ b/services/filestore/test/unit/coffee/SettingsTests.coffee @@ -4,19 +4,16 @@ chai = require('chai') should = chai.should() expect = chai.expect modulePath = "../../../app/js/BucketController.js" -SandboxedModule = require('sandboxed-module') describe "Settings", -> describe "s3", -> it "should use JSONified env var if present", (done)-> s3_settings = - key: 'default_key' - secret: 'default_secret' bucket1: auth_key: 'bucket1_key' auth_secret: 'bucket1_secret' - process.env['S3_CREDENTIALS'] = JSON.stringify s3_settings + process.env['S3_BUCKET_CREDENTIALS'] = JSON.stringify s3_settings - settings =require('settings-sharelatex') - expect(settings.filestore.s3).to.deep.equal s3_settings + settings = require("settings-sharelatex") + expect(settings.filestore.s3BucketCreds).to.deep.equal s3_settings done() From 4784ae33a23a61355c1873294050be3ad955c0cf Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Tue, 17 Jul 2018 10:51:15 +0100 Subject: [PATCH 254/555] Bump build scripts to 1.1.8 --- services/filestore/Jenkinsfile | 42 ++++++++++++++++++++++++ services/filestore/Makefile | 10 +++--- services/filestore/docker-compose.ci.yml | 2 +- services/filestore/docker-compose.yml | 2 +- 4 files changed, 49 insertions(+), 7 deletions(-) diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index efe844839b..6aa3f14577 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -3,12 +3,33 @@ String cron_string = BRANCH_NAME == "master" ? 
"@daily" : "" pipeline { agent any + environment { + GIT_PROJECT = "filestore/-sharelatex" + JENKINS_WORKFLOW = "filestore/-sharelatex" + TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline" + GIT_API_URL = "https://api.github.com/repos/sharelatex/${GIT_PROJECT}/statuses/$GIT_COMMIT" + } + triggers { pollSCM('* * * * *') cron(cron_string) } stages { + stage('Install') { + steps { + withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { + sh "curl $GIT_API_URL \ + --data '{ \ + \"state\" : \"pending\", \ + \"target_url\": \"$TARGET_URL\", \ + \"description\": \"Your build is underway\", \ + \"context\": \"ci/jenkins\" }' \ + -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" + } + } + } + stage('Build') { steps { sh 'make build' @@ -62,11 +83,32 @@ pipeline { sh 'make clean' } + success { + withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { + sh "curl $GIT_API_URL \ + --data '{ \ + \"state\" : \"success\", \ + \"target_url\": \"$TARGET_URL\", \ + \"description\": \"Your build succeeded!\", \ + \"context\": \"ci/jenkins\" }' \ + -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" + } + } + failure { mail(from: "${EMAIL_ALERT_FROM}", to: "${EMAIL_ALERT_TO}", subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}", body: "Build: ${BUILD_URL}") + withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { + sh "curl $GIT_API_URL \ + --data '{ \ + \"state\" : \"failure\", \ + \"target_url\": \"$TARGET_URL\", \ + \"description\": \"Your build failed\", \ + \"context\": \"ci/jenkins\" }' \ + -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" + } } } diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 76d583b35e..8a76ec6069 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,11 +1,11 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.7 +# Version: 1.1.8 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) -PROJECT_NAME = filestore +PROJECT_NAME = filestore/ DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ BRANCH_NAME=$(BRANCH_NAME) \ @@ -26,16 +26,16 @@ clean: test: test_unit test_acceptance test_unit: - @[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit + @[ ! -d test/unit ] && echo "filestore/ has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run - @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance + @[ ! -d test/acceptance ] && echo "filestore/ has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance test_clean: $(DOCKER_COMPOSE) down -v -t 0 test_acceptance_pre_run: - @[ ! -f test/acceptance/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run + @[ ! 
-f test/acceptance/scripts/pre-run ] && echo "filestore/ has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run build: docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ --tag gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 57299e0d2f..c3dafce4e4 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.7 +# Version: 1.1.8 version: "2" diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 659bbab129..4b23773962 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.7 +# Version: 1.1.8 version: "2" From d182e5329ce661a1547de0008e34a24f75e93ade Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Tue, 17 Jul 2018 10:52:20 +0100 Subject: [PATCH 255/555] Add cr-test2 GCR project --- services/filestore/Jenkinsfile | 6 ++++++ services/filestore/Makefile | 2 ++ 2 files changed, 8 insertions(+) diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index 6aa3f14577..36e287d333 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -63,6 +63,12 @@ pipeline { sh 'DOCKER_REPO=gcr.io/csh-staging make publish' sh 'docker logout https://gcr.io/csh-staging' + withCredentials([file(credentialsId: 'gcr.io_cr-test2', variable: 'DOCKER_REPO_KEY_PATH')]) { + sh 'docker login -u _json_key --password-stdin https://gcr.io/cr-test2 < ${DOCKER_REPO_KEY_PATH}' + } + sh 'DOCKER_REPO=gcr.io/cr-test2 make publish' + sh 'docker logout https://gcr.io/cr-test2' + } } diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 8a76ec6069..12f0541e04 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -18,6 +18,7 @@ clean: docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) docker rmi gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) docker rmi gcr.io/csh-staging/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + docker rmi gcr.io/cr-test2/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) rm -f app.js rm -rf app/js rm -rf test/unit/js @@ -40,6 +41,7 @@ build: docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ --tag gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ --tag gcr.io/csh-staging/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag gcr.io/cr-test2/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ . 
publish: From c7733d05ccd20d87a6828625f0027fa93607ceed Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Tue, 17 Jul 2018 11:06:33 +0100 Subject: [PATCH 256/555] Remove trailing / from project name --- services/filestore/Jenkinsfile | 4 ++-- services/filestore/Makefile | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index 36e287d333..2c8b2903fc 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -4,8 +4,8 @@ pipeline { agent any environment { - GIT_PROJECT = "filestore/-sharelatex" - JENKINS_WORKFLOW = "filestore/-sharelatex" + GIT_PROJECT = "filestore-sharelatex" + JENKINS_WORKFLOW = "filestore-sharelatex" TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline" GIT_API_URL = "https://api.github.com/repos/sharelatex/${GIT_PROJECT}/statuses/$GIT_COMMIT" } diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 12f0541e04..1638c56f4d 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -5,7 +5,7 @@ BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) -PROJECT_NAME = filestore/ +PROJECT_NAME = filestore DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ BRANCH_NAME=$(BRANCH_NAME) \ @@ -27,16 +27,16 @@ clean: test: test_unit test_acceptance test_unit: - @[ ! -d test/unit ] && echo "filestore/ has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit + @[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run - @[ ! -d test/acceptance ] && echo "filestore/ has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance + @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance test_clean: $(DOCKER_COMPOSE) down -v -t 0 test_acceptance_pre_run: - @[ ! -f test/acceptance/scripts/pre-run ] && echo "filestore/ has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run + @[ ! 
-f test/acceptance/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run build: docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ --tag gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ From 104406953529aa380034e26cd9e58040452bc94a Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Tue, 17 Jul 2018 12:03:14 +0100 Subject: [PATCH 257/555] Remove csh-gcdm-test and csh-staging repos --- services/filestore/Jenkinsfile | 12 ------------ services/filestore/Makefile | 4 ---- 2 files changed, 16 deletions(-) diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index 2c8b2903fc..36da0fa0c2 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -51,18 +51,6 @@ pipeline { stage('Package and publish build') { steps { - withCredentials([file(credentialsId: 'gcr.io_csh-gcdm-test', variable: 'DOCKER_REPO_KEY_PATH')]) { - sh 'docker login -u _json_key --password-stdin https://gcr.io/csh-gcdm-test < ${DOCKER_REPO_KEY_PATH}' - } - sh 'DOCKER_REPO=gcr.io/csh-gcdm-test make publish' - sh 'docker logout https://gcr.io/csh-gcdm-test' - - withCredentials([file(credentialsId: 'gcr.io_csh-staging', variable: 'DOCKER_REPO_KEY_PATH')]) { - sh 'docker login -u _json_key --password-stdin https://gcr.io/csh-staging < ${DOCKER_REPO_KEY_PATH}' - } - sh 'DOCKER_REPO=gcr.io/csh-staging make publish' - sh 'docker logout https://gcr.io/csh-staging' - withCredentials([file(credentialsId: 'gcr.io_cr-test2', variable: 'DOCKER_REPO_KEY_PATH')]) { sh 'docker login -u _json_key --password-stdin https://gcr.io/cr-test2 < ${DOCKER_REPO_KEY_PATH}' } diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 1638c56f4d..597b376dfb 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -16,8 +16,6 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ clean: docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) - docker rmi gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) - docker rmi gcr.io/csh-staging/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) docker rmi gcr.io/cr-test2/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) rm -f app.js rm -rf app/js @@ -39,8 +37,6 @@ test_acceptance_pre_run: @[ ! -f test/acceptance/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run build: docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ - --tag gcr.io/csh-gcdm-test/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ - --tag gcr.io/csh-staging/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ --tag gcr.io/cr-test2/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ . 
From f9f599e3e755eede8cb17069b2b4ed324102d92e Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Wed, 18 Jul 2018 11:30:01 +0100 Subject: [PATCH 258/555] Push container images to overleaf-ops project --- services/filestore/Jenkinsfile | 8 ++++---- services/filestore/Makefile | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index 36da0fa0c2..1ab9a075e1 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -51,11 +51,11 @@ pipeline { stage('Package and publish build') { steps { - withCredentials([file(credentialsId: 'gcr.io_cr-test2', variable: 'DOCKER_REPO_KEY_PATH')]) { - sh 'docker login -u _json_key --password-stdin https://gcr.io/cr-test2 < ${DOCKER_REPO_KEY_PATH}' + withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) { + sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}' } - sh 'DOCKER_REPO=gcr.io/cr-test2 make publish' - sh 'docker logout https://gcr.io/cr-test2' + sh 'DOCKER_REPO=gcr.io/overleaf-ops make publish' + sh 'docker logout https://gcr.io/overleaf-ops' } } diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 597b376dfb..8500a66cf6 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -16,7 +16,7 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ clean: docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) - docker rmi gcr.io/cr-test2/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) rm -f app.js rm -rf app/js rm -rf test/unit/js @@ -37,7 +37,7 @@ test_acceptance_pre_run: @[ ! -f test/acceptance/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run build: docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ - --tag gcr.io/cr-test2/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ . 
publish: From cba013cd710f34f07c2b4c370e8f04cf9659469e Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 23 Jul 2018 17:06:31 +0100 Subject: [PATCH 259/555] change ordering of docker rmi --- services/filestore/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 8500a66cf6..49f86269d2 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -15,12 +15,12 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ clean: - docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) rm -f app.js rm -rf app/js rm -rf test/unit/js rm -rf test/acceptance/js + docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) test: test_unit test_acceptance From 0e9a535e1c8b3589f176f8019d6b56dd298af5fd Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Thu, 26 Jul 2018 16:05:11 +0100 Subject: [PATCH 260/555] Bump build script to 1.1.9 --- services/filestore/.github/ISSUE_TEMPLATE.md | 38 ++++++++++++++++ .../.github/PULL_REQUEST_TEMPLATE.md | 45 +++++++++++++++++++ services/filestore/Makefile | 4 +- services/filestore/buildscript.txt | 9 ++++ services/filestore/docker-compose.ci.yml | 2 +- services/filestore/docker-compose.yml | 2 +- 6 files changed, 96 insertions(+), 4 deletions(-) create mode 100644 services/filestore/.github/ISSUE_TEMPLATE.md create mode 100644 services/filestore/.github/PULL_REQUEST_TEMPLATE.md create mode 100644 services/filestore/buildscript.txt diff --git a/services/filestore/.github/ISSUE_TEMPLATE.md b/services/filestore/.github/ISSUE_TEMPLATE.md new file mode 100644 index 0000000000..e0093aa90c --- /dev/null +++ b/services/filestore/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,38 @@ + + +## Steps to Reproduce + + + +1. +2. +3. + +## Expected Behaviour + + +## Observed Behaviour + + + +## Context + + +## Technical Info + + +* URL: +* Browser Name and version: +* Operating System and version (desktop or mobile): +* Signed in as: +* Project and/or file: + +## Analysis + + +## Who Needs to Know? + + + +- +- diff --git a/services/filestore/.github/PULL_REQUEST_TEMPLATE.md b/services/filestore/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000000..ed25ee83c1 --- /dev/null +++ b/services/filestore/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,45 @@ + + +### Description + + + +#### Screenshots + + + +#### Related Issues / PRs + + + +### Review + + + +#### Potential Impact + + + +#### Manual Testing Performed + +- [ ] +- [ ] + +#### Accessibility + + + +### Deployment + + + +#### Deployment Checklist + +- [ ] Update documentation not included in the PR (if any) +- [ ] + +#### Metrics and Monitoring + + + +#### Who Needs to Know? diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 49f86269d2..9b34c3ccb9 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.8 +# Version: 1.1.9 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -15,12 +15,12 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ clean: + docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) rm -f app.js rm -rf app/js rm -rf test/unit/js rm -rf test/acceptance/js - docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) test: test_unit test_acceptance diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt new file mode 100644 index 0000000000..d4c335faa9 --- /dev/null +++ b/services/filestore/buildscript.txt @@ -0,0 +1,9 @@ +--script-version=1.1.9 +filestore +--node-version=6.9.5 +--acceptance-creds=None +--language=coffeescript +--dependencies=['mongo', 'redis'] +--docker-repos=['gcr.io/overleaf-ops'] +--kube=false +--build-target=docker diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index c3dafce4e4..17c4ddd2bf 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.8 +# Version: 1.1.9 version: "2" diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 4b23773962..dcbc14e683 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.8 +# Version: 1.1.9 version: "2" From 53dacc04ef90c172cb676a54645fcb9b0ac067dd Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Thu, 26 Jul 2018 16:06:30 +0100 Subject: [PATCH 261/555] Bump metrics from 1.3.0 to 1.8.1 --- services/filestore/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index 0f455df13f..524e5139ff 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -30,7 +30,7 @@ "heapdump": "^0.3.2", "knox": "~0.9.1", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.1.0", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.3.0", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.8.1", "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", "pngcrush": "0.0.3", From f4ec677296185e7c127bf8f1e57cb33cafc4b100 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 6 Sep 2018 17:01:18 +0100 Subject: [PATCH 262/555] add sentry to logging --- services/filestore/app.coffee | 3 +++ services/filestore/package.json | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index ce10d2c91e..957a3f6f40 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -11,6 +11,9 @@ domain = require("domain") appIsOk = true app = express() +if Settings.sentry?.dsn? 
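# Note on the check above: error reporting only switches on when a DSN is present.
# The patch does not show where Settings.sentry comes from; a settings fragment
# along these lines is one assumed way to supply it (the SENTRY_DSN variable name
# is hypothetical, not part of this patch):
#
#   sentry:
#     dsn: process.env['SENTRY_DSN']   # leave unset to keep Sentry reporting off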
+ logger.initializeErrorReporting(Settings.sentry.dsn) + Metrics = require "metrics-sharelatex" Metrics.initialize("filestore") Metrics.open_sockets.monitor(logger) diff --git a/services/filestore/package.json b/services/filestore/package.json index 4048d93e21..047801ec2f 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -18,7 +18,7 @@ "fs-extra": "^1.0.0", "heapdump": "^0.3.2", "knox": "~0.9.1", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.1.0", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.7", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.3.0", "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", From f3adf5a6531478236ed01ea29bf91e70a3b037cb Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 6 Sep 2018 17:01:40 +0100 Subject: [PATCH 263/555] fix shrinkwrap for upgraded logger-sharelatex module --- services/filestore/npm-shrinkwrap.json | 1382 +++++++++++++++++++++++- services/filestore/package.json | 1 + 2 files changed, 1362 insertions(+), 21 deletions(-) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 1b87652bb1..34fdb6b539 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -2,11 +2,62 @@ "name": "filestore-sharelatex", "version": "0.1.4", "dependencies": { + "abbrev": { + "version": "1.1.1", + "from": "abbrev@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz" + }, "accept-encoding": { "version": "0.1.0", "from": "accept-encoding@>=0.1.0 <0.2.0", "resolved": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz" }, + "ajv": { + "version": "5.5.2", + "from": "ajv@>=5.3.0 <6.0.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", + "dev": true + }, + "ansi-regex": { + "version": "0.2.1", + "from": "ansi-regex@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz" + }, + "ansi-styles": { + "version": "1.0.0", + "from": "ansi-styles@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.0.0.tgz", + "dev": true + }, + "argparse": { + "version": "0.1.16", + "from": "argparse@>=0.1.11 <0.2.0", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-0.1.16.tgz", + "dependencies": { + "underscore": { + "version": "1.7.0", + "from": "underscore@>=1.7.0 <1.8.0", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz" + }, + "underscore.string": { + "version": "2.4.0", + "from": "underscore.string@>=2.4.0 <2.5.0", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.4.0.tgz" + } + } + }, + "asn1": { + "version": "0.2.4", + "from": "asn1@>=0.2.3 <0.3.0", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", + "dev": true + }, + "assert-plus": { + "version": "1.0.0", + "from": "assert-plus@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "dev": true + }, "assertion-error": { "version": "1.0.2", "from": "assertion-error@>=1.0.1 <2.0.0", @@ -17,6 +68,12 @@ "from": "async@>=0.2.10 <0.3.0", "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz" }, + "asynckit": { + "version": "0.4.0", + "from": "asynckit@>=0.4.0 <0.5.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "dev": true + }, "aws-sdk": { "version": "2.116.0", "from": "aws-sdk@>=2.1.39 <3.0.0", @@ -34,6 
+91,18 @@ "from": "aws-sign@>=0.2.0 <0.3.0", "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.0.tgz" }, + "aws-sign2": { + "version": "0.7.0", + "from": "aws-sign2@>=0.7.0 <0.8.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "dev": true + }, + "aws4": { + "version": "1.8.0", + "from": "aws4@>=1.8.0 <2.0.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", + "dev": true + }, "balanced-match": { "version": "1.0.0", "from": "balanced-match@>=1.0.0 <2.0.0", @@ -50,6 +119,13 @@ "from": "batch@0.5.0", "resolved": "https://registry.npmjs.org/batch/-/batch-0.5.0.tgz" }, + "bcrypt-pbkdf": { + "version": "1.0.2", + "from": "bcrypt-pbkdf@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "dev": true, + "optional": true + }, "best-encoding": { "version": "0.1.1", "from": "best-encoding@>=0.1.1 <0.2.0", @@ -83,6 +159,26 @@ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz", "optional": true }, + "broadway": { + "version": "0.3.6", + "from": "broadway@>=0.3.2 <0.4.0", + "resolved": "https://registry.npmjs.org/broadway/-/broadway-0.3.6.tgz", + "dev": true, + "dependencies": { + "cliff": { + "version": "0.1.9", + "from": "cliff@0.1.9", + "resolved": "https://registry.npmjs.org/cliff/-/cliff-0.1.9.tgz", + "dev": true + }, + "winston": { + "version": "0.8.0", + "from": "winston@0.8.0", + "resolved": "https://registry.npmjs.org/winston/-/winston-0.8.0.tgz", + "dev": true + } + } + }, "buffer": { "version": "4.9.1", "from": "buffer@4.9.1", @@ -96,7 +192,8 @@ "bunyan": { "version": "1.3.6", "from": "bunyan@1.3.6", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.3.6.tgz" + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.3.6.tgz", + "dev": true }, "bytes": { "version": "0.2.1", @@ -113,16 +210,47 @@ "from": "chai@latest", "resolved": "https://registry.npmjs.org/chai/-/chai-4.1.2.tgz" }, + "chalk": { + "version": "0.4.0", + "from": "chalk@>=0.4.0 <0.5.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-0.4.0.tgz", + "dev": true + }, "check-error": { "version": "1.0.2", "from": "check-error@>=1.0.1 <2.0.0", "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz" }, + "cliff": { + "version": "0.1.10", + "from": "cliff@>=0.1.9 <0.2.0", + "resolved": "https://registry.npmjs.org/cliff/-/cliff-0.1.10.tgz", + "dev": true, + "dependencies": { + "colors": { + "version": "1.0.3", + "from": "colors@>=1.0.3 <1.1.0", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", + "dev": true + } + } + }, + "co": { + "version": "4.6.0", + "from": "co@>=4.6.0 <5.0.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "dev": true + }, "coffee-script": { "version": "1.7.1", "from": "coffee-script@>=1.7.1 <1.8.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz" }, + "colors": { + "version": "0.6.2", + "from": "colors@>=0.6.0-1 <0.7.0", + "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz" + }, "combined-stream": { "version": "0.0.7", "from": "combined-stream@>=0.0.4 <0.1.0", @@ -139,6 +267,44 @@ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "optional": true }, + "configstore": { + "version": "0.3.2", + "from": "configstore@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-0.3.2.tgz", + "dev": true, + "dependencies": { + "argparse": { + "version": "1.0.10", + "from": "argparse@>=1.0.7 <2.0.0", + 
"resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "dev": true + }, + "esprima": { + "version": "4.0.1", + "from": "esprima@>=4.0.0 <5.0.0", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "dev": true + }, + "graceful-fs": { + "version": "3.0.11", + "from": "graceful-fs@>=3.0.1 <4.0.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.11.tgz", + "dev": true + }, + "js-yaml": { + "version": "3.12.0", + "from": "js-yaml@>=3.1.0 <4.0.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.0.tgz", + "dev": true + }, + "mkdirp": { + "version": "0.5.1", + "from": "mkdirp@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "dev": true + } + } + }, "connect": { "version": "2.12.0", "from": "connect@2.12.0", @@ -174,6 +340,23 @@ "from": "crypto-browserify@1.0.9", "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-1.0.9.tgz" }, + "cycle": { + "version": "1.0.3", + "from": "cycle@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz", + "dev": true + }, + "dashdash": { + "version": "1.14.1", + "from": "dashdash@>=1.12.0 <2.0.0", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "dev": true + }, + "dateformat": { + "version": "1.0.2-1.2.3", + "from": "dateformat@1.0.2-1.2.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz" + }, "debug": { "version": "0.8.1", "from": "debug@>=0.7.3 <1.0.0", @@ -184,22 +367,81 @@ "from": "deep-eql@>=3.0.0 <4.0.0", "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz" }, + "deep-equal": { + "version": "1.0.1", + "from": "deep-equal@*", + "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.0.1.tgz", + "dev": true + }, "delayed-stream": { "version": "0.0.5", "from": "delayed-stream@0.0.5", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" }, + "diff": { + "version": "1.0.7", + "from": "diff@1.0.7", + "resolved": "https://registry.npmjs.org/diff/-/diff-1.0.7.tgz" + }, + "director": { + "version": "1.2.7", + "from": "director@1.2.7", + "resolved": "https://registry.npmjs.org/director/-/director-1.2.7.tgz", + "dev": true + }, "dtrace-provider": { "version": "0.4.0", "from": "dtrace-provider@>=0.4.0 <0.5.0", "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.4.0.tgz", + "dev": true, "optional": true }, + "ecc-jsbn": { + "version": "0.1.2", + "from": "ecc-jsbn@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "dev": true, + "optional": true + }, + "escape-string-regexp": { + "version": "1.0.5", + "from": "escape-string-regexp@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + }, + "esprima": { + "version": "1.0.4", + "from": "esprima@>=1.0.2 <1.1.0", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz" + }, + "event-stream": { + "version": "0.5.3", + "from": "event-stream@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/event-stream/-/event-stream-0.5.3.tgz", + "dev": true, + "dependencies": { + "optimist": { + "version": "0.2.8", + "from": "optimist@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.2.8.tgz", + "dev": true + } + } + }, + "eventemitter2": { + "version": "0.4.14", + "from": "eventemitter2@>=0.4.9 <0.5.0", + "resolved": 
"https://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz" + }, "events": { "version": "1.1.1", "from": "events@>=1.1.1 <2.0.0", "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz" }, + "exit": { + "version": "0.1.2", + "from": "exit@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz" + }, "express": { "version": "3.4.8", "from": "express@>=3.4.8 <3.5.0", @@ -212,11 +454,103 @@ } } }, + "extend": { + "version": "3.0.2", + "from": "extend@>=3.0.2 <3.1.0", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "dev": true + }, + "extsprintf": { + "version": "1.3.0", + "from": "extsprintf@1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "dev": true + }, + "eyes": { + "version": "0.1.8", + "from": "eyes@>=0.1.8 <0.2.0", + "resolved": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", + "dev": true + }, + "fast-deep-equal": { + "version": "1.1.0", + "from": "fast-deep-equal@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", + "dev": true + }, + "fast-json-stable-stringify": { + "version": "2.0.0", + "from": "fast-json-stable-stringify@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", + "dev": true + }, + "faye-websocket": { + "version": "0.4.4", + "from": "faye-websocket@>=0.4.3 <0.5.0", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.4.4.tgz", + "dev": true + }, + "findup-sync": { + "version": "0.1.3", + "from": "findup-sync@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz", + "dependencies": { + "glob": { + "version": "3.2.11", + "from": "glob@>=3.2.9 <3.3.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz" + }, + "lodash": { + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + }, + "minimatch": { + "version": "0.3.0", + "from": "minimatch@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz" + } + } + }, + "flatiron": { + "version": "0.4.3", + "from": "flatiron@>=0.4.2 <0.5.0", + "resolved": "https://registry.npmjs.org/flatiron/-/flatiron-0.4.3.tgz", + "dev": true, + "dependencies": { + "optimist": { + "version": "0.6.0", + "from": "optimist@0.6.0", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz", + "dev": true + } + } + }, + "forever": { + "version": "0.14.2", + "from": "forever@>=0.14.1 <0.15.0", + "resolved": "https://registry.npmjs.org/forever/-/forever-0.14.2.tgz", + "dev": true + }, "forever-agent": { "version": "0.2.0", "from": "forever-agent@>=0.2.0 <0.3.0", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz" }, + "forever-monitor": { + "version": "1.5.2", + "from": "forever-monitor@>=1.5.1 <1.6.0", + "resolved": "https://registry.npmjs.org/forever-monitor/-/forever-monitor-1.5.2.tgz", + "dev": true, + "dependencies": { + "minimatch": { + "version": "1.0.0", + "from": "minimatch@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-1.0.0.tgz", + "dev": true + } + } + }, "form-data": { "version": "0.0.10", "from": "form-data@>=0.0.3 <0.1.0", @@ -237,11 +571,28 @@ "from": "fs-extra@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-1.0.0.tgz" }, + "gaze": { + "version": "0.4.3", + "from": "gaze@>=0.4.0 <0.5.0", + "resolved": 
"https://registry.npmjs.org/gaze/-/gaze-0.4.3.tgz", + "dev": true + }, "get-func-name": { "version": "2.0.0", "from": "get-func-name@>=2.0.0 <3.0.0", "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz" }, + "getobject": { + "version": "0.1.0", + "from": "getobject@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/getobject/-/getobject-0.1.0.tgz" + }, + "getpass": { + "version": "0.1.7", + "from": "getpass@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "dev": true + }, "gettemporaryfilepath": { "version": "0.0.1", "from": "gettemporaryfilepath@0.0.1", @@ -253,11 +604,266 @@ "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", "optional": true }, + "globule": { + "version": "0.1.0", + "from": "globule@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/globule/-/globule-0.1.0.tgz", + "dev": true, + "dependencies": { + "glob": { + "version": "3.1.21", + "from": "glob@~3.1.21", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz", + "dev": true + }, + "graceful-fs": { + "version": "1.2.3", + "from": "graceful-fs@~1.2.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz", + "dev": true + }, + "inherits": { + "version": "1.0.2", + "from": "inherits@1", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz", + "dev": true + }, + "lodash": { + "version": "1.0.2", + "from": "lodash@>=1.0.1 <1.1.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-1.0.2.tgz", + "dev": true + }, + "minimatch": { + "version": "0.2.14", + "from": "minimatch@~0.2.11", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", + "dev": true + } + } + }, "graceful-fs": { "version": "4.1.11", "from": "graceful-fs@>=4.1.2 <5.0.0", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz" }, + "growl": { + "version": "1.7.0", + "from": "growl@>=1.7.0 <1.8.0", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.7.0.tgz" + }, + "grunt": { + "version": "0.4.1", + "from": "grunt@0.4.1", + "resolved": "https://registry.npmjs.org/grunt/-/grunt-0.4.1.tgz", + "dev": true, + "dependencies": { + "async": { + "version": "0.1.22", + "from": "async@>=0.1.22 <0.2.0", + "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz", + "dev": true + }, + "coffee-script": { + "version": "1.3.3", + "from": "coffee-script@>=1.3.3 <1.4.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.3.3.tgz", + "dev": true + }, + "glob": { + "version": "3.1.21", + "from": "glob@>=3.1.21 <3.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz", + "dev": true + }, + "graceful-fs": { + "version": "1.2.3", + "from": "graceful-fs@>=1.2.0 <1.3.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz", + "dev": true + }, + "inherits": { + "version": "1.0.2", + "from": "inherits@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz", + "dev": true + }, + "lodash": { + "version": "0.9.2", + "from": "lodash@>=0.9.0 <0.10.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz", + "dev": true + }, + "minimatch": { + "version": "0.2.14", + "from": "minimatch@>=0.2.6 <0.3.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", + "dev": true + }, + "rimraf": { + "version": "2.0.3", + "from": "rimraf@>=2.0.2 <2.1.0", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.0.3.tgz", + "dev": true, + "dependencies": 
{ + "graceful-fs": { + "version": "1.1.14", + "from": "graceful-fs@>=1.1.0 <1.2.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.1.14.tgz", + "dev": true, + "optional": true + } + } + } + } + }, + "grunt-bunyan": { + "version": "0.5.0", + "from": "grunt-bunyan@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/grunt-bunyan/-/grunt-bunyan-0.5.0.tgz", + "dependencies": { + "lodash": { + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + } + } + }, + "grunt-concurrent": { + "version": "0.4.2", + "from": "grunt-concurrent@0.4.2", + "resolved": "https://registry.npmjs.org/grunt-concurrent/-/grunt-concurrent-0.4.2.tgz", + "dev": true + }, + "grunt-contrib-clean": { + "version": "0.5.0", + "from": "grunt-contrib-clean@0.5.0", + "resolved": "https://registry.npmjs.org/grunt-contrib-clean/-/grunt-contrib-clean-0.5.0.tgz", + "dev": true + }, + "grunt-contrib-coffee": { + "version": "0.7.0", + "from": "grunt-contrib-coffee@0.7.0", + "resolved": "https://registry.npmjs.org/grunt-contrib-coffee/-/grunt-contrib-coffee-0.7.0.tgz", + "dev": true, + "dependencies": { + "coffee-script": { + "version": "1.6.3", + "from": "coffee-script@>=1.6.2 <1.7.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.3.tgz", + "dev": true + } + } + }, + "grunt-contrib-requirejs": { + "version": "0.4.1", + "from": "grunt-contrib-requirejs@0.4.1", + "resolved": "https://registry.npmjs.org/grunt-contrib-requirejs/-/grunt-contrib-requirejs-0.4.1.tgz", + "dev": true + }, + "grunt-contrib-watch": { + "version": "0.5.3", + "from": "grunt-contrib-watch@0.5.3", + "resolved": "https://registry.npmjs.org/grunt-contrib-watch/-/grunt-contrib-watch-0.5.3.tgz", + "dev": true + }, + "grunt-execute": { + "version": "0.2.2", + "from": "grunt-execute@>=0.2.2 <0.3.0", + "resolved": "https://registry.npmjs.org/grunt-execute/-/grunt-execute-0.2.2.tgz" + }, + "grunt-forever": { + "version": "0.4.7", + "from": "grunt-forever@>=0.4.4 <0.5.0", + "resolved": "https://registry.npmjs.org/grunt-forever/-/grunt-forever-0.4.7.tgz", + "dev": true + }, + "grunt-legacy-log": { + "version": "0.1.3", + "from": "grunt-legacy-log@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/grunt-legacy-log/-/grunt-legacy-log-0.1.3.tgz", + "dependencies": { + "lodash": { + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + }, + "underscore.string": { + "version": "2.3.3", + "from": "underscore.string@>=2.3.3 <2.4.0", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz" + } + } + }, + "grunt-legacy-log-utils": { + "version": "0.1.1", + "from": "grunt-legacy-log-utils@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/grunt-legacy-log-utils/-/grunt-legacy-log-utils-0.1.1.tgz", + "dependencies": { + "lodash": { + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + }, + "underscore.string": { + "version": "2.3.3", + "from": "underscore.string@~2.3.3", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz" + } + } + }, + "grunt-legacy-util": { + "version": "0.2.0", + "from": "grunt-legacy-util@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/grunt-legacy-util/-/grunt-legacy-util-0.2.0.tgz", + "dependencies": { + "async": { + "version": "0.1.22", + "from": "async@~0.1.22", + "resolved": 
"https://registry.npmjs.org/async/-/async-0.1.22.tgz" + }, + "lodash": { + "version": "0.9.2", + "from": "lodash@~0.9.2", + "resolved": "http://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz" + } + } + }, + "grunt-mocha-test": { + "version": "0.8.2", + "from": "grunt-mocha-test@>=0.8.2 <0.9.0", + "resolved": "https://registry.npmjs.org/grunt-mocha-test/-/grunt-mocha-test-0.8.2.tgz", + "dev": true + }, + "grunt-nodemon": { + "version": "0.2.1", + "from": "grunt-nodemon@0.2.1", + "resolved": "https://registry.npmjs.org/grunt-nodemon/-/grunt-nodemon-0.2.1.tgz", + "dev": true + }, + "har-schema": { + "version": "2.0.0", + "from": "har-schema@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "dev": true + }, + "har-validator": { + "version": "5.1.0", + "from": "har-validator@>=5.1.0 <5.2.0", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.0.tgz", + "dev": true + }, + "has-ansi": { + "version": "0.1.0", + "from": "has-ansi@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-0.1.0.tgz" + }, + "has-color": { + "version": "0.1.7", + "from": "has-color@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/has-color/-/has-color-0.1.7.tgz", + "dev": true + }, "hawk": { "version": "0.10.2", "from": "hawk@>=0.10.2 <0.11.0", @@ -273,6 +879,28 @@ "from": "hoek@>=0.7.0 <0.8.0", "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz" }, + "hooker": { + "version": "0.2.3", + "from": "hooker@>=0.2.3 <0.3.0", + "resolved": "https://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz" + }, + "http-signature": { + "version": "1.2.0", + "from": "http-signature@>=1.2.0 <1.3.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "dev": true + }, + "i": { + "version": "0.3.6", + "from": "i@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/i/-/i-0.3.6.tgz", + "dev": true + }, + "iconv-lite": { + "version": "0.2.11", + "from": "iconv-lite@>=0.2.5 <0.3.0", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz" + }, "ieee754": { "version": "1.1.8", "from": "ieee754@>=1.1.4 <2.0.0", @@ -289,16 +917,75 @@ "from": "inherits@>=2.0.1 <2.1.0", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" }, + "ini": { + "version": "1.3.5", + "from": "ini@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", + "dev": true + }, + "is-typedarray": { + "version": "1.0.0", + "from": "is-typedarray@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "dev": true + }, "isarray": { "version": "1.0.0", "from": "isarray@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" }, + "isstream": { + "version": "0.1.2", + "from": "isstream@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "dev": true + }, + "jade": { + "version": "0.26.3", + "from": "jade@0.26.3", + "resolved": "https://registry.npmjs.org/jade/-/jade-0.26.3.tgz", + "dependencies": { + "commander": { + "version": "0.6.1", + "from": "commander@0.6.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-0.6.1.tgz" + }, + "mkdirp": { + "version": "0.3.0", + "from": "mkdirp@0.3.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.0.tgz" + } + } + }, "jmespath": { "version": "0.15.0", "from": "jmespath@0.15.0", "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz" }, + "js-yaml": { + "version": "2.0.5", + 
"from": "js-yaml@>=2.0.2 <2.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz" + }, + "jsbn": { + "version": "0.1.1", + "from": "jsbn@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "dev": true, + "optional": true + }, + "json-schema": { + "version": "0.2.3", + "from": "json-schema@0.2.3", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "dev": true + }, + "json-schema-traverse": { + "version": "0.3.1", + "from": "json-schema-traverse@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", + "dev": true + }, "json-stringify-safe": { "version": "3.0.0", "from": "json-stringify-safe@>=3.0.0 <3.1.0", @@ -309,6 +996,12 @@ "from": "jsonfile@>=2.1.0 <3.0.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz" }, + "jsprim": { + "version": "1.4.1", + "from": "jsprim@>=1.2.2 <2.0.0", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", + "dev": true + }, "just-extend": { "version": "1.1.22", "from": "just-extend@>=1.1.22 <2.0.0", @@ -389,20 +1082,176 @@ } } }, + "lazy": { + "version": "1.0.11", + "from": "lazy@>=1.0.11 <1.1.0", + "resolved": "https://registry.npmjs.org/lazy/-/lazy-1.0.11.tgz", + "dev": true + }, "lodash": { "version": "4.17.4", "from": "lodash@>=4.0.0 <5.0.0", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz" }, "logger-sharelatex": { - "version": "1.0.0", - "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.1.0", - "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#5a3ea8e655f23e76a77bbc207c012d3fc944c8d8", + "version": "1.5.7", + "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.7", + "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#13562f8866708fc86aef8202bf5a2ce4d1c6eed7", "dependencies": { + "ansi-styles": { + "version": "1.1.0", + "from": "ansi-styles@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.1.0.tgz" + }, + "async": { + "version": "0.1.22", + "from": "async@>=0.1.22 <0.2.0", + "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz" + }, + "bunyan": { + "version": "1.5.1", + "from": "bunyan@1.5.1", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz" + }, + "chalk": { + "version": "0.5.1", + "from": "chalk@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-0.5.1.tgz" + }, "coffee-script": { - "version": "1.4.0", - "from": "coffee-script@1.4.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.4.0.tgz" + "version": "1.12.4", + "from": "coffee-script@1.12.4", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.4.tgz" + }, + "commander": { + "version": "2.0.0", + "from": "commander@2.0.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.0.0.tgz" + }, + "dtrace-provider": { + "version": "0.6.0", + "from": "dtrace-provider@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "optional": true + }, + "fs-extra": { + "version": "0.9.1", + "from": "fs-extra@>=0.9.1 <0.10.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.9.1.tgz", + "dependencies": { + "mkdirp": { + "version": "0.5.1", + "from": "mkdirp@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz" + } + } + }, + "glob": { + "version": "3.1.21", + "from": "glob@>=3.1.21 <3.2.0", + 
"resolved": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz" + }, + "graceful-fs": { + "version": "1.2.3", + "from": "graceful-fs@>=1.2.0 <1.3.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz" + }, + "grunt": { + "version": "0.4.5", + "from": "grunt@>=0.4.5 <0.5.0", + "resolved": "https://registry.npmjs.org/grunt/-/grunt-0.4.5.tgz", + "dependencies": { + "coffee-script": { + "version": "1.3.3", + "from": "coffee-script@>=1.3.3 <1.4.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.3.3.tgz" + } + } + }, + "grunt-contrib-clean": { + "version": "0.6.0", + "from": "grunt-contrib-clean@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/grunt-contrib-clean/-/grunt-contrib-clean-0.6.0.tgz" + }, + "grunt-contrib-coffee": { + "version": "0.11.1", + "from": "grunt-contrib-coffee@>=0.11.0 <0.12.0", + "resolved": "https://registry.npmjs.org/grunt-contrib-coffee/-/grunt-contrib-coffee-0.11.1.tgz", + "dependencies": { + "coffee-script": { + "version": "1.7.1", + "from": "coffee-script@>=1.7.0 <1.8.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz" + }, + "lodash": { + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + } + } + }, + "grunt-mocha-test": { + "version": "0.11.0", + "from": "grunt-mocha-test@>=0.11.0 <0.12.0", + "resolved": "https://registry.npmjs.org/grunt-mocha-test/-/grunt-mocha-test-0.11.0.tgz" + }, + "inherits": { + "version": "1.0.2", + "from": "inherits@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz" + }, + "jsonfile": { + "version": "1.1.1", + "from": "jsonfile@>=1.1.0 <1.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-1.1.1.tgz" + }, + "lodash": { + "version": "0.9.2", + "from": "lodash@>=0.9.2 <0.10.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz" + }, + "minimatch": { + "version": "0.2.14", + "from": "minimatch@>=0.2.12 <0.3.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" + }, + "mocha": { + "version": "1.20.1", + "from": "mocha@>=1.20.0 <1.21.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-1.20.1.tgz", + "dependencies": { + "glob": { + "version": "3.2.3", + "from": "glob@3.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.3.tgz" + }, + "graceful-fs": { + "version": "2.0.3", + "from": "graceful-fs@>=2.0.0 <2.1.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-2.0.3.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "inherits@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + } + } + }, + "nan": { + "version": "2.11.0", + "from": "nan@>=2.0.8 <3.0.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.11.0.tgz", + "optional": true + }, + "ncp": { + "version": "0.5.1", + "from": "ncp@>=0.5.1 <0.6.0", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-0.5.1.tgz" + }, + "strip-ansi": { + "version": "0.3.0", + "from": "strip-ansi@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.3.0.tgz" } } }, @@ -411,10 +1260,21 @@ "from": "lolex@>=2.1.2 <3.0.0", "resolved": "https://registry.npmjs.org/lolex/-/lolex-2.1.2.tgz" }, + "lpad": { + "version": "0.1.0", + "from": "lpad@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/lpad/-/lpad-0.1.0.tgz", + "dev": true + }, + "lru-cache": { + "version": "2.7.3", + "from": "lru-cache@>=2.0.0 <3.0.0", + "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz" + }, "lsmod": { - "version": "0.0.3", - "from": "lsmod@>=0.0.3 <0.1.0", - "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-0.0.3.tgz" + "version": "1.0.0", + "from": "lsmod@1.0.0", + "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz" }, "lynx": { "version": "0.1.1", @@ -458,6 +1318,18 @@ "from": "mime@>=1.2.9 <1.3.0", "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" }, + "mime-db": { + "version": "1.36.0", + "from": "mime-db@>=1.36.0 <1.37.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.36.0.tgz", + "dev": true + }, + "mime-types": { + "version": "2.1.20", + "from": "mime-types@>=2.1.19 <2.2.0", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.20.tgz", + "dev": true + }, "minimatch": { "version": "3.0.4", "from": "minimatch@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0", @@ -474,11 +1346,49 @@ "from": "mkdirp@>=0.3.5 <0.4.0", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" }, + "mocha": { + "version": "1.14.0", + "from": "mocha@>=1.14.0 <1.15.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-1.14.0.tgz", + "dev": true, + "dependencies": { + "commander": { + "version": "2.0.0", + "from": "commander@2.0.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.0.0.tgz", + "dev": true + }, + "glob": { + "version": "3.2.3", + "from": "glob@3.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.3.tgz", + "dev": true + }, + "graceful-fs": { + "version": "2.0.3", + "from": "graceful-fs@>=2.0.0 <2.1.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-2.0.3.tgz", + "dev": true + }, + "minimatch": { + "version": "0.2.14", + "from": "minimatch@~0.2.11", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", + "dev": true + } + } + }, "multiparty": { "version": "2.2.0", "from": "multiparty@2.2.0", "resolved": "https://registry.npmjs.org/multiparty/-/multiparty-2.2.0.tgz" }, + "mute-stream": { + "version": "0.0.7", + "from": "mute-stream@>=0.0.4 <0.1.0", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "dev": true + }, "mv": { "version": "2.1.1", "from": "mv@>=2.0.0 <3.0.0", @@ -503,6 +1413,7 @@ "version": "1.5.3", "from": "nan@>=1.5.1 <1.6.0", "resolved": "https://registry.npmjs.org/nan/-/nan-1.5.3.tgz", + "dev": true, "optional": true }, "native-promise-only": { @@ -510,6 +1421,32 @@ "from": "native-promise-only@>=0.8.1 <0.9.0", "resolved": "https://registry.npmjs.org/native-promise-only/-/native-promise-only-0.8.1.tgz" }, + "natives": { + "version": "1.1.4", + "from": "natives@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/natives/-/natives-1.1.4.tgz", + "dev": true + }, + "nconf": { + "version": "0.6.9", + "from": "nconf@>=0.6.9 <0.7.0", + "resolved": "https://registry.npmjs.org/nconf/-/nconf-0.6.9.tgz", + "dev": true, + "dependencies": { + "async": { + "version": "0.2.9", + "from": "async@0.2.9", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.9.tgz", + "dev": true + }, + "optimist": { + "version": "0.6.0", + "from": "optimist@0.6.0", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz", + "dev": true + } + } + }, "ncp": { "version": "2.0.0", "from": "ncp@>=2.0.0 <2.1.0", @@ -560,16 +1497,85 @@ "from": "node-uuid@>=1.4.1 <1.5.0", "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz" }, + "nodemon": { + "version": "1.0.20", + "from": "nodemon@>=1.0.9 <1.1.0", + "resolved": 
"https://registry.npmjs.org/nodemon/-/nodemon-1.0.20.tgz", + "dev": true, + "dependencies": { + "minimatch": { + "version": "0.2.14", + "from": "minimatch@~0.2.14", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", + "dev": true + } + } + }, + "nopt": { + "version": "1.0.10", + "from": "nopt@>=1.0.10 <1.1.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz" + }, + "noptify": { + "version": "0.0.3", + "from": "noptify@latest", + "resolved": "https://registry.npmjs.org/noptify/-/noptify-0.0.3.tgz", + "dev": true, + "dependencies": { + "nopt": { + "version": "2.0.0", + "from": "nopt@>=2.0.0 <2.1.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-2.0.0.tgz", + "dev": true + } + } + }, + "nssocket": { + "version": "0.5.3", + "from": "nssocket@>=0.5.1 <0.6.0", + "resolved": "https://registry.npmjs.org/nssocket/-/nssocket-0.5.3.tgz", + "dev": true + }, "oauth-sign": { "version": "0.2.0", "from": "oauth-sign@>=0.2.0 <0.3.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz" }, + "object-assign": { + "version": "2.1.1", + "from": "object-assign@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-2.1.1.tgz", + "dev": true + }, "once": { "version": "1.4.0", "from": "once@>=1.3.0 <2.0.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz" }, + "optimist": { + "version": "0.6.1", + "from": "optimist@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "dev": true + }, + "os-homedir": { + "version": "1.0.2", + "from": "os-homedir@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "dev": true + }, + "os-tmpdir": { + "version": "1.0.2", + "from": "os-tmpdir@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "dev": true + }, + "osenv": { + "version": "0.1.5", + "from": "osenv@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", + "dev": true + }, "path-is-absolute": { "version": "1.0.1", "from": "path-is-absolute@>=1.0.0 <2.0.0", @@ -598,15 +1604,40 @@ "from": "pause@0.0.1", "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz" }, + "performance-now": { + "version": "2.1.0", + "from": "performance-now@>=2.1.0 <3.0.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "dev": true + }, + "pkginfo": { + "version": "0.3.1", + "from": "pkginfo@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", + "dev": true + }, "pngcrush": { "version": "0.0.3", "from": "pngcrush@0.0.3", "resolved": "https://registry.npmjs.org/pngcrush/-/pngcrush-0.0.3.tgz" }, - "process-nextick-args": { - "version": "1.0.7", - "from": "process-nextick-args@>=1.0.6 <1.1.0", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" + "prompt": { + "version": "0.2.14", + "from": "prompt@0.2.14", + "resolved": "https://registry.npmjs.org/prompt/-/prompt-0.2.14.tgz", + "dev": true + }, + "ps-tree": { + "version": "0.0.3", + "from": "ps-tree@>=0.0.0 <0.1.0", + "resolved": "https://registry.npmjs.org/ps-tree/-/ps-tree-0.0.3.tgz", + "dev": true + }, + "psl": { + "version": "1.1.29", + "from": "psl@>=1.1.24 <2.0.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.29.tgz", + "dev": true }, "punycode": { "version": "1.3.2", @@ -629,15 +1660,43 @@ "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" }, "raven": { - 
"version": "0.8.1", - "from": "raven@>=0.8.0 <0.9.0", - "resolved": "https://registry.npmjs.org/raven/-/raven-0.8.1.tgz" + "version": "1.2.1", + "from": "raven@>=1.1.3 <2.0.0", + "resolved": "https://registry.npmjs.org/raven/-/raven-1.2.1.tgz", + "dependencies": { + "cookie": { + "version": "0.3.1", + "from": "cookie@0.3.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz" + }, + "json-stringify-safe": { + "version": "5.0.1", + "from": "json-stringify-safe@5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + }, + "stack-trace": { + "version": "0.0.9", + "from": "stack-trace@0.0.9", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" + }, + "uuid": { + "version": "3.0.0", + "from": "uuid@3.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz" + } + } }, "raw-body": { "version": "1.1.2", "from": "raw-body@1.1.2", "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-1.1.2.tgz" }, + "read": { + "version": "1.0.7", + "from": "read@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", + "dev": true + }, "readable-stream": { "version": "1.1.14", "from": "readable-stream@>=1.1.9 <1.2.0", @@ -691,20 +1750,32 @@ "from": "require-like@0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz" }, + "requirejs": { + "version": "2.1.22", + "from": "requirejs@>=2.1.0 <2.2.0", + "resolved": "https://registry.npmjs.org/requirejs/-/requirejs-2.1.22.tgz", + "dev": true + }, "response": { "version": "0.14.0", "from": "response@0.14.0", "resolved": "https://registry.npmjs.org/response/-/response-0.14.0.tgz" }, + "revalidator": { + "version": "0.1.8", + "from": "revalidator@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/revalidator/-/revalidator-0.1.8.tgz", + "dev": true + }, "rimraf": { "version": "2.2.8", "from": "rimraf@2.2.8", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" }, "safe-buffer": { - "version": "5.1.1", - "from": "safe-buffer@>=5.0.1 <6.0.0", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz" + "version": "5.1.2", + "from": "safe-buffer@>=5.1.1 <5.2.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" }, "safe-json-stringify": { "version": "1.0.4", @@ -712,6 +1783,12 @@ "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.4.tgz", "optional": true }, + "safer-buffer": { + "version": "2.1.2", + "from": "safer-buffer@>=2.0.2 <3.0.0", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "dev": true + }, "samsam": { "version": "1.2.1", "from": "samsam@>=1.1.3 <2.0.0", @@ -734,6 +1811,12 @@ "from": "sax@1.2.1", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz" }, + "semver": { + "version": "2.3.2", + "from": "semver@>=2.3.0 <3.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-2.3.2.tgz", + "dev": true + }, "send": { "version": "0.1.4", "from": "send@0.1.4", @@ -758,6 +1841,11 @@ } } }, + "sigmund": { + "version": "1.0.1", + "from": "sigmund@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" + }, "sinon": { "version": "3.2.1", "from": "sinon@latest", @@ -775,16 +1863,51 @@ "from": "sntp@>=0.1.0 <0.2.0", "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz" }, + "sprintf-js": { + "version": "1.0.3", + "from": "sprintf-js@>=1.0.2 <1.1.0", + "resolved": 
"https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "dev": true + }, + "sshpk": { + "version": "1.14.2", + "from": "sshpk@>=1.7.0 <2.0.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.14.2.tgz", + "dev": true + }, "stack-trace": { "version": "0.0.7", "from": "stack-trace@0.0.7", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.7.tgz" + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.7.tgz", + "dev": true }, "statsd-parser": { "version": "0.0.4", "from": "statsd-parser@>=0.0.4 <0.1.0", "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz" }, + "stream-browserify": { + "version": "2.0.1", + "from": "stream-browserify@>=2.0.1 <3.0.0", + "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.1.tgz", + "dependencies": { + "process-nextick-args": { + "version": "2.0.0", + "from": "process-nextick-args@>=2.0.0 <2.1.0", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz" + }, + "readable-stream": { + "version": "2.3.6", + "from": "readable-stream@>=2.0.2 <3.0.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz" + }, + "string_decoder": { + "version": "1.1.1", + "from": "string_decoder@>=1.1.1 <1.2.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + } + } + }, "stream-buffers": { "version": "0.2.6", "from": "stream-buffers@>=0.2.5 <0.3.0", @@ -800,16 +1923,79 @@ "from": "string_decoder@>=0.10.0 <0.11.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" }, + "strip-ansi": { + "version": "0.1.1", + "from": "strip-ansi@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.1.1.tgz", + "dev": true + }, + "supports-color": { + "version": "0.2.0", + "from": "supports-color@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-0.2.0.tgz" + }, "text-encoding": { "version": "0.6.4", "from": "text-encoding@0.6.4", "resolved": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz" }, + "timekeeper": { + "version": "1.0.0", + "from": "timekeeper@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-1.0.0.tgz" + }, + "timespan": { + "version": "2.3.0", + "from": "timespan@>=2.3.0 <2.4.0", + "resolved": "https://registry.npmjs.org/timespan/-/timespan-2.3.0.tgz", + "dev": true + }, + "tiny-lr": { + "version": "0.0.4", + "from": "tiny-lr@0.0.4", + "resolved": "https://registry.npmjs.org/tiny-lr/-/tiny-lr-0.0.4.tgz", + "dev": true, + "dependencies": { + "debug": { + "version": "0.7.4", + "from": "debug@>=0.7.0 <0.8.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz", + "dev": true + }, + "qs": { + "version": "0.5.6", + "from": "qs@>=0.5.2 <0.6.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz", + "dev": true + } + } + }, + "tough-cookie": { + "version": "2.4.3", + "from": "tough-cookie@>=2.4.3 <2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", + "dev": true, + "dependencies": { + "punycode": { + "version": "1.4.1", + "from": "punycode@>=1.4.1 <2.0.0", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "dev": true + } + } + }, "tunnel-agent": { "version": "0.2.0", "from": "tunnel-agent@>=0.2.0 <0.3.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz" }, + "tweetnacl": { + "version": "0.14.5", + "from": 
"tweetnacl@>=0.14.0 <0.15.0", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "dev": true, + "optional": true + }, "type-detect": { "version": "4.0.3", "from": "type-detect@>=4.0.0 <5.0.0", @@ -825,21 +2011,175 @@ "from": "underscore@>=1.5.2 <1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz" }, + "underscore.string": { + "version": "2.2.1", + "from": "underscore.string@>=2.2.0-rc <2.3.0", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz" + }, + "update-notifier": { + "version": "0.1.10", + "from": "update-notifier@>=0.1.8 <0.2.0", + "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-0.1.10.tgz", + "dev": true, + "dependencies": { + "caseless": { + "version": "0.12.0", + "from": "caseless@>=0.12.0 <0.13.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "dev": true + }, + "combined-stream": { + "version": "1.0.6", + "from": "combined-stream@>=1.0.6 <1.1.0", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", + "dev": true + }, + "delayed-stream": { + "version": "1.0.0", + "from": "delayed-stream@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "dev": true + }, + "forever-agent": { + "version": "0.6.1", + "from": "forever-agent@>=0.6.1 <0.7.0", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "dev": true + }, + "form-data": { + "version": "2.3.2", + "from": "form-data@>=2.3.2 <2.4.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz", + "dev": true + }, + "json-stringify-safe": { + "version": "5.0.1", + "from": "json-stringify-safe@>=5.0.1 <5.1.0", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "dev": true + }, + "oauth-sign": { + "version": "0.9.0", + "from": "oauth-sign@>=0.9.0 <0.10.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "dev": true + }, + "qs": { + "version": "6.5.2", + "from": "qs@>=6.5.2 <6.6.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "dev": true + }, + "request": { + "version": "2.88.0", + "from": "request@>=2.36.0 <3.0.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", + "dev": true + }, + "safe-buffer": { + "version": "5.1.2", + "from": "safe-buffer@>=5.1.2 <6.0.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "dev": true + }, + "tunnel-agent": { + "version": "0.6.0", + "from": "tunnel-agent@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "dev": true + }, + "uuid": { + "version": "3.3.2", + "from": "uuid@>=3.3.2 <4.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "dev": true + } + } + }, "url": { "version": "0.10.3", "from": "url@0.10.3", "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz" }, + "user-home": { + "version": "1.1.1", + "from": "user-home@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/user-home/-/user-home-1.1.1.tgz", + "dev": true + }, "util-deprecate": { "version": "1.0.2", "from": "util-deprecate@>=1.0.1 <1.1.0", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" }, + "utile": { + "version": "0.2.1", + "from": "utile@>=0.2.1 <0.3.0", + "resolved": "https://registry.npmjs.org/utile/-/utile-0.2.1.tgz", + "dev": true, + "dependencies": { + "ncp": { + 
"version": "0.4.2", + "from": "ncp@>=0.4.0 <0.5.0", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-0.4.2.tgz", + "dev": true + } + } + }, + "uuid": { + "version": "2.0.3", + "from": "uuid@>=2.0.1 <3.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-2.0.3.tgz", + "dev": true + }, + "verror": { + "version": "1.10.0", + "from": "verror@1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "dev": true + }, + "watch": { + "version": "0.13.0", + "from": "watch@>=0.13.0 <0.14.0", + "resolved": "https://registry.npmjs.org/watch/-/watch-0.13.0.tgz", + "dev": true, + "dependencies": { + "minimist": { + "version": "1.2.0", + "from": "minimist@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "dev": true + } + } + }, + "which": { + "version": "1.0.9", + "from": "which@>=1.0.5 <1.1.0", + "resolved": "https://registry.npmjs.org/which/-/which-1.0.9.tgz" + }, + "winston": { + "version": "0.8.3", + "from": "winston@>=0.8.1 <0.9.0", + "resolved": "https://registry.npmjs.org/winston/-/winston-0.8.3.tgz", + "dev": true + }, + "wordwrap": { + "version": "0.0.3", + "from": "wordwrap@>=0.0.2 <0.1.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "dev": true + }, "wrappy": { "version": "1.0.2", "from": "wrappy@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" }, + "xdg-basedir": { + "version": "1.0.1", + "from": "xdg-basedir@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-1.0.1.tgz", + "dev": true + }, "xml2js": { "version": "0.4.17", "from": "xml2js@0.4.17", diff --git a/services/filestore/package.json b/services/filestore/package.json index 047801ec2f..0910f4c4db 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -29,6 +29,7 @@ "response": "0.14.0", "rimraf": "2.2.8", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", + "stream-browserify": "^2.0.1", "stream-buffers": "~0.2.5", "underscore": "~1.5.2" }, From 96e9d8b0ec89169a1dacb9dadd121c433f79693a Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Tue, 11 Sep 2018 10:51:10 +0100 Subject: [PATCH 264/555] fix typo --- services/filestore/app.coffee | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 957a3f6f40..9bcf258331 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -11,8 +11,8 @@ domain = require("domain") appIsOk = true app = express() -if Settings.sentry?.dsn? - logger.initializeErrorReporting(Settings.sentry.dsn) +if settings.sentry?.dsn? 
+ logger.initializeErrorReporting(settings.sentry.dsn) Metrics = require "metrics-sharelatex" Metrics.initialize("filestore") From 38f565d35fc09218e504a39b5a133e858d7ab66f Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Fri, 28 Sep 2018 15:53:57 +0100 Subject: [PATCH 265/555] logout settings --- services/filestore/app/coffee/HealthCheckController.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/app/coffee/HealthCheckController.coffee b/services/filestore/app/coffee/HealthCheckController.coffee index db3f111c5e..7b7a80bfc8 100644 --- a/services/filestore/app/coffee/HealthCheckController.coffee +++ b/services/filestore/app/coffee/HealthCheckController.coffee @@ -15,6 +15,7 @@ checkCanStoreFiles = (callback)-> req = {params:{}, query:{}, headers:{}} req.params.project_id = settings.health_check.project_id req.params.file_id = settings.health_check.file_id + console.log settings myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer(initialSize: 100) res = { send: (code) -> From 038b3476fe4f1a7253941e4512087ffd78620f4a Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Mon, 22 Oct 2018 14:09:56 +0100 Subject: [PATCH 266/555] Add some details to the README --- services/filestore/README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/services/filestore/README.md b/services/filestore/README.md index 420214b56f..cd126ab053 100644 --- a/services/filestore/README.md +++ b/services/filestore/README.md @@ -5,6 +5,18 @@ An API for CRUD operations on binary files stored in S3 [![Build Status](https://travis-ci.org/sharelatex/filestore-sharelatex.png?branch=master)](https://travis-ci.org/sharelatex/filestore-sharelatex) +filestore acts as a proxy between the CLSIs and (currently) Amazon S3 storage, presenting a RESTful HTTP interface to the CLSIs on port 3009 by default. Urls are mapped to node functions in https://github.com/sharelatex/filestore-sharelatex/blob/master/app.coffee . 
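An illustrative aside, not part of the patch: because filestore speaks plain HTTP, any HTTP client can exercise the routes listed just below. A minimal CoffeeScript sketch, assuming a filestore running locally on the default port 3009 and using made-up project and file ids:

fs = require "fs"
request = require "request"

# Made-up ids, purely for illustration
projectId = "507f1f77bcf86cd799439011"
fileId    = "507f191e810c19729de860ea"

# GET the file and stream it to disk; filestore listens on port 3009 by default
download = request.get "http://localhost:3009/project/#{projectId}/file/#{fileId}"
download.on "error", (err) -> console.error err
download.pipe fs.createWriteStream "/tmp/#{fileId}"

app.coffee registers a POST handler on the same route (fileController.insertFile), so uploads follow the same URL shape.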
URLs are of the form: + +* `/project/:project_id/file/:file_id` +* `/template/:template_id/v/:version/:format` +* `/project/:project_id/public/:public_file_id` +* `/project/:project_id/size` +* `/bucket/:bucket/key/*` +* `/heapdump` +* `/shutdown` +* `/status` - returns `filestore sharelatex up` or `server is being shut down` (HTTP 500) +* `/health_check` + License ------- From 06bfd4c270e8c01a79188612f5a02d7fcfdd7be2 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 7 Nov 2018 16:07:55 +0000 Subject: [PATCH 267/555] add dockerfile to install packages needed by filestore --- services/filestore/Dockerfile | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 services/filestore/Dockerfile diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile new file mode 100644 index 0000000000..5f4f63aec0 --- /dev/null +++ b/services/filestore/Dockerfile @@ -0,0 +1,5 @@ +FROM node:6.9.5 + +RUN apt-get update +# we also need imagemagick but it is already in the node docker image +RUN apt-get install -y --no-install-recommends ghostscript optipng From 40ac59090bfba92d95ec55e03f983b3c6bd39514 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 7 Nov 2018 16:22:09 +0000 Subject: [PATCH 268/555] make backends consistent by deleting temporary files --- .../app/coffee/FSPersistorManager.coffee | 20 +++++++++++++++---- .../app/coffee/S3PersistorManager.coffee | 10 ++++++---- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 0868216a15..733202e4cd 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -13,10 +13,19 @@ module.exports = sendFile: ( location, target, source, callback = (err)->) -> filteredTarget = filterName target logger.log location:location, target:filteredTarget, source:source, "sending file" - fs.rename source, "#{location}/#{filteredTarget}", (err) -> - if err!=null + done = _.once (err) -> + if err? logger.err err:err, location:location, target:filteredTarget, source:source, "Error on put of file" - callback err + callback(err) + # actually copy the file (instead of moving it) to maintain consistent behaviour + # between the different implementations + sourceStream = fs.createReadStream source + sourceStream.on 'error', done + targetStream = fs.createWriteStream "#{location}/#{filteredTarget}" + targetStream.on 'error', done + targetStream.on 'finish', () -> + done() + sourceStream.pipe targetStream sendStream: ( location, target, sourceStream, callback = (err)->) -> logger.log location:location, target:target, "sending file stream" @@ -26,7 +35,10 @@ module.exports = if err? 
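The sendFile rewrite just above (from the commit that makes the backends consistent) swaps fs.rename for an explicit stream copy, so the filesystem backend, like the S3 one, leaves the caller's source file in place to be cleaned up separately. A condensed sketch of that copy pattern, paraphrased rather than excerpted, with copyFile as an illustrative name:

fs = require "fs"
_  = require "underscore"

# Copy source to target, calling back exactly once on success or on the first error.
copyFile = (source, target, callback) ->
  done = _.once callback                      # either stream may emit an error
  readStream = fs.createReadStream source
  readStream.on "error", done
  writeStream = fs.createWriteStream target
  writeStream.on "error", done
  writeStream.on "finish", -> done()          # "finish" fires once all data is flushed
  readStream.pipe writeStream

Copying instead of renaming also works when source and target live on different filesystems, which fs.rename does not.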
logger.err location:location, target:target, fsPath:fsPath, err:err, "something went wrong writing stream to disk" return callback err - @sendFile location, target, fsPath, callback + @sendFile location, target, fsPath, (err) -> + # delete the temporary file created above and return the original error + LocalFileWriter.deleteFile fsPath, () -> + callback(err) # opts may be {start: Number, end: Number} getFileStream: (location, name, opts, _callback = (err, res)->) -> diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 2bd6eb0e9b..3055810131 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -42,9 +42,8 @@ module.exports = if res.statusCode != 200 logger.err bucketName:bucketName, key:key, fsPath:fsPath, "non 200 response from s3 putting file" return callback("non 200 response from s3 on put file") - LocalFileWriter.deleteFile fsPath, (err)-> - logger.log res:res, bucketName:bucketName, key:key, fsPath:fsPath,"file uploaded to s3" - callback(err) + logger.log res:res, bucketName:bucketName, key:key, fsPath:fsPath,"file uploaded to s3" + callback(err) putEventEmiter.on "error", (err)-> logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error emmited on put of file" callback err @@ -57,7 +56,10 @@ module.exports = if err? logger.err bucketName:bucketName, key:key, fsPath:fsPath, err:err, "something went wrong writing stream to disk" return callback(err) - @sendFile bucketName, key, fsPath, callback + @sendFile bucketName, key, fsPath, (err) -> + # delete the temporary file created above and return the original error + LocalFileWriter.deleteFile fsPath, () -> + callback(err) # opts may be {start: Number, end: Number} getFileStream: (bucketName, key, opts, callback = (err, res)->)-> From 78ccf453dbb308ab2ad636c1e9e4582415b01460 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 7 Nov 2018 16:51:06 +0000 Subject: [PATCH 269/555] update unit tests --- .../coffee/FSPersistorManagerTests.coffee | 30 +++++++++++++++++-- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index 46def22a05..f762b7a89f 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -25,6 +25,7 @@ describe "FSPersistorManagerTests", -> @Rimraf = sinon.stub() @LocalFileWriter = writeStream: sinon.stub() + deleteFile: sinon.stub() @requires = "./LocalFileWriter":@LocalFileWriter "fs":@Fs @@ -43,10 +44,32 @@ describe "FSPersistorManagerTests", -> @FSPersistorManager = SandboxedModule.require modulePath, requires: @requires describe "sendFile", -> - it "should put the file", (done) -> - @Fs.rename.callsArgWith(2,@error) + beforeEach -> + @Fs.createReadStream = sinon.stub().returns({ + on: -> + pipe: -> + }) + + it "should copy the file", (done) -> + @Fs.createWriteStream =sinon.stub().returns({ + on: (event, handler) -> + process.nextTick(handler) if event is 'finish' + }) @FSPersistorManager.sendFile @location, @name1, @name2, (err)=> - @Fs.rename.calledWith( @name2, "#{@location}/#{@name1Filtered}" ).should.equal true + @Fs.createReadStream.calledWith(@name2).should.equal true + @Fs.createWriteStream.calledWith("#{@location}/#{@name1Filtered}" ).should.equal true + done() + + it "should return an error if 
the file cannot be stored", (done) -> + @Fs.createWriteStream =sinon.stub().returns({ + on: (event, handler) => + if event is 'error' + process.nextTick () => + handler(@error) + }) + @FSPersistorManager.sendFile @location, @name1, @name2, (err)=> + @Fs.createReadStream.calledWith(@name2).should.equal true + @Fs.createWriteStream.calledWith("#{@location}/#{@name1Filtered}" ).should.equal true err.should.equal @error done() @@ -54,6 +77,7 @@ describe "FSPersistorManagerTests", -> beforeEach -> @FSPersistorManager.sendFile = sinon.stub().callsArgWith(3) @LocalFileWriter.writeStream.callsArgWith(2, null, @name1) + @LocalFileWriter.deleteFile.callsArg(1) @SourceStream = on:-> From 8744b3aa4eefc2769be624e4d92029bb9080c0b4 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 7 Nov 2018 16:23:55 +0000 Subject: [PATCH 270/555] serve file from disk to avoid read-after-write inconsistency --- .../filestore/app/coffee/FileHandler.coffee | 19 ++++++++++++++++++- .../app/coffee/LocalFileWriter.coffee | 17 +++++++++++++++++ .../test/unit/coffee/FileHandlerTests.coffee | 10 +++++++--- .../unit/coffee/LocalFileWriterTests.coffee | 15 +++++++++++++++ 4 files changed, 57 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 2eeb09bc74..94c6a7395c 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -64,7 +64,24 @@ module.exports = FileHandler = LocalFileWriter.deleteFile convertedFsPath, -> LocalFileWriter.deleteFile originalFsPath, -> return callback(err) - PersistorManager.getFileStream bucket, convertedKey, opts, callback + # Send back the converted file from the local copy to avoid problems + # with the file not being present in S3 yet. As described in the + # documentation below, we have already made a 'HEAD' request in + # checkIfFileExists so we only have "eventual consistency" if we try + # to stream it from S3 here. This was a cause of many 403 errors. + # + # "Amazon S3 provides read-after-write consistency for PUTS of new + # objects in your S3 bucket in all regions with one caveat. The + # caveat is that if you make a HEAD or GET request to the key name + # (to find if the object exists) before creating the object, Amazon + # S3 provides eventual consistency for read-after-write."" + # https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel + LocalFileWriter.getStream convertedFsPath, (err, readStream) -> + return callback(err) if err? 
+ readStream.on 'end', () -> + logger.log {convertedFsPath: convertedFsPath}, "deleting temporary file" + LocalFileWriter.deleteFile convertedFsPath, -> + callback(null, readStream) _convertFile: (bucket, originalKey, opts, callback)-> @_writeS3FileToDisk bucket, originalKey, opts, (err, originalFsPath)-> diff --git a/services/filestore/app/coffee/LocalFileWriter.coffee b/services/filestore/app/coffee/LocalFileWriter.coffee index 47b2b91e77..72422b7696 100644 --- a/services/filestore/app/coffee/LocalFileWriter.coffee +++ b/services/filestore/app/coffee/LocalFileWriter.coffee @@ -5,6 +5,7 @@ _ = require("underscore") logger = require("logger-sharelatex") metrics = require("metrics-sharelatex") Settings = require("settings-sharelatex") +Errors = require "./Errors" module.exports = @@ -26,6 +27,22 @@ module.exports = callback err stream.pipe writeStream + getStream: (fsPath, _callback = (err, res)->) -> + callback = _.once _callback + timer = new metrics.Timer("readingFile") + logger.log fsPath:fsPath, "reading file locally" + readStream = fs.createReadStream(fsPath) + readStream.on "end", -> + timer.done() + logger.log fsPath:fsPath, "finished reading file locally" + readStream.on "error", (err)-> + logger.err err:err, fsPath:fsPath, "problem reading file locally, with read stream" + if err.code == 'ENOENT' + callback new Errors.NotFoundError(err.message), null + else + callback err + callback null, readStream + deleteFile: (fsPath, callback)-> if !fsPath? or fsPath == "" return callback() diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index ab757b9360..50b8a17524 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -23,6 +23,7 @@ describe "FileHandler", -> directorySize: sinon.stub() @LocalFileWriter = writeStream: sinon.stub() + getStream: sinon.stub() deleteFile: sinon.stub() @FileConverter = convert: sinon.stub() @@ -152,17 +153,20 @@ describe "FileHandler", -> it "should _convertFile ", (done)-> @stubbedStream = {"something":"here"} + @localStream = { + on: -> + } @PersistorManager.sendFile = sinon.stub().callsArgWith(3) - @PersistorManager.getFileStream = sinon.stub().callsArgWith(3, null, @stubbedStream) + @LocalFileWriter.getStream = sinon.stub().callsArgWith(1, null, @localStream) @convetedKey = @key+"converted" @handler._convertFile = sinon.stub().callsArgWith(3, null, @stubbedPath) @ImageOptimiser.compressPng = sinon.stub().callsArgWith(1) @handler._getConvertedFileAndCache @bucket, @key, @convetedKey, {}, (err, fsStream)=> @handler._convertFile.called.should.equal true @PersistorManager.sendFile.calledWith(@bucket, @convetedKey, @stubbedPath).should.equal true - @PersistorManager.getFileStream.calledWith(@bucket, @convetedKey).should.equal true @ImageOptimiser.compressPng.calledWith(@stubbedPath).should.equal true - fsStream.should.equal @stubbedStream + @LocalFileWriter.getStream.calledWith(@stubbedPath).should.equal true + fsStream.should.equal @localStream done() describe "_convertFile", -> diff --git a/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee b/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee index 0b9eec035e..a6bc964e0f 100644 --- a/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee +++ b/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee @@ -15,8 +15,11 @@ describe "LocalFileWriter", -> on: (type, cb)-> if type == "finish" cb() + 
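Taken together, the FileHandler and LocalFileWriter changes above implement one pattern: push the converted file to S3, but answer the request from the local temporary copy and delete that copy once the reader has consumed it, avoiding the HEAD-then-GET eventual-consistency caveat quoted in the comment above. A rough sketch of the pattern, with serveFromLocalCopy as an illustrative name rather than a function from the codebase:

fs = require "fs"
_  = require "underscore"

# Upload fsPath to the remote store, then stream the local copy back to the caller,
# deleting it when the read stream has been fully consumed.
serveFromLocalCopy = (persistor, bucket, key, fsPath, _callback) ->
  callback = _.once _callback
  persistor.sendFile bucket, key, fsPath, (err) ->
    return callback(err) if err?
    readStream = fs.createReadStream fsPath
    readStream.on "error", callback
    readStream.on "end", ->
      fs.unlink fsPath, ->                    # ignore unlink errors; best-effort cleanup
    callback null, readStream

The trade-off is an extra read from local disk in exchange for never re-fetching an object from S3 immediately after writing it.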
@readStream = + on: -> @fs = createWriteStream : sinon.stub().returns(@writeStream) + createReadStream: sinon.stub().returns(@readStream) unlink: sinon.stub() @settings = path: @@ -51,6 +54,18 @@ describe "LocalFileWriter", -> fsPath.should.equal @stubbedFsPath done() + describe "getStream", -> + + it "should read the stream from the file ", (done)-> + @writer.getStream @stubbedFsPath, (err, stream)=> + @fs.createReadStream.calledWith(@stubbedFsPath).should.equal true + done() + + it "should send the stream in the callback", (done)-> + @writer.getStream @stubbedFsPath, (err, readStream)=> + readStream.should.equal @readStream + done() + describe "delete file", -> it "should unlink the file", (done)-> From c2bfdd095ca82e73f637c81d2f2f60110b014eac Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 8 Nov 2018 16:29:23 +0000 Subject: [PATCH 271/555] upgrade logger-sharelatex to v1.5.8 --- services/filestore/npm-shrinkwrap.json | 10 +++++----- services/filestore/package.json | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 34fdb6b539..dcbdd2848d 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -1094,9 +1094,9 @@ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz" }, "logger-sharelatex": { - "version": "1.5.7", - "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.7", - "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#13562f8866708fc86aef8202bf5a2ce4d1c6eed7", + "version": "1.5.8", + "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.8", + "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#3f841b014572706e472c47fe0d0c0c1e569bad8c", "dependencies": { "ansi-styles": { "version": "1.1.0", @@ -1238,9 +1238,9 @@ } }, "nan": { - "version": "2.11.0", + "version": "2.11.1", "from": "nan@>=2.0.8 <3.0.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.11.0.tgz", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.11.1.tgz", "optional": true }, "ncp": { diff --git a/services/filestore/package.json b/services/filestore/package.json index 0910f4c4db..80d4a06240 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -18,7 +18,7 @@ "fs-extra": "^1.0.0", "heapdump": "^0.3.2", "knox": "~0.9.1", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.7", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.8", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.3.0", "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", From 69b164092cdccb469052464ea1f3c3697a06d54d Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 9 Nov 2018 14:04:26 +0000 Subject: [PATCH 272/555] suppress unnecessary error logging for NotFound --- services/filestore/app/coffee/FileController.coffee | 2 +- services/filestore/app/coffee/FileHandler.coffee | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 27ac078379..24fd5229de 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -29,10 +29,10 @@ module.exports = FileController = logger.log start: range.start, end: range.end, "getting range of bytes from file" FileHandler.getFile bucket, key, options, (err, 
fileStream)-> if err? - logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" if err instanceof Errors.NotFoundError return res.send 404 else + logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" return res.send 500 else if req.query.cacheWarm logger.log key:key, bucket:bucket, format:format, style:style, "request is only for cache warm so not sending stream" diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 2eeb09bc74..8fc7cd037a 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -6,6 +6,7 @@ FileConverter = require("./FileConverter") KeyBuilder = require("./KeyBuilder") async = require("async") ImageOptimiser = require("./ImageOptimiser") +Errors = require('./Errors') module.exports = FileHandler = @@ -32,7 +33,7 @@ module.exports = FileHandler = _getStandardFile: (bucket, key, opts, callback)-> PersistorManager.getFileStream bucket, key, opts, (err, fileStream)-> - if err? + if err? and !(err instanceof Errors.NotFoundError) logger.err bucket:bucket, key:key, opts:FileHandler._scrubSecrets(opts), "error getting fileStream" callback err, fileStream From 23a6d6e81d3cfacc8cb1707b00527368bdc4dbe7 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 9 Nov 2018 14:05:07 +0000 Subject: [PATCH 273/555] consider 403 and 404 as NotFound errors --- services/filestore/app/coffee/S3PersistorManager.coffee | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 2bd6eb0e9b..0bb713deb0 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -74,7 +74,9 @@ module.exports = s3Stream = s3Client.get(key, headers) s3Stream.end() s3Stream.on 'response', (res) -> - if res.statusCode == 404 + if res.statusCode in [403, 404] + # S3 returns a 403 instead of a 404 when the user doesn't have + # permission to list the bucket contents. 
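The error-handling changes above amount to a small convention: the persistor collapses S3 status codes 403 and 404 into Errors.NotFoundError (403 shows up when the credentials cannot list the bucket), and the controller turns that error type into a quiet HTTP 404 instead of a logged 500. A compact sketch of that convention, with illustrative helper names:

Errors = require "./Errors"                   # the filestore's own error classes
logger = require "logger-sharelatex"

# Persistor side: anything S3 reports as 403 or 404 becomes a NotFoundError;
# other non-2xx codes stay generic errors.
classifyS3Response = (statusCode) ->
  if statusCode in [403, 404]
    return new Errors.NotFoundError("file not found in S3")
  if statusCode not in [200, 206]
    return new Error("unexpected response from S3: #{statusCode}")
  return null

# Controller side: a missing file is an expected outcome, not an incident worth logging.
# (Called only when an error actually occurred.)
sendGetFileError = (err, res) ->
  if err instanceof Errors.NotFoundError
    res.send 404
  else
    logger.err err: err, "problem getting file"
    res.send 500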
logger.log bucketName:bucketName, key:key, "file not found in s3" return callback new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}"), null if res.statusCode not in [200, 206] From ee1c14a41036a85b4a1dcc977a61d0f59e1be36a Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 29 Nov 2018 12:46:14 +0000 Subject: [PATCH 274/555] bump metrics --- services/filestore/npm-shrinkwrap.json | 5 +++-- services/filestore/package.json | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index c911545763..1121f348eb 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -1466,8 +1466,9 @@ "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" }, "metrics-sharelatex": { - "version": "git+https://github.com/sharelatex/metrics-sharelatex.git#e5356366b5b83997c8e1645b2e936af453381517", - "integrity": "sha1-JA9No3q08VfmnEuRjv/AQU3MoRg=", + "version": "2.0.3", + "from": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.3", + "resolved": "git+https://github.com/sharelatex/metrics-sharelatex.git#02522a50b7a5ec46eed9b0a93513ef992a21eb45", "requires": { "coffee-script": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", "lynx": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 0497597db6..83f4d53120 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -29,7 +29,7 @@ "fs-extra": "^1.0.0", "heapdump": "^0.3.2", "knox": "~0.9.1", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.8.1", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.3", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.7", "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", From 95f0a02dee4d9cb938b011bc280fc077362bd36c Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 29 Nov 2018 13:26:14 +0000 Subject: [PATCH 275/555] stub out metrics in tests & inject metrics endpoint --- services/filestore/app.coffee | 2 ++ .../filestore/test/unit/coffee/BucketControllerTests.coffee | 2 ++ .../filestore/test/unit/coffee/FileControllerTests.coffee | 2 ++ .../filestore/test/unit/coffee/FileConverterTests.coffee | 4 ++++ .../filestore/test/unit/coffee/LocalFileWriterTests.coffee | 5 +++++ 5 files changed, 15 insertions(+) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index a571550f8a..b172e3ba88 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -63,6 +63,8 @@ app.use (req, res, next) -> res.set 'Connection', 'close' next() +Metrics.injectMetricsRoute(app) + app.get "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile app.post "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.insertFile diff --git a/services/filestore/test/unit/coffee/BucketControllerTests.coffee b/services/filestore/test/unit/coffee/BucketControllerTests.coffee index 461f3f03d6..c6bbabc89e 100644 --- a/services/filestore/test/unit/coffee/BucketControllerTests.coffee +++ b/services/filestore/test/unit/coffee/BucketControllerTests.coffee @@ -35,6 +35,8 @@ describe "BucketController", -> "./FileHandler": @FileHandler "./PersistorManager":@PersistorManager "settings-sharelatex": @settings + "metrics-sharelatex": + inc:-> "logger-sharelatex": log:-> err:-> diff --git 
a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index 591644de60..d75750f924 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -29,6 +29,8 @@ describe "FileController", -> "./FileHandler": @FileHandler "./PersistorManager":@PersistorManager "settings-sharelatex": @settings + "metrics-sharelatex": + inc:-> "logger-sharelatex": log:-> err:-> diff --git a/services/filestore/test/unit/coffee/FileConverterTests.coffee b/services/filestore/test/unit/coffee/FileConverterTests.coffee index bdb908be98..ed59d56376 100644 --- a/services/filestore/test/unit/coffee/FileConverterTests.coffee +++ b/services/filestore/test/unit/coffee/FileConverterTests.coffee @@ -16,6 +16,10 @@ describe "FileConverter", -> "logger-sharelatex": log:-> err:-> + "metrics-sharelatex": + inc:-> + Timer:-> + done:-> "settings-sharelatex": @Settings = commands: convertCommandPrefix: [] diff --git a/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee b/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee index 0b9eec035e..773289f08e 100644 --- a/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee +++ b/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee @@ -27,6 +27,11 @@ describe "LocalFileWriter", -> log:-> err:-> "settings-sharelatex":@settings + "metrics-sharelatex": + inc:-> + Timer:-> + done:-> + @stubbedFsPath = "something/uploads/eio2k1j3" describe "writeStrem", -> From e64156cd273659b5f3223e6e4474b7e424207409 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 29 Nov 2018 16:41:30 +0000 Subject: [PATCH 276/555] clean npm-shrinkwrap and package.json --- services/filestore/npm-shrinkwrap.json | 3364 ++++++++++-------------- services/filestore/package.json | 17 +- 2 files changed, 1351 insertions(+), 2030 deletions(-) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 1121f348eb..548b2736f6 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -1,2563 +1,1895 @@ { "name": "filestore-sharelatex", "version": "0.1.4", - "lockfileVersion": 1, - "requires": true, "dependencies": { - "@sinonjs/commons": { - "version": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.0.2.tgz", - "integrity": "sha1-PgrHN3gWJ7iEQlf63D2AOZfQUm4=", - "requires": { - "type-detect": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" + "@google-cloud/common": { + "version": "0.23.0", + "from": "@google-cloud/common@>=0.23.0 <0.24.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.23.0.tgz" + }, + "@google-cloud/debug-agent": { + "version": "3.0.0", + "from": "@google-cloud/debug-agent@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.0.0.tgz", + "dependencies": { + "coffeescript": { + "version": "2.3.2", + "from": "coffeescript@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.3.2.tgz" + }, + "lodash": { + "version": "4.17.11", + "from": "lodash@>=4.12.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz" + } } }, + "@google-cloud/projectify": { + "version": "0.3.2", + "from": "@google-cloud/projectify@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.2.tgz" + }, + "@google-cloud/promisify": { + "version": "0.3.1", + 
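The test changes above all follow the same recipe: when a module under test is loaded through SandboxedModule, its metrics-sharelatex (and logger-sharelatex) dependencies are swapped for inert stubs so unit tests never emit real metrics. A minimal sketch of that recipe, reusing the sandboxed-module and sinon packages these tests already depend on; the helper name is illustrative:

sinon = require "sinon"
SandboxedModule = require "sandboxed-module"

# Inert stand-ins: inc() becomes a no-op stub tests can still assert on,
# Timer instances expose a no-op done().
class TimerStub
  done: ->

metricsStub =
  inc: sinon.stub()
  Timer: TimerStub

loadWithStubbedMetrics = (modulePath) ->
  SandboxedModule.require modulePath, requires:
    "metrics-sharelatex": metricsStub
    "logger-sharelatex":
      log: ->
      err: ->

Because the stubs are injected at require time, the tests stay independent of whatever backend metrics-sharelatex reports to.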
"from": "@google-cloud/promisify@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz" + }, + "@google-cloud/trace-agent": { + "version": "3.3.1", + "from": "@google-cloud/trace-agent@>=3.2.0 <4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.3.1.tgz", + "dependencies": { + "@google-cloud/common": { + "version": "0.26.2", + "from": "@google-cloud/common@>=0.26.0 <0.27.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz" + }, + "gcp-metadata": { + "version": "0.9.0", + "from": "gcp-metadata@>=0.9.0 <0.10.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.0.tgz" + }, + "google-auth-library": { + "version": "2.0.1", + "from": "google-auth-library@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.1.tgz", + "dependencies": { + "gcp-metadata": { + "version": "0.7.0", + "from": "gcp-metadata@^0.7.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz" + } + } + }, + "lru-cache": { + "version": "4.1.4", + "from": "lru-cache@>=4.1.3 <5.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.4.tgz" + }, + "through2": { + "version": "3.0.0", + "from": "through2@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.0.tgz" + }, + "uuid": { + "version": "3.3.2", + "from": "uuid@>=3.0.1 <4.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" + } + } + }, + "@sinonjs/commons": { + "version": "1.3.0", + "from": "@sinonjs/commons@>=1.2.0 <2.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.3.0.tgz" + }, "@sinonjs/formatio": { - "version": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.0.0.tgz", - "integrity": "sha1-nSgtgQMKA6A/oMXOMf2HhqTaMRo=", - "requires": { - "@sinonjs/samsam": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-2.1.0.tgz" - }, + "version": "3.0.0", + "from": "@sinonjs/formatio@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.0.0.tgz", "dependencies": { "@sinonjs/samsam": { - "version": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-2.1.0.tgz", - "integrity": "sha1-uLj1uBlgW9Y2Aabt5FkVaIDzjqM=", - "requires": { - "array-from": "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz" - } + "version": "2.1.0", + "from": "@sinonjs/samsam@2.1.0", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-2.1.0.tgz" } } }, "@sinonjs/samsam": { - "version": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-2.1.1.tgz", - "integrity": "sha1-81JiHCTJ6asu0pOnZV6NRr/WTBY=", - "requires": { - "array-from": "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz" - } + "version": "2.1.2", + "from": "@sinonjs/samsam@>=2.1.2 <3.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-2.1.2.tgz" + }, + "@types/caseless": { + "version": "0.12.1", + "from": "@types/caseless@*", + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.1.tgz" + }, + "@types/duplexify": { + "version": "3.6.0", + "from": "@types/duplexify@>=3.5.0 <4.0.0", + "resolved": "https://registry.npmjs.org/@types/duplexify/-/duplexify-3.6.0.tgz" + }, + "@types/form-data": { + "version": "2.2.1", + "from": "@types/form-data@*", + "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-2.2.1.tgz" + }, + "@types/node": { + "version": "10.12.10", + "from": "@types/node@*", + "resolved": 
"https://registry.npmjs.org/@types/node/-/node-10.12.10.tgz" + }, + "@types/request": { + "version": "2.48.1", + "from": "@types/request@>=2.47.0 <3.0.0", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.1.tgz" + }, + "@types/tough-cookie": { + "version": "2.3.4", + "from": "@types/tough-cookie@*", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.4.tgz" }, "abbrev": { - "version": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha1-+PLIh60Qv2f2NPAFtph/7TF5qsg=", - "dev": true + "version": "1.1.1", + "from": "abbrev@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz" }, "accept-encoding": { - "version": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz", - "integrity": "sha1-XdiLjfcfHcLlzGuVZezOHjmaMz4=" + "version": "0.1.0", + "from": "accept-encoding@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz" }, "accepts": { - "version": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz", - "integrity": "sha1-63d99gEXI6OxTopywIBcjoZ0a9I=", - "requires": { - "mime-types": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.20.tgz", - "negotiator": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz" - } + "version": "1.3.5", + "from": "accepts@>=1.3.5 <1.4.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz" }, - "ajv": { - "version": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", - "integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=", - "dev": true, - "requires": { - "co": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "fast-deep-equal": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", - "fast-json-stable-stringify": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "json-schema-traverse": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz" - } + "acorn": { + "version": "5.7.3", + "from": "acorn@>=5.0.3 <6.0.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.3.tgz" + }, + "agent-base": { + "version": "4.2.1", + "from": "agent-base@>=4.1.0 <5.0.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.1.tgz" + }, + "ansi-regex": { + "version": "0.2.1", + "from": "ansi-regex@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz" }, "ansi-styles": { - "version": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.0.0.tgz", - "integrity": "sha1-yxAt8cVvUSPquLZ817mAJ6AnkXg=", - "dev": true + "version": "1.1.0", + "from": "ansi-styles@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.1.0.tgz" }, "argparse": { - "version": "https://registry.npmjs.org/argparse/-/argparse-0.1.16.tgz", - "integrity": "sha1-z9AeD7uj1srtBJ+9dY1A9lGW9Xw=", - "dev": true, - "requires": { - "underscore": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", - "underscore.string": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.4.0.tgz" - }, + "version": "0.1.16", + "from": "argparse@>=0.1.11 <0.2.0", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-0.1.16.tgz", "dependencies": { "underscore": { - "version": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", - "integrity": "sha1-a7rwh3UA02vjTsqlhODbn+8DUgk=", - "dev": true + "version": "1.7.0", + "from": "underscore@>=1.7.0 <1.8.0", + "resolved": 
"https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz" }, "underscore.string": { - "version": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.4.0.tgz", - "integrity": "sha1-jN2PusTi0uoefi6Al8QvRCKA+Fs=", - "dev": true + "version": "2.4.0", + "from": "underscore.string@>=2.4.0 <2.5.0", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.4.0.tgz" } } }, "array-flatten": { - "version": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" + "version": "1.1.1", + "from": "array-flatten@1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" }, "array-from": { - "version": "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz", - "integrity": "sha1-z+nYwmYoudxa7MYqn12PHzUsEZU=" + "version": "2.1.1", + "from": "array-from@>=2.1.1 <3.0.0", + "resolved": "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz" }, - "asn1": { - "version": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", - "integrity": "sha1-jSR136tVO7M+d7VOWeiAu4ziMTY=", - "dev": true, - "requires": { - "safer-buffer": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" - } - }, - "assert-plus": { - "version": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", - "dev": true + "arrify": { + "version": "1.0.1", + "from": "arrify@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz" }, "assertion-error": { - "version": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", - "integrity": "sha1-5gtrDo8wG9l+U3UhW9pAbIURjAs=" + "version": "1.1.0", + "from": "assertion-error@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz" }, "async": { - "version": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", - "integrity": "sha1-trvgsGdLnXGXCMo43owjfLUmw9E=" + "version": "0.2.10", + "from": "async@>=0.2.10 <0.3.0", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz" }, - "asynckit": { - "version": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", - "dev": true + "async-listener": { + "version": "0.6.10", + "from": "async-listener@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz" }, "aws-sdk": { - "version": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.315.0.tgz", - "integrity": "sha1-fzkxYq8DjL73IjdERKm8muG9u+k=", - "requires": { - "buffer": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", - "events": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", - "ieee754": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz", - "jmespath": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", - "querystring": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "sax": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", - "url": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", - "uuid": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz", - "xml2js": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz" - }, + "version": "2.366.0", + "from": "aws-sdk@>=2.1.39 <3.0.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.366.0.tgz", "dependencies": { "uuid": { - "version": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz", - "integrity": "sha1-PdPT55Crwk17DToDT/q6vijrvAQ=" + "version": "3.1.0", + "from": 
"uuid@3.1.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz" } } }, "aws-sign": { - "version": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.1.tgz", - "integrity": "sha1-uWGyLwuqTxXsJBFA83dtbBQoVtA=" + "version": "0.2.1", + "from": "aws-sign@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.1.tgz" }, - "aws-sign2": { - "version": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", - "dev": true - }, - "aws4": { - "version": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", - "integrity": "sha1-8OAD2cqef1nHpQiUXXsu+aBKVC8=", - "dev": true + "axios": { + "version": "0.18.0", + "from": "axios@>=0.18.0 <0.19.0", + "resolved": "http://registry.npmjs.org/axios/-/axios-0.18.0.tgz" }, "balanced-match": { - "version": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" + "version": "1.0.0", + "from": "balanced-match@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz" }, "base64-js": { - "version": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz", - "integrity": "sha1-yrHmEY8FEJXli1KBrqjBzSK/wOM=" - }, - "bcrypt-pbkdf": { - "version": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", - "dev": true, - "optional": true, - "requires": { - "tweetnacl": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz" - } + "version": "1.3.0", + "from": "base64-js@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz" }, "best-encoding": { - "version": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz", - "integrity": "sha1-GVIT2rysBFgYuAe3ox+Dn63cl04=", - "requires": { - "accept-encoding": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz" - } + "version": "0.1.1", + "from": "best-encoding@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz" + }, + "bignumber.js": { + "version": "7.2.1", + "from": "bignumber.js@>=7.0.0 <8.0.0", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz" + }, + "bintrees": { + "version": "1.0.1", + "from": "bintrees@1.0.1", + "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz" }, "bl": { - "version": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz", - "integrity": "sha1-P7BnBgKsKHjrdw3CA58YNr5irls=", - "requires": { - "readable-stream": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz" + "version": "0.7.0", + "from": "bl@>=0.7.0 <0.8.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz", + "dependencies": { + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "readable-stream": { + "version": "1.0.34", + "from": "readable-stream@>=1.0.2 <1.1.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "string_decoder@>=0.10.0 <0.11.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + } } }, "body-parser": { - "version": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.3.tgz", - "integrity": "sha1-WykhmP/dVTs6DyDe0FkrlWlVyLQ=", - "requires": { - "bytes": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", - "content-type": 
"https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "debug": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "depd": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "http-errors": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", - "iconv-lite": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz", - "on-finished": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "qs": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "raw-body": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.3.tgz", - "type-is": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz" - } + "version": "1.18.3", + "from": "body-parser@>=1.2.0 <2.0.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.3.tgz" }, "boom": { - "version": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz", - "integrity": "sha1-yM2wQUNZEnQWKMBE7Mcy0dF8Ceo=", - "requires": { - "hoek": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz" - } + "version": "0.3.8", + "from": "boom@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz" }, "brace-expansion": { - "version": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha1-PH/L9SnYcibz0vUrlm/1Jx60Qd0=", - "requires": { - "balanced-match": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "concat-map": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" - } - }, - "broadway": { - "version": "https://registry.npmjs.org/broadway/-/broadway-0.3.6.tgz", - "integrity": "sha1-fb7waLlUt5B5Jf1USWO1eKkCuno=", - "dev": true, - "requires": { - "cliff": "https://registry.npmjs.org/cliff/-/cliff-0.1.9.tgz", - "eventemitter2": "http://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz", - "nconf": "https://registry.npmjs.org/nconf/-/nconf-0.6.9.tgz", - "utile": "https://registry.npmjs.org/utile/-/utile-0.2.1.tgz", - "winston": "https://registry.npmjs.org/winston/-/winston-0.8.0.tgz" - }, - "dependencies": { - "cliff": { - "version": "https://registry.npmjs.org/cliff/-/cliff-0.1.9.tgz", - "integrity": "sha1-ohHgnGo947oa8n0EnTASUNGIErw=", - "dev": true, - "requires": { - "colors": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", - "eyes": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", - "winston": "https://registry.npmjs.org/winston/-/winston-0.8.0.tgz" - } - }, - "winston": { - "version": "https://registry.npmjs.org/winston/-/winston-0.8.0.tgz", - "integrity": "sha1-YdCDD6aZcGISIGsKK1ymmpMENmg=", - "dev": true, - "requires": { - "async": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", - "colors": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", - "cycle": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz", - "eyes": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", - "pkginfo": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", - "stack-trace": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" - } - } - } + "version": "1.1.11", + "from": "brace-expansion@>=1.1.7 <2.0.0", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" }, "browser-stdout": { - "version": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=" + "version": "1.3.1", + "from": "browser-stdout@1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz" }, "buffer": { - "version": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", - "integrity": 
"sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=", - "requires": { - "base64-js": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz", - "ieee754": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz", - "isarray": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" - } + "version": "4.9.1", + "from": "buffer@4.9.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz" + }, + "buffer-equal-constant-time": { + "version": "1.0.1", + "from": "buffer-equal-constant-time@1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz" + }, + "builtin-modules": { + "version": "3.0.0", + "from": "builtin-modules@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.0.0.tgz" }, "bunyan": { - "version": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz", - "integrity": "sha1-X259RMQ7lS9WsPQTCeOrEjkbTi0=", - "requires": { - "dtrace-provider": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", - "mv": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", - "safe-json-stringify": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz" - } + "version": "1.5.1", + "from": "bunyan@1.5.1", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz" }, "bytes": { - "version": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", - "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" + "version": "3.0.0", + "from": "bytes@3.0.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz" }, "caseless": { - "version": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz", - "integrity": "sha1-U06XkWOH07cGtk/eu6xGQ4RQk08=" + "version": "0.3.0", + "from": "caseless@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz" }, "chai": { - "version": "https://registry.npmjs.org/chai/-/chai-4.1.2.tgz", - "integrity": "sha1-D2RYS6ZC8PKs4oBiefTwbKI61zw=", - "requires": { - "assertion-error": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", - "check-error": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", - "deep-eql": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", - "get-func-name": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", - "pathval": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz", - "type-detect": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" - } + "version": "4.2.0", + "from": "chai@latest", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz" }, "chalk": { - "version": "https://registry.npmjs.org/chalk/-/chalk-0.4.0.tgz", - "integrity": "sha1-UZmj3c0MHv4jvAjBsCewYXbgxk8=", - "dev": true, - "requires": { - "ansi-styles": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.0.0.tgz", - "has-color": "https://registry.npmjs.org/has-color/-/has-color-0.1.7.tgz", - "strip-ansi": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.1.1.tgz" - } + "version": "0.5.1", + "from": "chalk@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-0.5.1.tgz" }, "check-error": { - "version": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", - "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=" - }, - "cliff": { - "version": "https://registry.npmjs.org/cliff/-/cliff-0.1.10.tgz", - "integrity": "sha1-U74z6p9ZvshWCe4wCsQgdgPlIBM=", - "dev": true, - "requires": { - "colors": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", - "eyes": 
"https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", - "winston": "https://registry.npmjs.org/winston/-/winston-0.8.3.tgz" - }, - "dependencies": { - "colors": { - "version": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", - "integrity": "sha1-BDP0TYCWgP3rYO0mDxsMJi6CpAs=", - "dev": true - } - } - }, - "co": { - "version": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=", - "dev": true + "version": "1.0.2", + "from": "check-error@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz" }, "coffee-script": { - "version": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz", - "integrity": "sha1-YplqhheAx15tUGnROCJyO3NAS/w=", - "requires": { - "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" - } + "version": "1.7.1", + "from": "coffee-script@>=1.7.1 <1.8.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz" }, "colors": { - "version": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", - "integrity": "sha1-JCP+ZnisDF2uiFLl0OW+CMmXq8w=", - "dev": true + "version": "0.6.2", + "from": "colors@>=0.6.2 <0.7.0", + "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz" }, "combined-stream": { - "version": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", - "integrity": "sha1-ATfmV7qlp1QcV6w3rF/AfXO03B8=", - "requires": { - "delayed-stream": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" - } + "version": "0.0.7", + "from": "combined-stream@>=0.0.4 <0.1.0", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz" }, "commander": { - "version": "http://registry.npmjs.org/commander/-/commander-2.0.0.tgz", - "integrity": "sha1-0bhvkB+LZL2UG96tr5JFMDk76Sg=", - "dev": true + "version": "2.0.0", + "from": "commander@2.0.0", + "resolved": "http://registry.npmjs.org/commander/-/commander-2.0.0.tgz" }, "concat-map": { - "version": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + "version": "0.0.1", + "from": "concat-map@0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" }, - "configstore": { - "version": "https://registry.npmjs.org/configstore/-/configstore-0.3.2.tgz", - "integrity": "sha1-JeTBbDdoq/dcWmW8YXYfSVBVtFk=", - "dev": true, - "requires": { - "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.11.tgz", - "js-yaml": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.0.tgz", - "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "object-assign": "https://registry.npmjs.org/object-assign/-/object-assign-2.1.1.tgz", - "osenv": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", - "user-home": "https://registry.npmjs.org/user-home/-/user-home-1.1.1.tgz", - "uuid": "https://registry.npmjs.org/uuid/-/uuid-2.0.3.tgz", - "xdg-basedir": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-1.0.1.tgz" - }, - "dependencies": { - "argparse": { - "version": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha1-vNZ5HqWuCXJeF+WtmIE0zUCz2RE=", - "dev": true, - "requires": { - "sprintf-js": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" - } - }, - "esprima": { - "version": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha1-E7BM2z5sXRnfkatph6hpVhmwqnE=", - "dev": true - }, - "graceful-fs": { - "version": 
"https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.11.tgz", - "integrity": "sha1-dhPHeKGv6mLyXGMKCG1/Osu92Bg=", - "dev": true, - "requires": { - "natives": "https://registry.npmjs.org/natives/-/natives-1.1.5.tgz" - } - }, - "js-yaml": { - "version": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.0.tgz", - "integrity": "sha1-6u1lbsg0TxD1J8a/obbiJE3hZ9E=", - "dev": true, - "requires": { - "argparse": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "esprima": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz" - } - }, - "mkdirp": { - "version": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", - "dev": true, - "requires": { - "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" - } - } - } + "console-log-level": { + "version": "1.4.0", + "from": "console-log-level@>=1.4.0 <2.0.0", + "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.0.tgz" }, "content-disposition": { - "version": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", - "integrity": "sha1-DPaLud318r55YcOoUXjLhdunjLQ=" + "version": "0.5.2", + "from": "content-disposition@0.5.2", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz" }, "content-type": { - "version": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "integrity": "sha1-4TjMdeBAxyexlm/l5fjJruJW/js=" + "version": "1.0.4", + "from": "content-type@>=1.0.4 <1.1.0", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz" + }, + "continuation-local-storage": { + "version": "3.2.1", + "from": "continuation-local-storage@>=3.2.1 <4.0.0", + "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.2.1.tgz" }, "cookie": { - "version": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", - "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" + "version": "0.3.1", + "from": "cookie@0.3.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz" }, "cookie-jar": { - "version": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz", - "integrity": "sha1-ZOzAasl423leS1KQy+SLo3gUAPo=" + "version": "0.2.0", + "from": "cookie-jar@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz" }, "cookie-signature": { - "version": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + "version": "1.0.6", + "from": "cookie-signature@1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz" }, "core-util-is": { - "version": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + "version": "1.0.2", + "from": "core-util-is@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" }, "cryptiles": { - "version": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz", - "integrity": "sha1-GlVnNPBtJLo0hirpy55wmjr7/xw=", - "requires": { - "boom": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz" - } - }, - "cycle": { - "version": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz", - "integrity": "sha1-IegLK+hYD5i0aPN5QwZisEbDStI=", - "dev": true - }, - "dashdash": { - "version": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", - "dev": true, - "requires": { - 
"assert-plus": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" - } + "version": "0.1.3", + "from": "cryptiles@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz" }, "dateformat": { - "version": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz", - "integrity": "sha1-sCIMAt6YYXQztyhRz0fePfLNvuk=", - "dev": true + "version": "1.0.2-1.2.3", + "from": "dateformat@1.0.2-1.2.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz" }, "debug": { - "version": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha1-XRKFFd8TT/Mn6QpMk/Tgd6U2NB8=", - "requires": { - "ms": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" - } + "version": "2.6.9", + "from": "debug@2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" }, "deep-eql": { - "version": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", - "integrity": "sha1-38lARACtHI/gI+faHfHBR8S0RN8=", - "requires": { - "type-detect": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" - } + "version": "3.0.1", + "from": "deep-eql@>=3.0.1 <4.0.0", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz" }, - "deep-equal": { - "version": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.0.1.tgz", - "integrity": "sha1-9dJgKStmDghO/0zbyfCK0yR0SLU=", - "dev": true + "define-properties": { + "version": "1.1.3", + "from": "define-properties@>=1.1.2 <2.0.0", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz" }, "delayed-stream": { - "version": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", - "integrity": "sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8=" + "version": "0.0.5", + "from": "delayed-stream@0.0.5", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" }, "depd": { - "version": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" + "version": "1.1.2", + "from": "depd@>=1.1.2 <1.2.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz" }, "destroy": { - "version": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", - "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" + "version": "1.0.4", + "from": "destroy@>=1.0.4 <1.1.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz" }, "diff": { - "version": "https://registry.npmjs.org/diff/-/diff-1.0.7.tgz", - "integrity": "sha1-JLuwAcSn1VIhaefKvbLCgU7ZHPQ=", - "dev": true - }, - "director": { - "version": "https://registry.npmjs.org/director/-/director-1.2.7.tgz", - "integrity": "sha1-v9N0EHX9f7GlsuE2WMX0vsd3NvM=", - "dev": true + "version": "1.0.7", + "from": "diff@1.0.7", + "resolved": "https://registry.npmjs.org/diff/-/diff-1.0.7.tgz" }, "dtrace-provider": { - "version": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", - "integrity": "sha1-CweNVReTfYcxAUUtkUZzdVe3XlE=", - "optional": true, - "requires": { - "nan": "https://registry.npmjs.org/nan/-/nan-2.11.0.tgz" - } + "version": "0.6.0", + "from": "dtrace-provider@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "optional": true }, - "ecc-jsbn": { - "version": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", - "dev": true, - "optional": true, - "requires": { - "jsbn": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "safer-buffer": 
"https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" - } + "duplexify": { + "version": "3.6.1", + "from": "duplexify@>=3.6.0 <4.0.0", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz" + }, + "ecdsa-sig-formatter": { + "version": "1.0.10", + "from": "ecdsa-sig-formatter@1.0.10", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.10.tgz" }, "ee-first": { - "version": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" + "version": "1.1.1", + "from": "ee-first@1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz" + }, + "emitter-listener": { + "version": "1.1.2", + "from": "emitter-listener@>=1.1.1 <2.0.0", + "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz" }, "encodeurl": { - "version": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" + "version": "1.0.2", + "from": "encodeurl@>=1.0.2 <1.1.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz" + }, + "end-of-stream": { + "version": "1.4.1", + "from": "end-of-stream@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz" + }, + "ent": { + "version": "2.2.0", + "from": "ent@>=2.2.0 <3.0.0", + "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz" + }, + "es-abstract": { + "version": "1.12.0", + "from": "es-abstract@>=1.5.1 <2.0.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.12.0.tgz" + }, + "es-to-primitive": { + "version": "1.2.0", + "from": "es-to-primitive@>=1.1.1 <2.0.0", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz" + }, + "es6-promise": { + "version": "4.2.5", + "from": "es6-promise@>=4.0.3 <5.0.0", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.5.tgz" + }, + "es6-promisify": { + "version": "5.0.0", + "from": "es6-promisify@>=5.0.0 <6.0.0", + "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz" }, "escape-html": { - "version": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" + "version": "1.0.3", + "from": "escape-html@>=1.0.3 <1.1.0", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" }, "escape-string-regexp": { - "version": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" + "version": "1.0.5", + "from": "escape-string-regexp@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" }, "esprima": { - "version": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz", - "integrity": "sha1-n1V+CPw7TSbs6d00+Pv0drYlha0=", - "dev": true + "version": "1.0.4", + "from": "esprima@>=1.0.2 <1.1.0", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz" }, "etag": { - "version": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" - }, - "event-stream": { - "version": "https://registry.npmjs.org/event-stream/-/event-stream-0.5.3.tgz", - "integrity": "sha1-t3uTCfcQet3+q2PwwOr9jbC9jBw=", - "dev": true, - "requires": { - "optimist": "https://registry.npmjs.org/optimist/-/optimist-0.2.8.tgz" - }, - "dependencies": { - "optimist": { - "version": 
"https://registry.npmjs.org/optimist/-/optimist-0.2.8.tgz", - "integrity": "sha1-6YGrfiaLRXlIWTtVZ0wJmoFcrDE=", - "dev": true, - "requires": { - "wordwrap": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" - } - } - } + "version": "1.8.1", + "from": "etag@>=1.8.1 <1.9.0", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz" }, "eventemitter2": { - "version": "http://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz", - "integrity": "sha1-j2G3XN4BKy6esoTUVFWDtWQ7Yas=", - "dev": true + "version": "0.4.14", + "from": "eventemitter2@>=0.4.13 <0.5.0", + "resolved": "http://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz" }, "events": { - "version": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", - "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=" + "version": "1.1.1", + "from": "events@1.1.1", + "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz" + }, + "exit": { + "version": "0.1.2", + "from": "exit@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz" }, "express": { - "version": "https://registry.npmjs.org/express/-/express-4.16.3.tgz", - "integrity": "sha1-avilAjUNsyRuzEvs9rWjTSL37VM=", - "requires": { - "accepts": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz", - "array-flatten": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "body-parser": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.2.tgz", - "content-disposition": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", - "content-type": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "cookie": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", - "cookie-signature": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "debug": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "depd": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "encodeurl": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "escape-html": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "etag": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "finalhandler": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.1.tgz", - "fresh": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "merge-descriptors": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "methods": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "on-finished": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "parseurl": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", - "path-to-regexp": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "proxy-addr": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz", - "qs": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", - "range-parser": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", - "safe-buffer": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", - "send": "https://registry.npmjs.org/send/-/send-0.16.2.tgz", - "serve-static": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.2.tgz", - "setprototypeof": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", - "statuses": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", - "type-is": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz", - "utils-merge": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "vary": 
"https://registry.npmjs.org/vary/-/vary-1.1.2.tgz" - }, + "version": "4.16.4", + "from": "express@>=4.2.0 <5.0.0", + "resolved": "https://registry.npmjs.org/express/-/express-4.16.4.tgz", "dependencies": { - "body-parser": { - "version": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.2.tgz", - "integrity": "sha1-h2eKGdhLR9hZuDGZvVm84iKxBFQ=", - "requires": { - "bytes": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", - "content-type": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "debug": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "depd": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "http-errors": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", - "iconv-lite": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz", - "on-finished": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "qs": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", - "raw-body": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.2.tgz", - "type-is": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz" - } - }, - "iconv-lite": { - "version": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz", - "integrity": "sha1-90aPYBNfXl2tM5nAqBvpoWA6CCs=" - }, - "qs": { - "version": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", - "integrity": "sha1-NJzfbu+J7EXBLX1es/wMhwNDptg=" - }, - "raw-body": { - "version": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.2.tgz", - "integrity": "sha1-vNYMd9Prk83gBQKVw/N5OJvIj4k=", - "requires": { - "bytes": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", - "http-errors": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.2.tgz", - "iconv-lite": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz", - "unpipe": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" - }, - "dependencies": { - "depd": { - "version": "https://registry.npmjs.org/depd/-/depd-1.1.1.tgz", - "integrity": "sha1-V4O04cRZ8G+lyif5kfPQbnoxA1k=" - }, - "http-errors": { - "version": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.2.tgz", - "integrity": "sha1-CgAsyFcHGSp+eUbO7cERVfYOxzY=", - "requires": { - "depd": "https://registry.npmjs.org/depd/-/depd-1.1.1.tgz", - "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "setprototypeof": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.3.tgz", - "statuses": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz" - } - }, - "setprototypeof": { - "version": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.3.tgz", - "integrity": "sha1-ZlZ+NwQ+608E2RvWWMDL77VbjgQ=" - } - } - }, "statuses": { - "version": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", - "integrity": "sha1-u3PURtonlhBu/MG2AaJT1sRr0Ic=" + "version": "1.4.0", + "from": "statuses@>=1.4.0 <1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz" } } }, "extend": { - "version": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha1-+LETa0Bx+9jrFAr/hYsQGewpFfo=", - "dev": true - }, - "extsprintf": { - "version": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", - "dev": true - }, - "eyes": { - "version": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", - "integrity": "sha1-Ys8SAjTGg3hdkCNIqADvPgzCC8A=", - "dev": true - }, - "fast-deep-equal": { - "version": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", - "integrity": "sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ=", - 
"dev": true - }, - "fast-json-stable-stringify": { - "version": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=", - "dev": true - }, - "faye-websocket": { - "version": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.4.4.tgz", - "integrity": "sha1-wUxbO/FNdBf/v9mQwKdJXNnzN7w=", - "dev": true + "version": "3.0.2", + "from": "extend@>=3.0.1 <4.0.0", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" }, "finalhandler": { - "version": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.1.tgz", - "integrity": "sha1-7r9O2EAHnIP0JJA4ydcDAIMBsQU=", - "requires": { - "debug": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "encodeurl": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "escape-html": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "on-finished": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "parseurl": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", - "statuses": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", - "unpipe": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" - }, + "version": "1.1.1", + "from": "finalhandler@1.1.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.1.tgz", "dependencies": { "statuses": { - "version": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", - "integrity": "sha1-u3PURtonlhBu/MG2AaJT1sRr0Ic=" + "version": "1.4.0", + "from": "statuses@>=1.4.0 <1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz" } } }, + "findit2": { + "version": "2.2.3", + "from": "findit2@>=2.2.3 <3.0.0", + "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz" + }, "findup-sync": { - "version": "https://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz", - "integrity": "sha1-fz56l7gjksZTvwZYm9hRkOk8NoM=", - "dev": true, - "requires": { - "glob": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz", - "lodash": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" - }, + "version": "0.1.3", + "from": "findup-sync@>=0.1.2 <0.2.0", + "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz", "dependencies": { "glob": { - "version": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz", - "integrity": "sha1-Spc/Y1uRkPcV0QmH1cAP0oFevj0=", - "dev": true, - "requires": { - "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz" - } + "version": "3.2.11", + "from": "glob@>=3.2.9 <3.3.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz" }, "lodash": { - "version": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz", - "integrity": "sha1-+t2DS5aDBz2hebPq5tnA0VBT9z4=", - "dev": true + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" }, "minimatch": { - "version": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz", - "integrity": "sha1-J12O2qxPG7MyZHIInnlJyDlGmd0=", - "dev": true, - "requires": { - "lru-cache": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", - "sigmund": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" - } + "version": "0.3.0", + "from": "minimatch@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz" } } }, - "flatiron": { - "version": "https://registry.npmjs.org/flatiron/-/flatiron-0.4.3.tgz", - 
"integrity": "sha1-JIz3mj2n19w3nioRySonGcu1QPY=", - "dev": true, - "requires": { - "broadway": "https://registry.npmjs.org/broadway/-/broadway-0.3.6.tgz", - "director": "https://registry.npmjs.org/director/-/director-1.2.7.tgz", - "optimist": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz", - "prompt": "https://registry.npmjs.org/prompt/-/prompt-0.2.14.tgz" - }, + "follow-redirects": { + "version": "1.5.10", + "from": "follow-redirects@>=1.3.0 <2.0.0", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", "dependencies": { - "optimist": { - "version": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz", - "integrity": "sha1-aUJIJvNAX3nxQub8PZrljU27kgA=", - "dev": true, - "requires": { - "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "wordwrap": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" - } + "debug": { + "version": "3.1.0", + "from": "debug@3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz" } } }, - "forever": { - "version": "https://registry.npmjs.org/forever/-/forever-0.14.2.tgz", - "integrity": "sha1-6Tsr2UxXBavBmxXlTDEz1puinGs=", - "dev": true, - "requires": { - "cliff": "https://registry.npmjs.org/cliff/-/cliff-0.1.10.tgz", - "colors": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", - "flatiron": "https://registry.npmjs.org/flatiron/-/flatiron-0.4.3.tgz", - "forever-monitor": "https://registry.npmjs.org/forever-monitor/-/forever-monitor-1.5.2.tgz", - "nconf": "https://registry.npmjs.org/nconf/-/nconf-0.6.9.tgz", - "nssocket": "https://registry.npmjs.org/nssocket/-/nssocket-0.5.3.tgz", - "optimist": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", - "timespan": "https://registry.npmjs.org/timespan/-/timespan-2.3.0.tgz", - "utile": "https://registry.npmjs.org/utile/-/utile-0.2.1.tgz", - "winston": "https://registry.npmjs.org/winston/-/winston-0.8.3.tgz" - } - }, "forever-agent": { - "version": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz", - "integrity": "sha1-4cJcetROCcOPIzh2x2/MJP+EOx8=" - }, - "forever-monitor": { - "version": "https://registry.npmjs.org/forever-monitor/-/forever-monitor-1.5.2.tgz", - "integrity": "sha1-J5OI36k7CFNj1rKKgj7wpq7rNdc=", - "dev": true, - "requires": { - "broadway": "https://registry.npmjs.org/broadway/-/broadway-0.3.6.tgz", - "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-1.0.0.tgz", - "ps-tree": "https://registry.npmjs.org/ps-tree/-/ps-tree-0.0.3.tgz", - "utile": "https://registry.npmjs.org/utile/-/utile-0.2.1.tgz", - "watch": "https://registry.npmjs.org/watch/-/watch-0.13.0.tgz" - }, - "dependencies": { - "minimatch": { - "version": "https://registry.npmjs.org/minimatch/-/minimatch-1.0.0.tgz", - "integrity": "sha1-4N0hILSeG3JM6NcUxSCCKpQ4V20=", - "dev": true, - "requires": { - "lru-cache": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", - "sigmund": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" - } - } - } + "version": "0.2.0", + "from": "forever-agent@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz" }, "form-data": { - "version": "https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz", - "integrity": "sha1-2zRaU3jYau6x7V1VO4aawZLS9e0=", - "requires": { - "async": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", - "combined-stream": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", - "mime": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" - }, + "version": 
"0.0.10", + "from": "form-data@>=0.0.3 <0.1.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz", "dependencies": { "mime": { - "version": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", - "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" + "version": "1.2.11", + "from": "mime@>=1.2.2 <1.3.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" } } }, "forwarded": { - "version": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", - "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" + "version": "0.1.2", + "from": "forwarded@>=0.1.2 <0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz" }, "fresh": { - "version": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" + "version": "0.5.2", + "from": "fresh@0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz" }, "fs-extra": { - "version": "https://registry.npmjs.org/fs-extra/-/fs-extra-1.0.0.tgz", - "integrity": "sha1-zTzl9+fLYUWIP8rjGR6Yd/hYeVA=", - "requires": { - "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", - "jsonfile": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", - "klaw": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz" - } + "version": "1.0.0", + "from": "fs-extra@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-1.0.0.tgz" }, "fs.realpath": { - "version": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + "version": "1.0.0", + "from": "fs.realpath@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" }, - "gaze": { - "version": "https://registry.npmjs.org/gaze/-/gaze-0.4.3.tgz", - "integrity": "sha1-5Tj0/15P5kj0c6l+HrslPS3hJ7U=", - "dev": true, - "requires": { - "globule": "https://registry.npmjs.org/globule/-/globule-0.1.0.tgz" - } + "function-bind": { + "version": "1.1.1", + "from": "function-bind@>=1.1.1 <2.0.0", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz" + }, + "gcp-metadata": { + "version": "0.7.0", + "from": "gcp-metadata@>=0.7.0 <0.8.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz" }, "get-func-name": { - "version": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", - "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=" + "version": "2.0.0", + "from": "get-func-name@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz" }, - "getpass": { - "version": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", - "dev": true, - "requires": { - "assert-plus": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" - } + "getobject": { + "version": "0.1.0", + "from": "getobject@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/getobject/-/getobject-0.1.0.tgz" }, "gettemporaryfilepath": { - "version": "https://registry.npmjs.org/gettemporaryfilepath/-/gettemporaryfilepath-0.0.1.tgz", - "integrity": "sha1-uKLHAUu1zUFTTpg7XKFgo3RwhGk=" + "version": "0.0.1", + "from": "gettemporaryfilepath@0.0.1", + "resolved": "https://registry.npmjs.org/gettemporaryfilepath/-/gettemporaryfilepath-0.0.1.tgz" }, "glob": { - "version": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", - "optional": true, - "requires": { - "inflight": 
"https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "once": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "path-is-absolute": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + "version": "6.0.4", + "from": "glob@>=6.0.1 <7.0.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "optional": true + }, + "google-auth-library": { + "version": "1.6.1", + "from": "google-auth-library@>=1.6.0 <2.0.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-1.6.1.tgz", + "dependencies": { + "gcp-metadata": { + "version": "0.6.3", + "from": "gcp-metadata@>=0.6.3 <0.7.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.6.3.tgz" + }, + "lru-cache": { + "version": "4.1.4", + "from": "lru-cache@>=4.1.3 <5.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.4.tgz" + } } }, - "globule": { - "version": "https://registry.npmjs.org/globule/-/globule-0.1.0.tgz", - "integrity": "sha1-2cjt3h2nnRJaFRt5UzuXhnY0auU=", - "dev": true, - "requires": { - "glob": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz", - "lodash": "https://registry.npmjs.org/lodash/-/lodash-1.0.2.tgz", - "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" - }, + "google-p12-pem": { + "version": "1.0.2", + "from": "google-p12-pem@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.2.tgz", "dependencies": { - "glob": { - "version": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz", - "integrity": "sha1-0p4KBV3qUTj00H7UDomC6DwgZs0=", - "dev": true, - "requires": { - "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz", - "inherits": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz", - "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" - } - }, - "graceful-fs": { - "version": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz", - "integrity": "sha1-FaSAaldUfLLS2/J/QuiajDRRs2Q=", - "dev": true - }, - "inherits": { - "version": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz", - "integrity": "sha1-ykMJ2t7mtUzAuNJH6NfHoJdb3Js=", - "dev": true - }, - "lodash": { - "version": "https://registry.npmjs.org/lodash/-/lodash-1.0.2.tgz", - "integrity": "sha1-j1dWDIO1n8JwvT1WG2kAQ0MOJVE=", - "dev": true - }, - "minimatch": { - "version": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", - "integrity": "sha1-x054BXT2PG+aCQ6Q775u9TpqdWo=", - "dev": true, - "requires": { - "lru-cache": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", - "sigmund": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" - } + "pify": { + "version": "3.0.0", + "from": "pify@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz" } } }, "graceful-fs": { - "version": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", - "integrity": "sha1-Dovf5NHduIVNZOBOp8AOKgJuVlg=" + "version": "4.1.15", + "from": "graceful-fs@>=4.1.2 <5.0.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.15.tgz" }, "growl": { - "version": "https://registry.npmjs.org/growl/-/growl-1.7.0.tgz", - "integrity": "sha1-3i1mE20ALhErpw8/EMMc98NQsto=", - "dev": true + "version": "1.7.0", + "from": "growl@>=1.7.0 <1.8.0", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.7.0.tgz" }, 
"grunt": { - "version": "https://registry.npmjs.org/grunt/-/grunt-0.4.1.tgz", - "integrity": "sha1-1YkuVoCt2e0b796apjXPRrj0lyk=", - "dev": true, - "requires": { - "async": "https://registry.npmjs.org/async/-/async-0.1.22.tgz", - "coffee-script": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.3.3.tgz", - "colors": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", - "dateformat": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz", - "eventemitter2": "http://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz", - "findup-sync": "https://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz", - "glob": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz", - "hooker": "https://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz", - "iconv-lite": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz", - "js-yaml": "https://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz", - "lodash": "http://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz", - "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", - "nopt": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", - "rimraf": "https://registry.npmjs.org/rimraf/-/rimraf-2.0.3.tgz", - "underscore.string": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz", - "which": "https://registry.npmjs.org/which/-/which-1.0.9.tgz" - }, + "version": "0.4.5", + "from": "grunt@>=0.4.5 <0.5.0", + "resolved": "https://registry.npmjs.org/grunt/-/grunt-0.4.5.tgz", "dependencies": { "async": { - "version": "https://registry.npmjs.org/async/-/async-0.1.22.tgz", - "integrity": "sha1-D8GqoIig4+8Ovi2IMbqw3PiEUGE=", - "dev": true + "version": "0.1.22", + "from": "async@>=0.1.22 <0.2.0", + "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz" }, "coffee-script": { - "version": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.3.3.tgz", - "integrity": "sha1-FQ1rTLUiiUNp7+1qIQHCC8f0pPQ=", - "dev": true + "version": "1.3.3", + "from": "coffee-script@>=1.3.3 <1.4.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.3.3.tgz" }, "glob": { - "version": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz", - "integrity": "sha1-0p4KBV3qUTj00H7UDomC6DwgZs0=", - "dev": true, - "requires": { - "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz", - "inherits": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz", - "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" - } + "version": "3.1.21", + "from": "glob@>=3.1.21 <3.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz" }, "graceful-fs": { - "version": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz", - "integrity": "sha1-FaSAaldUfLLS2/J/QuiajDRRs2Q=", - "dev": true + "version": "1.2.3", + "from": "graceful-fs@>=1.2.0 <1.3.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz" }, "iconv-lite": { - "version": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz", - "integrity": "sha1-HOYKOleGSiktEyH/RgnKS7llrcg=", - "dev": true + "version": "0.2.11", + "from": "iconv-lite@>=0.2.11 <0.3.0", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz" }, "inherits": { - "version": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz", - "integrity": "sha1-ykMJ2t7mtUzAuNJH6NfHoJdb3Js=", - "dev": true + "version": "1.0.2", + "from": "inherits@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz" }, "minimatch": { - "version": 
"https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", - "integrity": "sha1-x054BXT2PG+aCQ6Q775u9TpqdWo=", - "dev": true, - "requires": { - "lru-cache": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", - "sigmund": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" - } - }, - "rimraf": { - "version": "https://registry.npmjs.org/rimraf/-/rimraf-2.0.3.tgz", - "integrity": "sha1-9QopZecUTpr9mYmC8V33BnMPVqk=", - "dev": true, - "requires": { - "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.1.14.tgz" - }, - "dependencies": { - "graceful-fs": { - "version": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.1.14.tgz", - "integrity": "sha1-BweNtfY3f2Mh/Oqu30l94STclGU=", - "dev": true, - "optional": true - } - } + "version": "0.2.14", + "from": "minimatch@>=0.2.12 <0.3.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" } } }, "grunt-bunyan": { - "version": "https://registry.npmjs.org/grunt-bunyan/-/grunt-bunyan-0.5.0.tgz", - "integrity": "sha1-aCnXbgGZQ9owQTk2MaNuKsgpsWw=", - "requires": { - "lodash": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" - }, + "version": "0.5.0", + "from": "grunt-bunyan@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/grunt-bunyan/-/grunt-bunyan-0.5.0.tgz", "dependencies": { "lodash": { - "version": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz", - "integrity": "sha1-+t2DS5aDBz2hebPq5tnA0VBT9z4=" + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" } } }, - "grunt-concurrent": { - "version": "https://registry.npmjs.org/grunt-concurrent/-/grunt-concurrent-0.4.2.tgz", - "integrity": "sha1-Mf2Qbm4X2oTXgLLOZNn4QGX3PgY=", - "dev": true, - "requires": { - "async": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", - "lpad": "https://registry.npmjs.org/lpad/-/lpad-0.1.0.tgz" - } - }, "grunt-contrib-clean": { - "version": "https://registry.npmjs.org/grunt-contrib-clean/-/grunt-contrib-clean-0.5.0.tgz", - "integrity": "sha1-9T397ghJsce0Dp67umn0jExgecU=", - "dev": true, - "requires": { - "rimraf": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" - } + "version": "0.6.0", + "from": "grunt-contrib-clean@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/grunt-contrib-clean/-/grunt-contrib-clean-0.6.0.tgz" }, "grunt-contrib-coffee": { - "version": "https://registry.npmjs.org/grunt-contrib-coffee/-/grunt-contrib-coffee-0.7.0.tgz", - "integrity": "sha1-ixIme3TnM4sfKcW4txj7n4mYLxM=", - "dev": true, - "requires": { - "coffee-script": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.3.tgz" - }, + "version": "0.11.1", + "from": "grunt-contrib-coffee@>=0.11.0 <0.12.0", + "resolved": "https://registry.npmjs.org/grunt-contrib-coffee/-/grunt-contrib-coffee-0.11.1.tgz", "dependencies": { - "coffee-script": { - "version": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.3.tgz", - "integrity": "sha1-Y1XTLPGwTN/2tITl5xF4Ky8MOb4=", - "dev": true + "lodash": { + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" } } }, - "grunt-contrib-requirejs": { - "version": "https://registry.npmjs.org/grunt-contrib-requirejs/-/grunt-contrib-requirejs-0.4.1.tgz", - "integrity": "sha1-hiuhZxQbio82r1RE/qsycruM9L0=", - "dev": true, - "requires": { - "requirejs": "https://registry.npmjs.org/requirejs/-/requirejs-2.1.22.tgz" - } - }, - "grunt-contrib-watch": { - "version": 
"https://registry.npmjs.org/grunt-contrib-watch/-/grunt-contrib-watch-0.5.3.tgz", - "integrity": "sha1-fZ61Rl1Qb6FPqspH5uh5CoLBye4=", - "dev": true, - "requires": { - "gaze": "https://registry.npmjs.org/gaze/-/gaze-0.4.3.tgz", - "tiny-lr": "https://registry.npmjs.org/tiny-lr/-/tiny-lr-0.0.4.tgz" - } - }, "grunt-execute": { - "version": "https://registry.npmjs.org/grunt-execute/-/grunt-execute-0.2.2.tgz", - "integrity": "sha1-TpRf5XlZzA3neZCDtrQq7ZYWNQo=" + "version": "0.2.2", + "from": "grunt-execute@>=0.2.2 <0.3.0", + "resolved": "https://registry.npmjs.org/grunt-execute/-/grunt-execute-0.2.2.tgz" }, - "grunt-forever": { - "version": "https://registry.npmjs.org/grunt-forever/-/grunt-forever-0.4.7.tgz", - "integrity": "sha1-dHDb4a2hFFAhZKTCoAOHXfj+EzA=", - "dev": true, - "requires": { - "forever": "https://registry.npmjs.org/forever/-/forever-0.14.2.tgz" - } - }, - "grunt-mocha-test": { - "version": "https://registry.npmjs.org/grunt-mocha-test/-/grunt-mocha-test-0.8.2.tgz", - "integrity": "sha1-emGEuYhg0Phb3qrWvqob199bvus=", - "dev": true, - "requires": { - "mocha": "https://registry.npmjs.org/mocha/-/mocha-1.14.0.tgz" - }, + "grunt-legacy-log": { + "version": "0.1.3", + "from": "grunt-legacy-log@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/grunt-legacy-log/-/grunt-legacy-log-0.1.3.tgz", "dependencies": { - "glob": { - "version": "https://registry.npmjs.org/glob/-/glob-3.2.3.tgz", - "integrity": "sha1-4xPusknHr/qlxHUoaw4RW1mDlGc=", - "dev": true, - "requires": { - "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-2.0.3.tgz", - "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" - } + "lodash": { + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" }, - "graceful-fs": { - "version": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-2.0.3.tgz", - "integrity": "sha1-fNLNsiiko/Nule+mzBQt59GhNtA=", - "dev": true - }, - "minimatch": { - "version": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", - "integrity": "sha1-x054BXT2PG+aCQ6Q775u9TpqdWo=", - "dev": true, - "requires": { - "lru-cache": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", - "sigmund": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" - } - }, - "mocha": { - "version": "https://registry.npmjs.org/mocha/-/mocha-1.14.0.tgz", - "integrity": "sha1-cT223FAAGRqdA1gZXQkIeQ7LYVc=", - "dev": true, - "requires": { - "commander": "http://registry.npmjs.org/commander/-/commander-2.0.0.tgz", - "debug": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "diff": "https://registry.npmjs.org/diff/-/diff-1.0.7.tgz", - "glob": "https://registry.npmjs.org/glob/-/glob-3.2.3.tgz", - "growl": "https://registry.npmjs.org/growl/-/growl-1.7.0.tgz", - "jade": "https://registry.npmjs.org/jade/-/jade-0.26.3.tgz", - "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" - } + "underscore.string": { + "version": "2.3.3", + "from": "underscore.string@>=2.3.3 <2.4.0", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz" } } }, - "grunt-nodemon": { - "version": "https://registry.npmjs.org/grunt-nodemon/-/grunt-nodemon-0.2.1.tgz", - "integrity": "sha1-G48kiVKSCX3IFNFgOpfo/sHJJPM=", - "dev": true, - "requires": { - "nodemon": "https://registry.npmjs.org/nodemon/-/nodemon-1.0.20.tgz" - } - }, - "har-schema": { - "version": 
"https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", - "dev": true - }, - "har-validator": { - "version": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.0.tgz", - "integrity": "sha1-RGV/VoiiLP1LckhugbOj+xF0LCk=", - "dev": true, - "requires": { - "ajv": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", - "har-schema": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz" - } - }, - "has-color": { - "version": "https://registry.npmjs.org/has-color/-/has-color-0.1.7.tgz", - "integrity": "sha1-ZxRKUmDDT8PMpnfQQdr1L+e3iy8=", - "dev": true - }, - "has-flag": { - "version": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" - }, - "hawk": { - "version": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz", - "integrity": "sha1-mzYd7pWpMWQObVBOBWCaj8OsRdI=", - "requires": { - "boom": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz", - "cryptiles": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz", - "hoek": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz", - "sntp": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz" - } - }, - "he": { - "version": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=" - }, - "heapdump": { - "version": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.9.tgz", - "integrity": "sha1-A8dOsN9dZ74Jgug0KbqcnSs7f3g=" - }, - "hoek": { - "version": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz", - "integrity": "sha1-YPvZBFV1Qc0rh5Wr8wihs3cOFVo=" - }, - "hooker": { - "version": "https://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz", - "integrity": "sha1-uDT3I8xKJCqmWWNFnfbZhMXT2Vk=", - "dev": true - }, - "http-errors": { - "version": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", - "integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=", - "requires": { - "depd": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "setprototypeof": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", - "statuses": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" - } - }, - "http-signature": { - "version": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", - "dev": true, - "requires": { - "assert-plus": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "jsprim": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "sshpk": "https://registry.npmjs.org/sshpk/-/sshpk-1.14.2.tgz" - } - }, - "i": { - "version": "https://registry.npmjs.org/i/-/i-0.3.6.tgz", - "integrity": "sha1-2WyScyB28HJxG2sQ/X1PZa2O4j0=", - "dev": true - }, - "iconv-lite": { - "version": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz", - "integrity": "sha1-KXhx9jvlB63Pv8pxXQzQ7thOmmM=", - "requires": { - "safer-buffer": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" - } - }, - "ieee754": { - "version": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz", - "integrity": "sha1-vjPUCsEO8ZJnAfbwii2G+/0a0+Q=" - }, - "inflight": { - "version": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "requires": { - "once": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "wrappy": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" - } - }, - "inherits": { - "version": 
"https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" - }, - "ini": { - "version": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", - "integrity": "sha1-7uJfVtscnsYIXgwid4CD9Zar+Sc=", - "dev": true - }, - "ipaddr.js": { - "version": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.8.0.tgz", - "integrity": "sha1-6qM9bd16zo9/b+DJygRA5wZzix4=" - }, - "is-typedarray": { - "version": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", - "dev": true - }, - "isarray": { - "version": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" - }, - "isstream": { - "version": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", - "dev": true - }, - "jade": { - "version": "https://registry.npmjs.org/jade/-/jade-0.26.3.tgz", - "integrity": "sha1-jxDXl32NefL2/4YqgbBRPMslaGw=", - "dev": true, - "requires": { - "commander": "https://registry.npmjs.org/commander/-/commander-0.6.1.tgz", - "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.0.tgz" - }, + "grunt-legacy-log-utils": { + "version": "0.1.1", + "from": "grunt-legacy-log-utils@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/grunt-legacy-log-utils/-/grunt-legacy-log-utils-0.1.1.tgz", "dependencies": { - "commander": { - "version": "https://registry.npmjs.org/commander/-/commander-0.6.1.tgz", - "integrity": "sha1-+mihT2qUXVTbvlDYzbMyDp47GgY=", - "dev": true + "lodash": { + "version": "2.4.2", + "from": "lodash@>=2.4.1 <2.5.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + }, + "underscore.string": { + "version": "2.3.3", + "from": "underscore.string@>=2.3.3 <2.4.0", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz" + } + } + }, + "grunt-legacy-util": { + "version": "0.2.0", + "from": "grunt-legacy-util@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/grunt-legacy-util/-/grunt-legacy-util-0.2.0.tgz", + "dependencies": { + "async": { + "version": "0.1.22", + "from": "async@>=0.1.22 <0.2.0", + "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz" + } + } + }, + "grunt-mocha-test": { + "version": "0.11.0", + "from": "grunt-mocha-test@>=0.11.0 <0.12.0", + "resolved": "https://registry.npmjs.org/grunt-mocha-test/-/grunt-mocha-test-0.11.0.tgz", + "dependencies": { + "fs-extra": { + "version": "0.9.1", + "from": "fs-extra@>=0.9.1 <0.10.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.9.1.tgz" + }, + "glob": { + "version": "3.2.3", + "from": "glob@3.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.3.tgz" + }, + "graceful-fs": { + "version": "2.0.3", + "from": "graceful-fs@>=2.0.0 <2.1.0", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-2.0.3.tgz" + }, + "jsonfile": { + "version": "1.1.1", + "from": "jsonfile@>=1.1.0 <1.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-1.1.1.tgz" + }, + "minimatch": { + "version": "0.2.14", + "from": "minimatch@>=0.2.11 <0.3.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" }, "mkdirp": { - "version": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.0.tgz", - "integrity": "sha1-G79asbqCevI1dRQ0kEJkVfSB/h4=", - "dev": true + "version": "0.5.1", + "from": "mkdirp@>=0.5.0 <0.6.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz" + }, + "mocha": { + "version": "1.20.1", + 
"from": "mocha@>=1.20.0 <1.21.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-1.20.1.tgz", + "dependencies": { + "mkdirp": { + "version": "0.3.5", + "from": "mkdirp@0.3.5", + "resolved": "http://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" + } + } + }, + "ncp": { + "version": "0.5.1", + "from": "ncp@>=0.5.1 <0.6.0", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-0.5.1.tgz" + } + } + }, + "gtoken": { + "version": "2.3.0", + "from": "gtoken@>=2.3.0 <3.0.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.0.tgz", + "dependencies": { + "mime": { + "version": "2.4.0", + "from": "mime@>=2.2.0 <3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.0.tgz" + }, + "pify": { + "version": "3.0.0", + "from": "pify@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz" + } + } + }, + "has": { + "version": "1.0.3", + "from": "has@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz" + }, + "has-ansi": { + "version": "0.1.0", + "from": "has-ansi@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-0.1.0.tgz" + }, + "has-flag": { + "version": "3.0.0", + "from": "has-flag@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" + }, + "has-symbols": { + "version": "1.0.0", + "from": "has-symbols@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.0.tgz" + }, + "hawk": { + "version": "0.10.2", + "from": "hawk@>=0.10.2 <0.11.0", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz" + }, + "he": { + "version": "1.1.1", + "from": "he@1.1.1", + "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz" + }, + "heapdump": { + "version": "0.3.12", + "from": "heapdump@>=0.3.2 <0.4.0", + "resolved": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.12.tgz" + }, + "hex2dec": { + "version": "1.1.0", + "from": "hex2dec@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.0.tgz" + }, + "hoek": { + "version": "0.7.6", + "from": "hoek@>=0.7.0 <0.8.0", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz" + }, + "hooker": { + "version": "0.2.3", + "from": "hooker@>=0.2.3 <0.3.0", + "resolved": "https://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz" + }, + "http-errors": { + "version": "1.6.3", + "from": "http-errors@>=1.6.3 <1.7.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" + }, + "https-proxy-agent": { + "version": "2.2.1", + "from": "https-proxy-agent@>=2.2.1 <3.0.0", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz", + "dependencies": { + "debug": { + "version": "3.2.6", + "from": "debug@>=3.1.0 <4.0.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz" + }, + "ms": { + "version": "2.1.1", + "from": "ms@>=2.1.1 <3.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz" + } + } + }, + "iconv-lite": { + "version": "0.4.23", + "from": "iconv-lite@0.4.23", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz" + }, + "ieee754": { + "version": "1.1.8", + "from": "ieee754@1.1.8", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz" + }, + "inflight": { + "version": "1.0.6", + "from": "inflight@>=1.0.4 <2.0.0", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "inherits@2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + 
"ipaddr.js": { + "version": "1.8.0", + "from": "ipaddr.js@1.8.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.8.0.tgz" + }, + "is": { + "version": "3.2.1", + "from": "is@>=3.2.1 <4.0.0", + "resolved": "https://registry.npmjs.org/is/-/is-3.2.1.tgz" + }, + "is-buffer": { + "version": "1.1.6", + "from": "is-buffer@>=1.1.5 <2.0.0", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz" + }, + "is-callable": { + "version": "1.1.4", + "from": "is-callable@>=1.1.3 <2.0.0", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz" + }, + "is-date-object": { + "version": "1.0.1", + "from": "is-date-object@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz" + }, + "is-regex": { + "version": "1.0.4", + "from": "is-regex@>=1.0.4 <2.0.0", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz" + }, + "is-symbol": { + "version": "1.0.2", + "from": "is-symbol@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.2.tgz" + }, + "isarray": { + "version": "1.0.0", + "from": "isarray@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + }, + "jade": { + "version": "0.26.3", + "from": "jade@0.26.3", + "resolved": "https://registry.npmjs.org/jade/-/jade-0.26.3.tgz", + "dependencies": { + "commander": { + "version": "0.6.1", + "from": "commander@0.6.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-0.6.1.tgz" + }, + "mkdirp": { + "version": "0.3.0", + "from": "mkdirp@0.3.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.0.tgz" } } }, "jmespath": { - "version": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", - "integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=" + "version": "0.15.0", + "from": "jmespath@0.15.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz" }, "js-yaml": { - "version": "https://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz", - "integrity": "sha1-olrmUJmZ6X3yeMZxnaEb0Gh3Q6g=", - "dev": true, - "requires": { - "argparse": "https://registry.npmjs.org/argparse/-/argparse-0.1.16.tgz", - "esprima": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz" - } + "version": "2.0.5", + "from": "js-yaml@>=2.0.5 <2.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz" }, - "jsbn": { - "version": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", - "dev": true, - "optional": true - }, - "json-schema": { - "version": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", - "dev": true - }, - "json-schema-traverse": { - "version": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", - "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A=", - "dev": true + "json-bigint": { + "version": "0.3.0", + "from": "json-bigint@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz" }, "json-stringify-safe": { - "version": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + "version": "5.0.1", + "from": "json-stringify-safe@5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" }, "jsonfile": { - "version": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", - "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", - 
"requires": { - "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz" - } - }, - "jsprim": { - "version": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", - "dev": true, - "requires": { - "assert-plus": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "extsprintf": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "json-schema": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "verror": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz" - } + "version": "2.4.0", + "from": "jsonfile@>=2.1.0 <3.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz" }, "just-extend": { - "version": "https://registry.npmjs.org/just-extend/-/just-extend-3.0.0.tgz", - "integrity": "sha1-zuAEAx6qv2QG2gOnuE5P6deO8og=" + "version": "3.0.0", + "from": "just-extend@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-3.0.0.tgz" + }, + "jwa": { + "version": "1.1.6", + "from": "jwa@>=1.1.5 <2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.1.6.tgz" + }, + "jws": { + "version": "3.1.5", + "from": "jws@>=3.1.5 <4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.1.5.tgz" }, "klaw": { - "version": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz", - "integrity": "sha1-QIhDO0azsbolnXh4XY6W9zugJDk=", - "requires": { - "graceful-fs": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz" - } + "version": "1.3.1", + "from": "klaw@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz" }, "knox": { - "version": "https://registry.npmjs.org/knox/-/knox-0.9.2.tgz", - "integrity": "sha1-NzZZNmniTwJP2vcjtqHcSv2DmnE=", - "requires": { - "debug": "https://registry.npmjs.org/debug/-/debug-1.0.5.tgz", - "mime": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz", - "once": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "stream-counter": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz", - "xml2js": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz" - }, + "version": "0.9.2", + "from": "knox@>=0.9.1 <0.10.0", + "resolved": "https://registry.npmjs.org/knox/-/knox-0.9.2.tgz", "dependencies": { "debug": { - "version": "https://registry.npmjs.org/debug/-/debug-1.0.5.tgz", - "integrity": "sha1-9yQSF0MPmd7EwrRz6rkiKOh0wqw=", - "requires": { - "ms": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" - } + "version": "1.0.5", + "from": "debug@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.5.tgz" } } }, - "lazy": { - "version": "https://registry.npmjs.org/lazy/-/lazy-1.0.11.tgz", - "integrity": "sha1-2qBoIGKCVCwIgojpdcKXwa53tpA=", - "dev": true - }, "lodash": { - "version": "http://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz", - "integrity": "sha1-jzSZxSRdNG1oLlsNO0B2fgnxqSw=", - "dev": true + "version": "0.9.2", + "from": "lodash@>=0.9.2 <0.10.0", + "resolved": "http://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz" }, "lodash.get": { - "version": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", - "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=" + "version": "4.4.2", + "from": "lodash.get@>=4.4.2 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz" + }, + "lodash.isstring": { + "version": "4.0.1", + "from": "lodash.isstring@>=4.0.1 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz" }, "logger-sharelatex": { - "version": 
"git+https://github.com/sharelatex/logger-sharelatex.git#13562f8866708fc86aef8202bf5a2ce4d1c6eed7", - "integrity": "sha1-UyQrP2SrbwokPapKc8xi//DC7Xg=", - "requires": { - "bunyan": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz", - "chai": "https://registry.npmjs.org/chai/-/chai-4.1.2.tgz", - "coffee-script": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.4.tgz", - "grunt-bunyan": "https://registry.npmjs.org/grunt-bunyan/-/grunt-bunyan-0.5.0.tgz", - "grunt-execute": "https://registry.npmjs.org/grunt-execute/-/grunt-execute-0.2.2.tgz", - "raven": "https://registry.npmjs.org/raven/-/raven-1.2.1.tgz", - "sandboxed-module": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz", - "sinon": "https://registry.npmjs.org/sinon/-/sinon-6.3.3.tgz", - "timekeeper": "https://registry.npmjs.org/timekeeper/-/timekeeper-1.0.0.tgz" - }, + "version": "1.5.7", + "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.7", + "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#13562f8866708fc86aef8202bf5a2ce4d1c6eed7", "dependencies": { "coffee-script": { - "version": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.4.tgz", - "integrity": "sha1-/hvO2X/h+zknuZjytFYW4GWL4f8=" + "version": "1.12.4", + "from": "coffee-script@1.12.4", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.4.tgz" } } }, "lolex": { - "version": "https://registry.npmjs.org/lolex/-/lolex-2.7.4.tgz", - "integrity": "sha1-ZRTeLDKR6dbwnUndzkqV99TVqT8=" - }, - "lpad": { - "version": "https://registry.npmjs.org/lpad/-/lpad-0.1.0.tgz", - "integrity": "sha1-5MYMKROTIcWXDeSTtJauDXdM0qc=", - "dev": true + "version": "3.0.0", + "from": "lolex@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-3.0.0.tgz" }, "lru-cache": { - "version": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", - "integrity": "sha1-bUUk6LlV+V1PW1iFHOId1y+06VI=", - "dev": true + "version": "2.7.3", + "from": "lru-cache@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz" }, "lsmod": { - "version": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", - "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" + "version": "1.0.0", + "from": "lsmod@1.0.0", + "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz" }, "lynx": { - "version": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", - "integrity": "sha1-Mxjc7xaQi4KG6Bisz9sxzXQkj50=", - "requires": { - "mersenne": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", - "statsd-parser": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz" - } + "version": "0.1.1", + "from": "lynx@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz" }, "media-typer": { - "version": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" + "version": "0.3.0", + "from": "media-typer@0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz" }, "merge-descriptors": { - "version": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" + "version": "1.0.1", + "from": "merge-descriptors@1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz" }, "mersenne": { - "version": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", - "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" + "version": "0.0.4", + "from": 
"mersenne@>=0.0.3 <0.1.0", + "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz" }, "methods": { - "version": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" + "version": "1.1.2", + "from": "methods@>=1.1.2 <1.2.0", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" }, "metrics-sharelatex": { "version": "2.0.3", "from": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.3", "resolved": "git+https://github.com/sharelatex/metrics-sharelatex.git#02522a50b7a5ec46eed9b0a93513ef992a21eb45", - "requires": { - "coffee-script": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "lynx": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", - "underscore": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz" - }, "dependencies": { "coffee-script": { - "version": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" + "version": "1.6.0", + "from": "coffee-script@1.6.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" }, "underscore": { - "version": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", - "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" + "version": "1.6.0", + "from": "underscore@>=1.6.0 <1.7.0", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz" } } }, "mime": { - "version": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz", - "integrity": "sha1-Eh+evEnjdm8xGnbh+hyAA8SwOqY=" + "version": "1.4.1", + "from": "mime@1.4.1", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz" }, "mime-db": { - "version": "https://registry.npmjs.org/mime-db/-/mime-db-1.36.0.tgz", - "integrity": "sha1-UCBHjbPH/pOq17vMTc+GnEM2M5c=" + "version": "1.37.0", + "from": "mime-db@>=1.37.0 <1.38.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.37.0.tgz" }, "mime-types": { - "version": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.20.tgz", - "integrity": "sha1-kwy3GdVx6QNzhSD4RwkRVIyizBk=", - "requires": { - "mime-db": "https://registry.npmjs.org/mime-db/-/mime-db-1.36.0.tgz" - } + "version": "2.1.21", + "from": "mime-types@>=2.1.18 <2.2.0", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.21.tgz" }, "minimatch": { - "version": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", - "requires": { - "brace-expansion": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" - } + "version": "3.0.4", + "from": "minimatch@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz" }, "minimist": { - "version": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" + "version": "0.0.8", + "from": "minimist@0.0.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" }, "mkdirp": { - "version": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", - "integrity": "sha1-3j5fiWHIjHh+4TaN+EmsRBPsqNc=" + "version": "0.3.5", + "from": "mkdirp@>=0.3.5 <0.4.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" }, "mocha": { - "version": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", - "integrity": "sha1-bYrlCPWRZ/lA8rWzxKYSrlDJCuY=", - "requires": { - "browser-stdout": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "commander": 
"https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", - "debug": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "diff": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "escape-string-regexp": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "glob": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", - "growl": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", - "he": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "supports-color": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz" - }, + "version": "5.2.0", + "from": "mocha@5.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", "dependencies": { "commander": { - "version": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", - "integrity": "sha1-30boZ9D8Kuxmo0ZitAapzK//Ww8=" + "version": "2.15.1", + "from": "commander@2.15.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz" }, "debug": { - "version": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha1-W7WgZyYotkFJVmuhaBnmFRjGcmE=", - "requires": { - "ms": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" - } + "version": "3.1.0", + "from": "debug@3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz" }, "diff": { - "version": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=" + "version": "3.5.0", + "from": "diff@3.5.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz" }, "glob": { - "version": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", - "integrity": "sha1-wZyd+aAocC1nhhI4SmVSQExjbRU=", - "requires": { - "fs.realpath": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "inflight": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "once": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "path-is-absolute": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" - } + "version": "7.1.2", + "from": "glob@7.1.2", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz" }, "growl": { - "version": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", - "integrity": "sha1-8nNdwig2dPpnR4sQGBBZNVw2nl4=" + "version": "1.10.5", + "from": "growl@1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz" }, "mkdirp": { - "version": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", - "requires": { - "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" - } + "version": "0.5.1", + "from": "mkdirp@0.5.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz" }, "supports-color": { - "version": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", - "integrity": "sha1-HGszdALCE3YF7+GfEP7DkPb6q1Q=", - "requires": { - "has-flag": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" - } + "version": "5.4.0", + "from": "supports-color@5.4.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz" } } }, - "ms": { - "version": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + 
"module-details-from-path": { + "version": "1.0.3", + "from": "module-details-from-path@>=1.0.3 <2.0.0", + "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz" }, - "mute-stream": { - "version": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", - "dev": true + "ms": { + "version": "2.0.0", + "from": "ms@2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" }, "mv": { - "version": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", - "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=", + "version": "2.1.1", + "from": "mv@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", "optional": true, - "requires": { - "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "ncp": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", - "rimraf": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz" - }, "dependencies": { "mkdirp": { - "version": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", - "optional": true, - "requires": { - "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" - } + "version": "0.5.1", + "from": "mkdirp@>=0.5.1 <0.6.0", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "optional": true }, "rimraf": { - "version": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", - "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=", - "optional": true, - "requires": { - "glob": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz" - } + "version": "2.4.5", + "from": "rimraf@>=2.4.0 <2.5.0", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "optional": true } } }, "nan": { - "version": "https://registry.npmjs.org/nan/-/nan-2.11.0.tgz", - "integrity": "sha1-V042Dk2VSrFpZuwQLAwEn9lhoJk=", - "optional": true - }, - "natives": { - "version": "https://registry.npmjs.org/natives/-/natives-1.1.5.tgz", - "integrity": "sha1-O9vbQQQCPl3SObVvx+89mhesxqo=", - "dev": true - }, - "nconf": { - "version": "https://registry.npmjs.org/nconf/-/nconf-0.6.9.tgz", - "integrity": "sha1-lXDvFe1vmuays8jV5xtm0xk81mE=", - "dev": true, - "requires": { - "async": "https://registry.npmjs.org/async/-/async-0.2.9.tgz", - "ini": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", - "optimist": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz" - }, - "dependencies": { - "async": { - "version": "https://registry.npmjs.org/async/-/async-0.2.9.tgz", - "integrity": "sha1-32MGD789Myhqdqr21Vophtn/hhk=", - "dev": true - }, - "optimist": { - "version": "https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz", - "integrity": "sha1-aUJIJvNAX3nxQub8PZrljU27kgA=", - "dev": true, - "requires": { - "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "wordwrap": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" - } - } - } + "version": "2.11.1", + "from": "nan@>=2.11.1 <3.0.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.11.1.tgz" }, "ncp": { - "version": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", - "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=", + "version": "2.0.0", + "from": "ncp@>=2.0.0 <2.1.0", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", "optional": true }, "negotiator": { - "version": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", - "integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=" + "version": "0.6.1", + "from": "negotiator@0.6.1", + "resolved": 
"https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz" }, "nise": { - "version": "https://registry.npmjs.org/nise/-/nise-1.4.5.tgz", - "integrity": "sha1-l5qXoZxI1ie7U3A3Jq6NU86NSz4=", - "requires": { - "@sinonjs/formatio": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.0.0.tgz", - "just-extend": "https://registry.npmjs.org/just-extend/-/just-extend-3.0.0.tgz", - "lolex": "https://registry.npmjs.org/lolex/-/lolex-2.7.4.tgz", - "path-to-regexp": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", - "text-encoding": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz" - }, + "version": "1.4.6", + "from": "nise@>=1.4.6 <2.0.0", + "resolved": "https://registry.npmjs.org/nise/-/nise-1.4.6.tgz", "dependencies": { "isarray": { - "version": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "lolex": { + "version": "2.7.5", + "from": "lolex@>=2.3.2 <3.0.0", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-2.7.5.tgz" }, "path-to-regexp": { - "version": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", - "integrity": "sha1-Wf3g9DW62suhA6hOnTvGTpa5k30=", - "requires": { - "isarray": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" - } + "version": "1.7.0", + "from": "path-to-regexp@>=1.7.0 <2.0.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz" } } }, + "node-fetch": { + "version": "2.3.0", + "from": "node-fetch@>=2.2.0 <3.0.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.3.0.tgz" + }, + "node-forge": { + "version": "0.7.6", + "from": "node-forge@>=0.7.4 <0.8.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.7.6.tgz" + }, "node-transloadit": { - "version": "https://registry.npmjs.org/node-transloadit/-/node-transloadit-0.0.4.tgz", - "integrity": "sha1-4ZoHheON94NblO2AANHjXmg7zsE=", - "requires": { - "request": "https://registry.npmjs.org/request/-/request-2.16.6.tgz", - "underscore": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz" - }, + "version": "0.0.4", + "from": "node-transloadit@0.0.4", + "resolved": "https://registry.npmjs.org/node-transloadit/-/node-transloadit-0.0.4.tgz", "dependencies": { "json-stringify-safe": { - "version": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz", - "integrity": "sha1-nbew5TDH8onF6MhDKvGRwv91pbM=" + "version": "3.0.0", + "from": "json-stringify-safe@>=3.0.0 <3.1.0", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz" }, "mime": { - "version": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", - "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" + "version": "1.2.11", + "from": "mime@>=1.2.7 <1.3.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" }, "qs": { - "version": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz", - "integrity": "sha1-MbGtBYVnZRxSaSFQa5qHk5EaA4Q=" + "version": "0.5.6", + "from": "qs@>=0.5.4 <0.6.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz" }, "request": { - "version": "https://registry.npmjs.org/request/-/request-2.16.6.tgz", - "integrity": "sha1-hy/kRa5y3iZrN4edatfclI+gHK0=", - "requires": { - "aws-sign": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.1.tgz", - "cookie-jar": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz", - "forever-agent": 
"https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz", - "form-data": "https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz", - "hawk": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz", - "json-stringify-safe": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz", - "mime": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", - "node-uuid": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", - "oauth-sign": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz", - "qs": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz", - "tunnel-agent": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz" - } + "version": "2.16.6", + "from": "request@>=2.16.6 <2.17.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.16.6.tgz" }, "underscore": { - "version": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz", - "integrity": "sha1-/FxrB2VnPZKi1KyLTcCqiHAuK9Q=" + "version": "1.2.1", + "from": "underscore@1.2.1", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz" } } }, "node-uuid": { - "version": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", - "integrity": "sha1-sEDrCSOWivq/jTL7HxfxFn/auQc=" - }, - "nodemon": { - "version": "https://registry.npmjs.org/nodemon/-/nodemon-1.0.20.tgz", - "integrity": "sha1-vBOKNwaMt426UIhbYkl6/f7u3aQ=", - "dev": true, - "requires": { - "minimatch": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", - "ps-tree": "https://registry.npmjs.org/ps-tree/-/ps-tree-0.0.3.tgz", - "update-notifier": "https://registry.npmjs.org/update-notifier/-/update-notifier-0.1.10.tgz" - }, - "dependencies": { - "minimatch": { - "version": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", - "integrity": "sha1-x054BXT2PG+aCQ6Q775u9TpqdWo=", - "dev": true, - "requires": { - "lru-cache": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", - "sigmund": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" - } - } - } + "version": "1.4.8", + "from": "node-uuid@>=1.4.1 <1.5.0", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz" }, "nopt": { - "version": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", - "integrity": "sha1-bd0hvSoxQXuScn3Vhfim83YI6+4=", - "dev": true, - "requires": { - "abbrev": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz" - } - }, - "noptify": { - "version": "https://registry.npmjs.org/noptify/-/noptify-0.0.3.tgz", - "integrity": "sha1-WPZUpz2XU98MUdlobckhBKZ/S7s=", - "dev": true, - "requires": { - "nopt": "https://registry.npmjs.org/nopt/-/nopt-2.0.0.tgz" - }, - "dependencies": { - "nopt": { - "version": "https://registry.npmjs.org/nopt/-/nopt-2.0.0.tgz", - "integrity": "sha1-ynQW8gpeP5w7hhgPlilfo9C1Lg0=", - "dev": true, - "requires": { - "abbrev": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz" - } - } - } - }, - "nssocket": { - "version": "https://registry.npmjs.org/nssocket/-/nssocket-0.5.3.tgz", - "integrity": "sha1-iDyi7GBfXtZKTVGQsmJUAZKPj40=", - "dev": true, - "requires": { - "eventemitter2": "http://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz", - "lazy": "https://registry.npmjs.org/lazy/-/lazy-1.0.11.tgz" - } + "version": "1.0.10", + "from": "nopt@>=1.0.10 <1.1.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz" }, "oauth-sign": { - "version": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz", - "integrity": "sha1-oOahcV2u0GLzIrYit/5a/RA1tuI=" + "version": "0.2.0", + "from": "oauth-sign@>=0.2.0 <0.3.0", + 
"resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz" }, - "object-assign": { - "version": "https://registry.npmjs.org/object-assign/-/object-assign-2.1.1.tgz", - "integrity": "sha1-Q8NuXVaf+OSBbE76i+AtJpZ8GKo=", - "dev": true + "object-keys": { + "version": "1.0.12", + "from": "object-keys@>=1.0.12 <2.0.0", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.12.tgz" + }, + "object.getownpropertydescriptors": { + "version": "2.0.3", + "from": "object.getownpropertydescriptors@>=2.0.3 <3.0.0", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz" }, "on-finished": { - "version": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", - "requires": { - "ee-first": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz" - } + "version": "2.3.0", + "from": "on-finished@>=2.3.0 <2.4.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz" }, "once": { - "version": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "requires": { - "wrappy": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" - } + "version": "1.4.0", + "from": "once@>=1.3.0 <2.0.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz" }, - "optimist": { - "version": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", - "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", - "dev": true, - "requires": { - "minimist": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "wordwrap": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" - } + "p-limit": { + "version": "2.0.0", + "from": "p-limit@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.0.0.tgz" }, - "os-homedir": { - "version": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", - "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", - "dev": true - }, - "os-tmpdir": { - "version": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", - "dev": true - }, - "osenv": { - "version": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", - "integrity": "sha1-hc36+uso6Gd/QW4odZK18/SepBA=", - "dev": true, - "requires": { - "os-homedir": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", - "os-tmpdir": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz" - } + "p-try": { + "version": "2.0.0", + "from": "p-try@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.0.0.tgz" }, "parseurl": { - "version": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", - "integrity": "sha1-/CidTtiZMRlGDBViUyYs3I3mW/M=" + "version": "1.3.2", + "from": "parseurl@>=1.3.2 <1.4.0", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz" }, "path-is-absolute": { - "version": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + "version": "1.0.1", + "from": "path-is-absolute@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + }, + "path-parse": { + "version": "1.0.6", + "from": "path-parse@>=1.0.5 <2.0.0", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz" }, "path-to-regexp": { - "version": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": 
"sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" + "version": "0.1.7", + "from": "path-to-regexp@0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" }, "pathval": { - "version": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz", - "integrity": "sha1-uULm1L3mUwBe9rcTYd74cn0GReA=" + "version": "1.1.0", + "from": "pathval@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz" }, - "performance-now": { - "version": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", - "dev": true - }, - "pkginfo": { - "version": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", - "integrity": "sha1-Wyn2qB9wcXFC4J52W76rl7T4HiE=", - "dev": true + "pify": { + "version": "4.0.1", + "from": "pify@>=4.0.0 <5.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz" }, "pngcrush": { - "version": "https://registry.npmjs.org/pngcrush/-/pngcrush-0.0.3.tgz", - "integrity": "sha1-v2dW6s2h+rNJwHdo6AXMEA0o+Tc=", - "requires": { - "gettemporaryfilepath": "https://registry.npmjs.org/gettemporaryfilepath/-/gettemporaryfilepath-0.0.1.tgz" - } + "version": "0.0.3", + "from": "pngcrush@0.0.3", + "resolved": "https://registry.npmjs.org/pngcrush/-/pngcrush-0.0.3.tgz" }, "process-nextick-args": { - "version": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", - "integrity": "sha1-o31zL0JxtKsa0HDTVQjoKQeI/6o=" + "version": "2.0.0", + "from": "process-nextick-args@>=2.0.0 <2.1.0", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz" }, - "prompt": { - "version": "https://registry.npmjs.org/prompt/-/prompt-0.2.14.tgz", - "integrity": "sha1-V3VPZPVD/XsIRXB8gY7OYY8F/9w=", - "dev": true, - "requires": { - "pkginfo": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", - "read": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", - "revalidator": "https://registry.npmjs.org/revalidator/-/revalidator-0.1.8.tgz", - "utile": "https://registry.npmjs.org/utile/-/utile-0.2.1.tgz", - "winston": "https://registry.npmjs.org/winston/-/winston-0.8.3.tgz" - } + "prom-client": { + "version": "11.2.0", + "from": "prom-client@>=11.1.3 <12.0.0", + "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.2.0.tgz" }, "proxy-addr": { - "version": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz", - "integrity": "sha1-7PxzO/Iv+Mb0B/onUye5q2fki5M=", - "requires": { - "forwarded": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", - "ipaddr.js": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.8.0.tgz" - } + "version": "2.0.4", + "from": "proxy-addr@>=2.0.4 <2.1.0", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz" }, - "ps-tree": { - "version": "https://registry.npmjs.org/ps-tree/-/ps-tree-0.0.3.tgz", - "integrity": "sha1-2/jXUqf+Ivp9WGNWiUmWEOknbdw=", - "dev": true, - "requires": { - "event-stream": "https://registry.npmjs.org/event-stream/-/event-stream-0.5.3.tgz" - } - }, - "psl": { - "version": "https://registry.npmjs.org/psl/-/psl-1.1.29.tgz", - "integrity": "sha1-YPWA02AXC7cip5fMcEQR5tqFDGc=", - "dev": true + "pseudomap": { + "version": "1.0.2", + "from": "pseudomap@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz" }, "punycode": { - "version": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" + "version": "1.3.2", + "from": "punycode@1.3.2", + 
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz" }, "qs": { - "version": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha1-yzroBuh0BERYTvFUzo7pjUA/PjY=" + "version": "6.5.2", + "from": "qs@6.5.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz" }, "querystring": { - "version": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" + "version": "0.2.0", + "from": "querystring@0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz" }, "range-parser": { - "version": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", - "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=" + "version": "1.2.0", + "from": "range-parser@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" }, "raven": { - "version": "https://registry.npmjs.org/raven/-/raven-1.2.1.tgz", - "integrity": "sha1-lJwTTbAooZC3u/j3kKrlQbfAIL0=", - "requires": { - "cookie": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", - "json-stringify-safe": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "lsmod": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", - "stack-trace": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", - "uuid": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz" - }, + "version": "1.2.1", + "from": "raven@>=1.1.3 <2.0.0", + "resolved": "https://registry.npmjs.org/raven/-/raven-1.2.1.tgz", "dependencies": { "uuid": { - "version": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", - "integrity": "sha1-Zyj8BFnEUNeWqZwxg3VpvfZy1yg=" + "version": "3.0.0", + "from": "uuid@3.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz" } } }, "raw-body": { - "version": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.3.tgz", - "integrity": "sha1-GzJOzmtXBuFThVvBFIxlu39uoMM=", - "requires": { - "bytes": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", - "http-errors": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", - "iconv-lite": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz", - "unpipe": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" - } - }, - "read": { - "version": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", - "integrity": "sha1-s9oZvQUkMal2cdRKQmNK33ELQMQ=", - "dev": true, - "requires": { - "mute-stream": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz" - } + "version": "2.3.3", + "from": "raw-body@2.3.3", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.3.tgz" }, "readable-stream": { - "version": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", - "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", - "requires": { - "core-util-is": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "isarray": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "string_decoder": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" - }, - "dependencies": { - "isarray": { - "version": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" - } - } + "version": "2.3.6", + "from": "readable-stream@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz" }, "recluster": { - "version": "https://registry.npmjs.org/recluster/-/recluster-0.3.7.tgz", - 
"integrity": "sha1-aKRx3ZC2obl3ZjTPdpZAWutWeJU=" + "version": "0.3.7", + "from": "recluster@>=0.3.7 <0.4.0", + "resolved": "https://registry.npmjs.org/recluster/-/recluster-0.3.7.tgz" }, "request": { - "version": "https://registry.npmjs.org/request/-/request-2.14.0.tgz", - "integrity": "sha1-DYrLsLFMGrguAAt9OB+oyA0afYg=", - "requires": { - "form-data": "0.0.7", - "mime": "1.2.9" - }, + "version": "2.14.0", + "from": "request@2.14.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.14.0.tgz", "dependencies": { "form-data": { "version": "0.0.7", - "requires": { - "async": "0.1.22", - "combined-stream": "0.0.4", - "mime": "1.2.9" - }, + "from": "form-data@~0.0.3", "dependencies": { "async": { - "version": "0.1.22" + "version": "0.1.22", + "from": "async@~0.1.9" }, "combined-stream": { "version": "0.0.4", - "requires": { - "delayed-stream": "0.0.5" - }, + "from": "combined-stream@~0.0.4", "dependencies": { "delayed-stream": { - "version": "0.0.5" + "version": "0.0.5", + "from": "delayed-stream@0.0.5" } } } } }, "mime": { - "version": "1.2.9" + "version": "1.2.9", + "from": "mime@~1.2.7" } } }, + "require-in-the-middle": { + "version": "3.1.0", + "from": "require-in-the-middle@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-3.1.0.tgz" + }, "require-like": { - "version": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", - "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=" + "version": "0.1.2", + "from": "require-like@0.1.2", + "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz" }, - "requirejs": { - "version": "https://registry.npmjs.org/requirejs/-/requirejs-2.1.22.tgz", - "integrity": "sha1-3Xj9LTQYDA1ixyS1uK68BmTgNm8=", - "dev": true + "resolve": { + "version": "1.8.1", + "from": "resolve@>=1.5.0 <2.0.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.8.1.tgz" }, "response": { - "version": "https://registry.npmjs.org/response/-/response-0.14.0.tgz", - "integrity": "sha1-BmNS/z5rAm0EdYCUB2Y7Rob9JpY=", - "requires": { - "best-encoding": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz", - "bl": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz", - "caseless": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz", - "mime": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" - }, + "version": "0.14.0", + "from": "response@0.14.0", + "resolved": "https://registry.npmjs.org/response/-/response-0.14.0.tgz", "dependencies": { "mime": { - "version": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", - "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" + "version": "1.2.11", + "from": "mime@>=1.2.11 <1.3.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" } } }, - "revalidator": { - "version": "https://registry.npmjs.org/revalidator/-/revalidator-0.1.8.tgz", - "integrity": "sha1-/s5hv6DBtSoga9axgZgYS91SOjs=", - "dev": true + "retry-axios": { + "version": "0.3.2", + "from": "retry-axios@>=0.3.2 <0.4.0", + "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz" + }, + "retry-request": { + "version": "4.0.0", + "from": "retry-request@>=4.0.0 <5.0.0", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.0.0.tgz" }, "rimraf": { - "version": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz", - "integrity": "sha1-5Dm+Kq7jJzIZUnMPmaiSnk/FBYI=" + "version": "2.2.8", + "from": "rimraf@2.2.8", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" }, "safe-buffer": { - "version": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", - "integrity": "sha1-iTMSr2myEj3vcfV4iQAWce6yyFM=" + "version": "5.1.2", + "from": "safe-buffer@5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" }, "safe-json-stringify": { - "version": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", - "integrity": "sha1-NW5EvJjx+TzkXfFLzXwBzahuCv0=", + "version": "1.2.0", + "from": "safe-json-stringify@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", "optional": true }, "safer-buffer": { - "version": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha1-RPoWGwGHuVSd2Eu5GAL5vYOFzWo=" + "version": "2.1.2", + "from": "safer-buffer@>=2.1.2 <3.0.0", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" }, "sandboxed-module": { - "version": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz", - "integrity": "sha1-x+VFkzm7y6KMUwPusz9ug4e/upY=", - "requires": { - "require-like": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", - "stack-trace": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" - } + "version": "2.0.3", + "from": "sandboxed-module@latest", + "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz" }, "sax": { - "version": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", - "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" + "version": "1.2.1", + "from": "sax@1.2.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz" }, "semver": { - "version": "https://registry.npmjs.org/semver/-/semver-2.3.2.tgz", - "integrity": "sha1-uYSPJdbPNjMwc+ye+IVtQvEjPlI=", - "dev": true + "version": "5.6.0", + "from": "semver@>=5.5.0 <6.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz" }, "send": { - "version": "https://registry.npmjs.org/send/-/send-0.16.2.tgz", - "integrity": "sha1-bsyh4PjBVtFBWXVZhI32RzCmu8E=", - "requires": { - "debug": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "depd": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "destroy": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", - "encodeurl": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "escape-html": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "etag": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "fresh": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "http-errors": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", - "mime": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz", - "ms": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "on-finished": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "range-parser": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", - "statuses": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz" - }, + "version": "0.16.2", + "from": "send@0.16.2", + "resolved": "https://registry.npmjs.org/send/-/send-0.16.2.tgz", "dependencies": { "statuses": { - "version": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", - "integrity": "sha1-u3PURtonlhBu/MG2AaJT1sRr0Ic=" + "version": "1.4.0", + "from": "statuses@>=1.4.0 <1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz" } } }, "serve-static": { - "version": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.2.tgz", - "integrity": 
"sha1-CV6Ecv1bRiN9tQzkhqQ/S4bGzsE=", - "requires": { - "encodeurl": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "escape-html": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "parseurl": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", - "send": "https://registry.npmjs.org/send/-/send-0.16.2.tgz" - } + "version": "1.13.2", + "from": "serve-static@1.13.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.2.tgz" }, "setprototypeof": { - "version": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", - "integrity": "sha1-0L2FU2iHtv58DYGMuWLZ2RxU5lY=" + "version": "1.1.0", + "from": "setprototypeof@1.1.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz" }, "settings-sharelatex": { - "version": "git+https://github.com/sharelatex/settings-sharelatex.git#cbc5e41c1dbe6789721a14b3fdae05bf22546559", - "integrity": "sha1-y501ImPtJiQDJ1OCSmJfUYb0U2M=", - "requires": { - "coffee-script": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" - }, + "version": "1.0.0", + "from": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", + "resolved": "git+https://github.com/sharelatex/settings-sharelatex.git#cbc5e41c1dbe6789721a14b3fdae05bf22546559", "dependencies": { "coffee-script": { - "version": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" + "version": "1.6.0", + "from": "coffee-script@1.6.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" } } }, + "shimmer": { + "version": "1.2.0", + "from": "shimmer@>=1.2.0 <2.0.0", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.0.tgz" + }, "sigmund": { - "version": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", - "integrity": "sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA=", - "dev": true + "version": "1.0.1", + "from": "sigmund@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" }, "sinon": { - "version": "https://registry.npmjs.org/sinon/-/sinon-6.3.3.tgz", - "integrity": "sha1-w6VbcEYXTBOeh6PJTJIsv0MjpOc=", - "requires": { - "@sinonjs/commons": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.0.2.tgz", - "@sinonjs/formatio": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.0.0.tgz", - "@sinonjs/samsam": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-2.1.1.tgz", - "diff": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "lodash.get": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", - "lolex": "https://registry.npmjs.org/lolex/-/lolex-2.7.4.tgz", - "nise": "https://registry.npmjs.org/nise/-/nise-1.4.5.tgz", - "supports-color": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "type-detect": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" - }, + "version": "7.1.1", + "from": "sinon@latest", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-7.1.1.tgz", "dependencies": { "diff": { - "version": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=" + "version": "3.5.0", + "from": "diff@>=3.5.0 <4.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz" }, "supports-color": { - "version": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha1-4uaaRKyHcveKHsCzW2id9lMO/I8=", - "requires": { - "has-flag": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" - } + 
"version": "5.5.0", + "from": "supports-color@>=5.5.0 <6.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" } } }, "sntp": { - "version": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz", - "integrity": "sha1-XvSBuVGnspr/30r9fyaDj8ESD4Q=", - "requires": { - "hoek": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz" - } + "version": "0.1.4", + "from": "sntp@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz" }, - "sprintf-js": { - "version": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", - "dev": true + "source-map": { + "version": "0.6.1", + "from": "source-map@>=0.6.1 <0.7.0", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" }, - "sshpk": { - "version": "https://registry.npmjs.org/sshpk/-/sshpk-1.14.2.tgz", - "integrity": "sha1-xvxhZIo9nE52T9P8306hBeSSupg=", - "dev": true, - "requires": { - "asn1": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", - "assert-plus": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "bcrypt-pbkdf": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "dashdash": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "ecc-jsbn": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "getpass": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "jsbn": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "safer-buffer": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "tweetnacl": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz" - } + "split": { + "version": "1.0.1", + "from": "split@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz" }, "stack-trace": { - "version": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", - "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" + "version": "0.0.9", + "from": "stack-trace@0.0.9", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" }, "statsd-parser": { - "version": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", - "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" + "version": "0.0.4", + "from": "statsd-parser@>=0.0.4 <0.1.0", + "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz" }, "statuses": { - "version": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", - "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" + "version": "1.5.0", + "from": "statuses@>=1.4.0 <2.0.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" }, "stream-browserify": { - "version": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.1.tgz", - "integrity": "sha1-ZiZu5fm9uZQKTkUUyvtDu3Hlyds=", - "requires": { - "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "readable-stream": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz" - }, - "dependencies": { - "readable-stream": { - "version": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha1-sRwn2IuP8fvgcGQ8+UsMea4bCq8=", - "requires": { - "core-util-is": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "inherits": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "isarray": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "process-nextick-args": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", - 
"safe-buffer": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", - "string_decoder": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "util-deprecate": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" - } - }, - "string_decoder": { - "version": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha1-nPFhG6YmhdcDCunkujQUnDrwP8g=", - "requires": { - "safe-buffer": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz" - } - } - } + "version": "2.0.1", + "from": "stream-browserify@>=2.0.1 <3.0.0", + "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.1.tgz" }, "stream-buffers": { - "version": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz", - "integrity": "sha1-GBwI1bs2kARfaUAbmuanoM8zE/w=" + "version": "0.2.6", + "from": "stream-buffers@>=0.2.5 <0.3.0", + "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz" }, "stream-counter": { - "version": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz", - "integrity": "sha1-kc8lac5NxQYf6816yyY5SloRR1E=" + "version": "1.0.0", + "from": "stream-counter@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz" + }, + "stream-shift": { + "version": "1.0.0", + "from": "stream-shift@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz" }, "string_decoder": { - "version": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + "version": "1.1.1", + "from": "string_decoder@>=1.1.1 <1.2.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" }, "strip-ansi": { - "version": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.1.1.tgz", - "integrity": "sha1-OeipjQRNFQZgq+SmgIrPcLt7yZE=", - "dev": true + "version": "0.3.0", + "from": "strip-ansi@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.3.0.tgz" + }, + "supports-color": { + "version": "0.2.0", + "from": "supports-color@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-0.2.0.tgz" + }, + "tdigest": { + "version": "0.1.1", + "from": "tdigest@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz" + }, + "teeny-request": { + "version": "3.9.1", + "from": "teeny-request@>=3.6.0 <4.0.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.9.1.tgz", + "dependencies": { + "uuid": { + "version": "3.3.2", + "from": "uuid@>=3.3.2 <4.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" + } + } }, "text-encoding": { - "version": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz", - "integrity": "sha1-45mpgiV6J22uQou5KEXLcb3CbRk=" + "version": "0.6.4", + "from": "text-encoding@>=0.6.4 <0.7.0", + "resolved": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz" + }, + "through": { + "version": "2.3.8", + "from": "through@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz" + }, + "through2": { + "version": "2.0.5", + "from": "through2@>=2.0.3 <3.0.0", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz" }, "timekeeper": { - "version": "https://registry.npmjs.org/timekeeper/-/timekeeper-1.0.0.tgz", - "integrity": "sha1-Lziu4elLEd1m2FgP8aqdzGoroNg=" - }, - "timespan": { - "version": 
"https://registry.npmjs.org/timespan/-/timespan-2.3.0.tgz", - "integrity": "sha1-SQLOBAvRPYRcj1myfp1ZutbzmSk=", - "dev": true - }, - "tiny-lr": { - "version": "https://registry.npmjs.org/tiny-lr/-/tiny-lr-0.0.4.tgz", - "integrity": "sha1-gGGFR/Y/aX0Fy0DEwsSwg1Ia77Y=", - "dev": true, - "requires": { - "debug": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz", - "faye-websocket": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.4.4.tgz", - "noptify": "https://registry.npmjs.org/noptify/-/noptify-0.0.3.tgz", - "qs": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz" - }, - "dependencies": { - "debug": { - "version": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz", - "integrity": "sha1-BuHqgILCyxTjmAbiLi9vdX+Srzk=", - "dev": true - }, - "qs": { - "version": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz", - "integrity": "sha1-MbGtBYVnZRxSaSFQa5qHk5EaA4Q=", - "dev": true - } - } - }, - "tough-cookie": { - "version": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", - "integrity": "sha1-U/Nto/R3g7CSWvoG/587FlKA94E=", - "dev": true, - "requires": { - "psl": "https://registry.npmjs.org/psl/-/psl-1.1.29.tgz", - "punycode": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" - }, - "dependencies": { - "punycode": { - "version": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", - "dev": true - } - } + "version": "1.0.0", + "from": "timekeeper@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-1.0.0.tgz" }, "tunnel-agent": { - "version": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz", - "integrity": "sha1-aFPCr7GyEJ5FYp5JK9419Fnqaeg=" - }, - "tweetnacl": { - "version": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", - "dev": true, - "optional": true + "version": "0.2.0", + "from": "tunnel-agent@>=0.2.0 <0.3.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz" }, "type-detect": { - "version": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", - "integrity": "sha1-dkb7XxiHHPu3dJ5pvTmmOI63RQw=" + "version": "4.0.8", + "from": "type-detect@>=4.0.5 <5.0.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" }, "type-is": { - "version": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz", - "integrity": "sha1-+JzjQVQcZysl7nrjxz3uOyvlAZQ=", - "requires": { - "media-typer": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "mime-types": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.20.tgz" - } + "version": "1.6.16", + "from": "type-is@>=1.6.16 <1.7.0", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz" }, "underscore": { - "version": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz", - "integrity": "sha1-EzXF5PXm0zu7SwBrqMhqAPVW3gg=" + "version": "1.5.2", + "from": "underscore@>=1.5.2 <1.6.0", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz" }, "underscore.string": { - "version": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz", - "integrity": "sha1-18D6KvXVoaZ/QlPa7pgTLnM/Dxk=", - "dev": true + "version": "2.2.1", + "from": "underscore.string@>=2.2.1 <2.3.0", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz" }, "unpipe": { - "version": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" - }, - 
"update-notifier": { - "version": "https://registry.npmjs.org/update-notifier/-/update-notifier-0.1.10.tgz", - "integrity": "sha1-IVy+EFM2nw1KRPhLUeuny4BIRpU=", - "dev": true, - "requires": { - "chalk": "https://registry.npmjs.org/chalk/-/chalk-0.4.0.tgz", - "configstore": "https://registry.npmjs.org/configstore/-/configstore-0.3.2.tgz", - "request": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "semver": "https://registry.npmjs.org/semver/-/semver-2.3.2.tgz" - }, - "dependencies": { - "caseless": { - "version": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", - "dev": true - }, - "combined-stream": { - "version": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", - "integrity": "sha1-cj599ugBrFYTETp+RFqbactjKBg=", - "dev": true, - "requires": { - "delayed-stream": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" - } - }, - "delayed-stream": { - "version": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", - "dev": true - }, - "forever-agent": { - "version": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", - "dev": true - }, - "form-data": { - "version": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz", - "integrity": "sha1-SXBJi+YEwgwAXU9cI67NIda0kJk=", - "dev": true, - "requires": { - "asynckit": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "combined-stream": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", - "mime-types": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.20.tgz" - } - }, - "oauth-sign": { - "version": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": "sha1-R6ewFrqmi1+g7PPe4IqFxnmsZFU=", - "dev": true - }, - "request": { - "version": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "integrity": "sha1-nC/KT301tZLv5Xx/ClXoEFIST+8=", - "dev": true, - "requires": { - "aws-sign2": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "aws4": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", - "caseless": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "combined-stream": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", - "extend": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "forever-agent": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "form-data": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz", - "har-validator": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.0.tgz", - "http-signature": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "is-typedarray": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "isstream": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "json-stringify-safe": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "mime-types": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.20.tgz", - "oauth-sign": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "performance-now": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "qs": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "safe-buffer": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "tough-cookie": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", 
- "tunnel-agent": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "uuid": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" - } - }, - "safe-buffer": { - "version": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha1-mR7GnSluAxN0fVm9/St0XDX4go0=", - "dev": true - }, - "tunnel-agent": { - "version": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", - "dev": true, - "requires": { - "safe-buffer": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" - } - }, - "uuid": { - "version": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha1-G0r0lV6zB3xQHCOHL8ZROBFYcTE=", - "dev": true - } - } + "version": "1.0.0", + "from": "unpipe@1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" }, "url": { - "version": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", - "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", - "requires": { - "punycode": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "querystring": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz" - } - }, - "user-home": { - "version": "https://registry.npmjs.org/user-home/-/user-home-1.1.1.tgz", - "integrity": "sha1-K1viOjK2Onyd640PKNSFcko98ZA=", - "dev": true + "version": "0.10.3", + "from": "url@0.10.3", + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz" }, "util-deprecate": { - "version": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + "version": "1.0.2", + "from": "util-deprecate@>=1.0.1 <1.1.0", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" }, - "utile": { - "version": "https://registry.npmjs.org/utile/-/utile-0.2.1.tgz", - "integrity": "sha1-kwyI6ZCY1iIINMNWy9mncFItkNc=", - "dev": true, - "requires": { - "async": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", - "deep-equal": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.0.1.tgz", - "i": "https://registry.npmjs.org/i/-/i-0.3.6.tgz", - "mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", - "ncp": "https://registry.npmjs.org/ncp/-/ncp-0.4.2.tgz", - "rimraf": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" - }, - "dependencies": { - "ncp": { - "version": "https://registry.npmjs.org/ncp/-/ncp-0.4.2.tgz", - "integrity": "sha1-q8xsvT7C7Spyn/bnwfqPAXhKhXQ=", - "dev": true - } - } + "util.promisify": { + "version": "1.0.0", + "from": "util.promisify@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/util.promisify/-/util.promisify-1.0.0.tgz" }, "utils-merge": { - "version": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" - }, - "uuid": { - "version": "https://registry.npmjs.org/uuid/-/uuid-2.0.3.tgz", - "integrity": "sha1-Z+LoY3lyFVMN/zGOW/nc6/1Hsho=", - "dev": true + "version": "1.0.1", + "from": "utils-merge@1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz" }, "vary": { - "version": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" - }, - "verror": { - "version": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", - "dev": true, - "requires": { - "assert-plus": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "core-util-is": 
"https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "extsprintf": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz" - } - }, - "watch": { - "version": "https://registry.npmjs.org/watch/-/watch-0.13.0.tgz", - "integrity": "sha1-/MbSs/DoxzSC61Qjmhn9W8+adTw=", - "dev": true, - "requires": { - "minimist": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz" - }, - "dependencies": { - "minimist": { - "version": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", - "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", - "dev": true - } - } + "version": "1.1.2", + "from": "vary@>=1.1.2 <1.2.0", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz" }, "which": { - "version": "https://registry.npmjs.org/which/-/which-1.0.9.tgz", - "integrity": "sha1-RgwdoPgQED0DIam2M6+eV15kSG8=", - "dev": true - }, - "winston": { - "version": "https://registry.npmjs.org/winston/-/winston-0.8.3.tgz", - "integrity": "sha1-ZLar9M0Brcrv1QCTk7HY6L7BnbA=", - "dev": true, - "requires": { - "async": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", - "colors": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", - "cycle": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz", - "eyes": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", - "isstream": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "pkginfo": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", - "stack-trace": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" - } - }, - "wordwrap": { - "version": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", - "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", - "dev": true + "version": "1.0.9", + "from": "which@>=1.0.5 <1.1.0", + "resolved": "https://registry.npmjs.org/which/-/which-1.0.9.tgz" }, "wrappy": { - "version": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" - }, - "xdg-basedir": { - "version": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-1.0.1.tgz", - "integrity": "sha1-FP+PY6T9vLBdW27qIrNvMDO58E4=", - "dev": true, - "requires": { - "user-home": "https://registry.npmjs.org/user-home/-/user-home-1.1.1.tgz" - } + "version": "1.0.2", + "from": "wrappy@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" }, "xml2js": { - "version": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz", - "integrity": "sha1-aGwg8hMgnpSr8NG88e+qKRx4J6c=", - "requires": { - "sax": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", - "xmlbuilder": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz" - } + "version": "0.4.19", + "from": "xml2js@0.4.19", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz" }, "xmlbuilder": { - "version": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", - "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=" + "version": "9.0.7", + "from": "xmlbuilder@>=9.0.1 <9.1.0", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz" + }, + "xtend": { + "version": "4.0.1", + "from": "xtend@>=4.0.1 <4.1.0", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + }, + "yallist": { + "version": "3.0.3", + "from": "yallist@>=3.0.2 <4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz" } } } diff --git a/services/filestore/package.json b/services/filestore/package.json index 83f4d53120..046387f5c6 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -47,20 +47,9 @@ "body-parser": "^1.2.0" 
}, "devDependencies": { - "sinon": "", - "chai": "", - "sandboxed-module": "", "bunyan": "^1.3.5", - "grunt": "0.4.1", - "grunt-bunyan": "^0.5.0", - "grunt-execute": "^0.2.2", - "grunt-mocha-test": "~0.8.2", - "grunt-contrib-requirejs": "0.4.1", - "grunt-contrib-coffee": "0.7.0", - "grunt-contrib-watch": "0.5.3", - "grunt-nodemon": "0.2.1", - "grunt-contrib-clean": "0.5.0", - "grunt-concurrent": "0.4.2", - "grunt-forever": "~0.4.4" + "chai": "4.2.0", + "sandboxed-module": "2.0.3", + "sinon": "7.1.1" } } From 87435274522e13d09d3d755fc934e4038f981c9f Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Thu, 29 Nov 2018 16:42:26 +0000 Subject: [PATCH 277/555] bump metrics to 2.0.4 --- services/filestore/npm-shrinkwrap.json | 6 +++--- services/filestore/package.json | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 548b2736f6..9d91412475 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -1194,9 +1194,9 @@ "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" }, "metrics-sharelatex": { - "version": "2.0.3", - "from": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.3", - "resolved": "git+https://github.com/sharelatex/metrics-sharelatex.git#02522a50b7a5ec46eed9b0a93513ef992a21eb45", + "version": "2.0.4", + "from": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.4", + "resolved": "git+https://github.com/sharelatex/metrics-sharelatex.git#49c4bc072f707eb6252adfec540268dfbea1615b", "dependencies": { "coffee-script": { "version": "1.6.0", diff --git a/services/filestore/package.json b/services/filestore/package.json index 046387f5c6..c13e416fa0 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -24,13 +24,15 @@ "dependencies": { "async": "~0.2.10", "aws-sdk": "^2.1.39", + "body-parser": "^1.2.0", "coffee-script": "~1.7.1", "express": "^4.2.0", "fs-extra": "^1.0.0", "heapdump": "^0.3.2", "knox": "~0.9.1", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.3", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.7", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.4", + "mocha": "5.2.0", "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", "pngcrush": "0.0.3", @@ -42,9 +44,7 @@ "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", "stream-browserify": "^2.0.1", "stream-buffers": "~0.2.5", - "underscore": "~1.5.2", - "mocha": "5.2.0", - "body-parser": "^1.2.0" + "underscore": "~1.5.2" }, "devDependencies": { "bunyan": "^1.3.5", From 5d927d30b646d75f86738b57aebf737f64961079 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 5 Dec 2018 10:53:11 +0000 Subject: [PATCH 278/555] update build scripts to 1.1.10 --- services/filestore/.gitignore | 1 + services/filestore/Dockerfile | 2 +- services/filestore/Makefile | 2 +- services/filestore/buildscript.txt | 6 +++--- services/filestore/docker-compose.ci.yml | 2 +- services/filestore/docker-compose.yml | 2 +- services/filestore/package.json | 2 +- 7 files changed, 9 insertions(+), 8 deletions(-) diff --git a/services/filestore/.gitignore b/services/filestore/.gitignore index 9cb1da9a39..fcc07f9527 100644 --- a/services/filestore/.gitignore +++ b/services/filestore/.gitignore @@ -40,6 +40,7 @@ test/IntergrationTests/js/* data/*/* app.js +app.js.map cluster.js app/js/* 
test/IntergrationTests/js/* diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index 3ced888df3..2845544ae6 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -20,4 +20,4 @@ WORKDIR /app RUN chmod 0755 ./install_deps.sh && ./install_deps.sh USER node -CMD ["node","app.js"] +CMD ["node", "--expose-gc", "app.js"] diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 9b34c3ccb9..d39712d9fe 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.9 +# Version: 1.1.10 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index d4c335faa9..041e0b4745 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -1,9 +1,9 @@ ---script-version=1.1.9 +--script-version=1.1.10 filestore --node-version=6.9.5 --acceptance-creds=None --language=coffeescript ---dependencies=['mongo', 'redis'] ---docker-repos=['gcr.io/overleaf-ops'] +--dependencies=mongo,redis +--docker-repos=gcr.io/overleaf-ops --kube=false --build-target=docker diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 17c4ddd2bf..5ab90e1825 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.9 +# Version: 1.1.10 version: "2" diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index dcbc14e683..aeceafb3f3 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.9 +# Version: 1.1.10 version: "2" diff --git a/services/filestore/package.json b/services/filestore/package.json index c13e416fa0..e35a2c009b 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -13,7 +13,7 @@ "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP", "compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee", "compile:acceptance_tests": "[ ! 
-e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee", - "compile:app": "([ -e app/coffee ] && coffee $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", + "compile:app": "([ -e app/coffee ] && coffee -m $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee -m $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:smoke_tests", "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", "nodemon": "nodemon --config nodemon.json", From 25275212af52d40a2133d9305a132bd7a7834f91 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 5 Dec 2018 12:36:32 +0000 Subject: [PATCH 279/555] bump metrics to 2.0.7 --- services/filestore/npm-shrinkwrap.json | 11 +++-------- services/filestore/package.json | 2 +- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 9d91412475..14a32672a7 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -1194,19 +1194,14 @@ "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" }, "metrics-sharelatex": { - "version": "2.0.4", - "from": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.4", - "resolved": "git+https://github.com/sharelatex/metrics-sharelatex.git#49c4bc072f707eb6252adfec540268dfbea1615b", + "version": "2.0.7", + "from": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.7", + "resolved": "git+https://github.com/sharelatex/metrics-sharelatex.git#3c7dd668d1153c13acee9cceb3a8ce24495b7c86", "dependencies": { "coffee-script": { "version": "1.6.0", "from": "coffee-script@1.6.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" - }, - "underscore": { - "version": "1.6.0", - "from": "underscore@>=1.6.0 <1.7.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz" } } }, diff --git a/services/filestore/package.json b/services/filestore/package.json index e35a2c009b..fab7033316 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -31,7 +31,7 @@ "heapdump": "^0.3.2", "knox": "~0.9.1", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.7", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.4", + "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.7", "mocha": "5.2.0", "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", From 28271655e18401e471bdf8be61c0464005ffa570 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Thu, 6 Dec 2018 08:19:39 +0000 Subject: [PATCH 280/555] avoid problems with caching of apt-get operations --- services/filestore/Dockerfile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index 5f4f63aec0..49a9940d7f 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -1,5 +1,4 @@ FROM node:6.9.5 -RUN apt-get update # we also need imagemagick but it is already in the node docker image -RUN apt-get install -y --no-install-recommends ghostscript optipng +RUN apt-get update 
&& apt-get install -y --no-install-recommends ghostscript optipng From cf12ec1154567877a63cf0edc8eb62719e7ff138 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 7 Dec 2018 17:02:34 +0000 Subject: [PATCH 281/555] use the aws sdk to copy files in S3PersistorManager to work around problems with knox --- .../app/coffee/S3PersistorManager.coffee | 17 +++++++++++------ .../unit/coffee/S3PersistorManagerTests.coffee | 10 +++++++--- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index d6ab47c200..8debdaad4f 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -11,6 +11,7 @@ path = require("path") LocalFileWriter = require("./LocalFileWriter") Errors = require("./Errors") _ = require("underscore") +awsS3 = require "aws-sdk/clients/s3" thirtySeconds = 30 * 1000 @@ -25,6 +26,12 @@ buildDefaultOptions = (bucketName, method, key)-> uri:"https://#{bucketName}.s3.amazonaws.com/#{key}" } +s3 = new awsS3({ + credentials: + accessKeyId: settings.filestore.s3.key, + secretAccessKey: settings.filestore.s3.secret +}) + module.exports = sendFile: (bucketName, key, fsPath, callback)-> @@ -90,12 +97,10 @@ module.exports = callback err copyFile: (bucketName, sourceKey, destKey, callback)-> - logger.log bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "copying file in s3" - s3Client = knox.createClient - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret - bucket: bucketName - s3Client.copyFile sourceKey, destKey, (err)-> + logger.log bucketName:bucketName, sourceKey:sourceKey, destKey: destKey, "copying file in s3" + source = bucketName + '/' + sourceKey + # use the AWS SDK instead of knox due to problems with error handling (https://github.com/Automattic/knox/issues/114) + s3.copyObject {Bucket: bucketName, Key: destKey, CopySource: source}, (err) -> if err? 
logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in aws" callback(err) diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index b48fde7820..0860514180 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -25,11 +25,15 @@ describe "S3PersistorManagerTests", -> get: sinon.stub() @knox = createClient: sinon.stub().returns(@stubbedKnoxClient) + @stubbedS3Client = + copyObject:sinon.stub() + @awsS3 = sinon.stub().returns @stubbedS3Client @LocalFileWriter = writeStream: sinon.stub() deleteFile: sinon.stub() @requires = "knox": @knox + "aws-sdk/clients/s3": @awsS3 "settings-sharelatex": @settings "./LocalFileWriter":@LocalFileWriter "logger-sharelatex": @@ -207,11 +211,11 @@ describe "S3PersistorManagerTests", -> @destKey = "my/dest/key" @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires - it "should use knox to copy file", (done)-> - @stubbedKnoxClient.copyFile.callsArgWith(2, @error) + it "should use AWS SDK to copy file", (done)-> + @stubbedS3Client.copyObject.callsArgWith(1, @error) @S3PersistorManager.copyFile @bucketName, @sourceKey, @destKey, (err)=> err.should.equal @error - @stubbedKnoxClient.copyFile.calledWith(@sourceKey, @destKey).should.equal true + @stubbedS3Client.copyObject.calledWith({Bucket: @bucketName, Key: @destKey, CopySource: @bucketName + '/' + @key}).should.equal true done() describe "deleteDirectory", -> From 621425656823df768603751bb0781ef367ea5b49 Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Wed, 2 Jan 2019 09:47:38 +0000 Subject: [PATCH 282/555] Bump logger to v1.5.8 --- services/filestore/npm-shrinkwrap.json | 11 ++++++++--- services/filestore/package.json | 2 +- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 14a32672a7..09537db297 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -1142,9 +1142,9 @@ "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz" }, "logger-sharelatex": { - "version": "1.5.7", - "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.7", - "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#13562f8866708fc86aef8202bf5a2ce4d1c6eed7", + "version": "1.5.8", + "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.8", + "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#3f841b014572706e472c47fe0d0c0c1e569bad8c", "dependencies": { "coffee-script": { "version": "1.12.4", @@ -1202,6 +1202,11 @@ "version": "1.6.0", "from": "coffee-script@1.6.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" + }, + "underscore": { + "version": "1.6.0", + "from": "underscore@>=1.6.0 <1.7.0", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz" } } }, diff --git a/services/filestore/package.json b/services/filestore/package.json index fab7033316..e09395c41f 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -30,7 +30,7 @@ "fs-extra": "^1.0.0", "heapdump": "^0.3.2", "knox": "~0.9.1", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.7", + "logger-sharelatex": 
"git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.8", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.7", "mocha": "5.2.0", "node-transloadit": "0.0.4", From 3b6eba33baf04238d17312edfedded5e46f3ea1a Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Wed, 2 Jan 2019 09:53:53 +0000 Subject: [PATCH 283/555] Remove `Metrics.inc "startup"` from app.coffee --- services/filestore/app.coffee | 2 -- 1 file changed, 2 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index b172e3ba88..549cd19bfe 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -23,8 +23,6 @@ Metrics.memory.monitor(logger) app.use Metrics.http.monitor(logger) -Metrics.inc "startup" - app.use (req, res, next)-> Metrics.inc "http-request" next() From 594b403bd36f16f1ec9098562b735188f3bdf13f Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Wed, 2 Jan 2019 09:55:31 +0000 Subject: [PATCH 284/555] Move metrics init to start of app.coffee --- services/filestore/app.coffee | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 549cd19bfe..c1484142b1 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -1,3 +1,5 @@ +Metrics = require "metrics-sharelatex" +Metrics.initialize("filestore") express = require('express') bodyParser = require "body-parser" logger = require('logger-sharelatex') @@ -15,8 +17,6 @@ app = express() if settings.sentry?.dsn? logger.initializeErrorReporting(settings.sentry.dsn) -Metrics = require "metrics-sharelatex" -Metrics.initialize("filestore") Metrics.open_sockets.monitor(logger) Metrics.event_loop?.monitor(logger) Metrics.memory.monitor(logger) From 676380974871ca72b587c083db1e948f687dab8b Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Thu, 3 Jan 2019 16:27:45 +0000 Subject: [PATCH 285/555] Bump logger to v1.5.9 --- services/filestore/npm-shrinkwrap.json | 6 +++--- services/filestore/package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 3076829a85..43599f526b 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -1155,9 +1155,9 @@ "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz" }, "logger-sharelatex": { - "version": "1.5.8", - "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.8", - "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#3f841b014572706e472c47fe0d0c0c1e569bad8c", + "version": "1.5.9", + "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.9", + "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#e8e1b95052f62e107336053e4a983f81cdbdf589", "dependencies": { "coffee-script": { "version": "1.12.4", diff --git a/services/filestore/package.json b/services/filestore/package.json index e6be998bef..4f25056dee 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -30,7 +30,7 @@ "fs-extra": "^1.0.0", "heapdump": "^0.3.2", "knox": "~0.9.1", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.8", + "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.9", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.12", "mocha": "5.2.0", "node-transloadit": "0.0.4", From 
b522fe15316ebdb64dd8dcb1ed8af50f93f31ec0 Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Fri, 4 Jan 2019 16:03:22 +0000 Subject: [PATCH 286/555] Remove grunt --- services/filestore/Gruntfile.coffee | 97 ----------------------------- 1 file changed, 97 deletions(-) delete mode 100644 services/filestore/Gruntfile.coffee diff --git a/services/filestore/Gruntfile.coffee b/services/filestore/Gruntfile.coffee deleted file mode 100644 index ebf94e2fc9..0000000000 --- a/services/filestore/Gruntfile.coffee +++ /dev/null @@ -1,97 +0,0 @@ -module.exports = (grunt) -> - - # Project configuration. - grunt.initConfig - forever: - app: - options: - index: "app.js" - - coffee: - server: - expand: true, - flatten: false, - cwd: 'app/coffee', - src: ['**/*.coffee'], - dest: 'app/js/', - ext: '.js' - - app_server: - expand: true, - flatten: false, - src: ['app.coffee', 'cluster.coffee'], - dest: './', - ext: '.js' - - server_tests: - expand: true, - flatten: false, - cwd: 'test/acceptence/coffee', - src: ['*.coffee', '**/*.coffee'], - dest: 'test/acceptence/js/', - ext: '.js' - - server_acc_tests: - expand: true, - flatten: false, - cwd: 'test/unit/coffee', - src: ['*.coffee', '**/*.coffee'], - dest: 'test/unit/js/', - ext: '.js' - - watch: - server_coffee: - files: ['app/*.coffee','app/**/*.coffee', 'test/unit/coffee/**/*.coffee', 'test/unit/coffee/*.coffee', "app.coffee", "cluster.coffee"] - tasks: ["clean", 'coffee', 'mochaTest'] - - clean: ["app/js", "test/unit/js", "app.js"] - - nodemon: - dev: - script: 'app.js' - options: - ext:"*.coffee" - - execute: - app: - src: "app.js" - - concurrent: - dev: - tasks: ['nodemon', 'watch'] - options: - logConcurrentOutput: true - - mochaTest: - unit: - src: ["test/unit/js/#{grunt.option('feature') or '**'}/*.js"] - options: - reporter: grunt.option('reporter') or 'spec' - grep: grunt.option("grep") - acceptence: - src: ["test/acceptence/js/#{grunt.option('feature') or '**'}/*.js"] - options: - reporter: grunt.option('reporter') or 'spec' - grep: grunt.option("grep") - - - grunt.loadNpmTasks 'grunt-contrib-coffee' - grunt.loadNpmTasks 'grunt-contrib-watch' - grunt.loadNpmTasks 'grunt-nodemon' - grunt.loadNpmTasks 'grunt-contrib-clean' - grunt.loadNpmTasks 'grunt-concurrent' - grunt.loadNpmTasks 'grunt-mocha-test' - grunt.loadNpmTasks 'grunt-forever' - grunt.loadNpmTasks 'grunt-bunyan' - grunt.loadNpmTasks 'grunt-execute' - - grunt.registerTask "test:unit", ["coffee", "mochaTest:unit"] - grunt.registerTask "test:acceptence", ["coffee", "mochaTest:acceptence"] - grunt.registerTask "test:acceptance", ["test:acceptence"] - - grunt.registerTask "ci", "test:unit" - grunt.registerTask 'default', ['coffee', 'bunyan','execute'] - - grunt.registerTask "compile", "coffee" - grunt.registerTask "install", "compile" - From c8d146b571b29ba7c4b7e4ce58314c5d33f8e0a3 Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Fri, 4 Jan 2019 16:03:45 +0000 Subject: [PATCH 287/555] Bump settings to v1.1.0 --- services/filestore/npm-shrinkwrap.json | 6 +++--- services/filestore/package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 43599f526b..0de5dd3f0a 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -1689,9 +1689,9 @@ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz" }, "settings-sharelatex": { - "version": "1.0.0", - "from": 
"git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", - "resolved": "git+https://github.com/sharelatex/settings-sharelatex.git#cbc5e41c1dbe6789721a14b3fdae05bf22546559", + "version": "1.1.0", + "from": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0", + "resolved": "git+https://github.com/sharelatex/settings-sharelatex.git#93f63d029b52fef8825c3a401b2b6a7ba29b4750", "dependencies": { "coffee-script": { "version": "1.6.0", diff --git a/services/filestore/package.json b/services/filestore/package.json index 4f25056dee..158f653aee 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -41,7 +41,7 @@ "request": "2.14.0", "response": "0.14.0", "rimraf": "2.2.8", - "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", + "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0", "stream-browserify": "^2.0.1", "stream-buffers": "~0.2.5", "underscore": "~1.5.2" From 04211924496a784ab1f990884abaa4a6a837f6fb Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Fri, 4 Jan 2019 16:22:13 +0000 Subject: [PATCH 288/555] Ignore any file ending in .map, wherever it is --- services/filestore/.gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/.gitignore b/services/filestore/.gitignore index fcc07f9527..5d1086933c 100644 --- a/services/filestore/.gitignore +++ b/services/filestore/.gitignore @@ -40,7 +40,7 @@ test/IntergrationTests/js/* data/*/* app.js -app.js.map +**/*.map cluster.js app/js/* test/IntergrationTests/js/* From 3614f217e6be0ec606156750f188adb7677f92bb Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Mon, 7 Jan 2019 15:54:24 +0000 Subject: [PATCH 289/555] add enableConversions flag to disable conversions which we can't do on k8 --- .../app/coffee/ImageOptimiser.coffee | 4 +++ services/filestore/app/coffee/SafeExec.coffee | 5 +++ .../filestore/config/settings.defaults.coffee | 1 + .../unit/coffee/ImageOptimiserTests.coffee | 34 +++++++++++++------ .../{SafeExec.coffee => SafeExecTests.coffee} | 10 +++++- 5 files changed, 43 insertions(+), 11 deletions(-) rename services/filestore/test/unit/coffee/{SafeExec.coffee => SafeExecTests.coffee} (81%) diff --git a/services/filestore/app/coffee/ImageOptimiser.coffee b/services/filestore/app/coffee/ImageOptimiser.coffee index 4888e00224..4c4a353f21 100644 --- a/services/filestore/app/coffee/ImageOptimiser.coffee +++ b/services/filestore/app/coffee/ImageOptimiser.coffee @@ -1,5 +1,6 @@ exec = require('child_process').exec logger = require("logger-sharelatex") +Settings = require "settings-sharelatex" module.exports = @@ -10,6 +11,9 @@ module.exports = opts = timeout: 30 * 1000 killSignal: "SIGKILL" + if !Settings.enableConversions + error = new Error("Image conversions are disabled") + return callback(error) exec args, opts,(err, stdout, stderr)-> if err? 
and err.signal == 'SIGKILL' logger.warn {err: err, stderr: stderr, localPath: localPath}, "optimiser timeout reached" diff --git a/services/filestore/app/coffee/SafeExec.coffee b/services/filestore/app/coffee/SafeExec.coffee index aa8121a360..3559d35c95 100644 --- a/services/filestore/app/coffee/SafeExec.coffee +++ b/services/filestore/app/coffee/SafeExec.coffee @@ -1,6 +1,7 @@ _ = require("underscore") logger = require("logger-sharelatex") child_process = require('child_process') +Settings = require "settings-sharelatex" # execute a command in the same way as 'exec' but with a timeout that # kills all child processes @@ -9,6 +10,10 @@ child_process = require('child_process') # group, then we can kill everything in that process group. module.exports = (command, options, callback = (err, stdout, stderr) ->) -> + if !Settings.enableConversions + error = new Error("Image conversions are disabled") + return callback(error) + # options are {timeout: number-of-milliseconds, killSignal: signal-name} [cmd, args...] = command diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index e04c86c6ea..7c5b01ef90 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -50,6 +50,7 @@ settings = # Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"] convertCommandPrefix: [] + enableConversions: false # Filestore health check # ---------------------- diff --git a/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee b/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee index e16d8e0917..398eacf70f 100644 --- a/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee +++ b/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee @@ -11,13 +11,16 @@ describe "ImageOptimiser", -> beforeEach -> @child_process = exec : sinon.stub() - + @settings = + enableConversions:true @optimiser = SandboxedModule.require modulePath, requires: 'child_process': @child_process "logger-sharelatex": log:-> err:-> warn:-> + "settings-sharelatex": @settings + @sourcePath = "/this/path/here.eps" @error = "Error" @@ -33,18 +36,29 @@ describe "ImageOptimiser", -> done() - it "should return the errro the file", (done)-> + it "should return the error", (done)-> @child_process.exec.callsArgWith(2, @error) @optimiser.compressPng @sourcePath, (err)=> err.should.equal @error done() - describe 'when optimiser is sigkilled', -> + describe 'when enableConversions is disabled', -> - it 'should not produce an error', (done) -> - @error = new Error('woops') - @error.signal = 'SIGKILL' - @child_process.exec.callsArgWith(2, @error) - @optimiser.compressPng @sourcePath, (err)=> - expect(err).to.equal(null) - done() + it 'should produce an error', (done) -> + @settings.enableConversions = false + @child_process.exec.callsArgWith(2) + @optimiser.compressPng @sourcePath, (err)=> + @child_process.exec.called.should.equal false + expect(err).to.exist + done() + + + describe 'when optimiser is sigkilled', -> + + it 'should not produce an error', (done) -> + @error = new Error('woops') + @error.signal = 'SIGKILL' + @child_process.exec.callsArgWith(2, @error) + @optimiser.compressPng @sourcePath, (err)=> + expect(err).to.equal(null) + done() diff --git a/services/filestore/test/unit/coffee/SafeExec.coffee b/services/filestore/test/unit/coffee/SafeExecTests.coffee similarity index 81% rename from 
services/filestore/test/unit/coffee/SafeExec.coffee rename to services/filestore/test/unit/coffee/SafeExecTests.coffee index 10d920df11..1be22f3993 100644 --- a/services/filestore/test/unit/coffee/SafeExec.coffee +++ b/services/filestore/test/unit/coffee/SafeExecTests.coffee @@ -9,11 +9,13 @@ SandboxedModule = require('sandboxed-module') describe "SafeExec", -> beforeEach -> - + @settings = + enableConversions:true @safe_exec = SandboxedModule.require modulePath, requires: "logger-sharelatex": log:-> err:-> + "settings-sharelatex": @settings @options = {timeout: 10*1000, killSignal: "SIGTERM" } describe "safe_exec", -> @@ -24,6 +26,12 @@ describe "SafeExec", -> should.not.exist(err) done() + it "should error when conversions are disabled", (done) -> + @settings.enableConversions = false + @safe_exec ["/bin/echo", "hello"], @options, (err, stdout, stderr) => + expect(err).to.exist + done() + it "should execute a command with non-zero exit status", (done) -> @safe_exec ["/usr/bin/env", "false"], @options, (err, stdout, stderr) => stdout.should.equal "" From 9d93eee3e8bfeb0dbd32bbc458b674504e49b811 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 9 Jan 2019 10:31:59 +0000 Subject: [PATCH 290/555] return a 404 error (instead of a 500) when copying a missing file --- services/filestore/app/coffee/FileController.coffee | 7 +++++-- .../filestore/app/coffee/S3PersistorManager.coffee | 10 ++++++++-- .../test/unit/coffee/FileControllerTests.coffee | 9 +++++++++ .../test/unit/coffee/S3PersistorManagerTests.coffee | 7 +++++++ 4 files changed, 29 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 24fd5229de..60dcc207b7 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -60,8 +60,11 @@ module.exports = FileController = logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "reciving request to copy file" PersistorManager.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)-> if err? - logger.log err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file" - res.send 500 + if err instanceof Errors.NotFoundError + res.send 404 + else + logger.log err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file" + res.send 500 else res.send 200 diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 8debdaad4f..dd9aae3bf7 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -102,8 +102,14 @@ module.exports = # use the AWS SDK instead of knox due to problems with error handling (https://github.com/Automattic/knox/issues/114) s3.copyObject {Bucket: bucketName, Key: destKey, CopySource: source}, (err) -> if err? 
- logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in aws" - callback(err) + if err.code is 'NoSuchKey' + logger.err bucketName:bucketName, sourceKey:sourceKey, "original file not found in s3 when copying" + callback(new Errors.NotFoundError("original file not found in S3 when copying")) + else + logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in aws" + callback(err) + else + callback() deleteFile: (bucketName, key, callback)-> logger.log bucketName:bucketName, key:key, "delete file in s3" diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index 591644de60..0645aff27c 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -28,6 +28,8 @@ describe "FileController", -> "./LocalFileWriter":@LocalFileWriter "./FileHandler": @FileHandler "./PersistorManager":@PersistorManager + "./Errors": @Errors = + NotFoundError: sinon.stub() "settings-sharelatex": @settings "logger-sharelatex": log:-> @@ -111,6 +113,13 @@ describe "FileController", -> done() @controller.copyFile @req, @res + it "should send a 404 if the original file was not found", (done) -> + @PersistorManager.copyFile.callsArgWith(3, new @Errors.NotFoundError()) + @res.send = (code)=> + code.should.equal 404 + done() + @controller.copyFile @req, @res + it "should send a 500 if there was an error", (done)-> @PersistorManager.copyFile.callsArgWith(3, "error") @res.send = (code)=> diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 0860514180..7fc70c5065 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -218,6 +218,13 @@ describe "S3PersistorManagerTests", -> @stubbedS3Client.copyObject.calledWith({Bucket: @bucketName, Key: @destKey, CopySource: @bucketName + '/' + @key}).should.equal true done() + it "should return a NotFoundError object if the original file does not exist", (done)-> + NoSuchKeyError = {code: "NoSuchKey"} + @stubbedS3Client.copyObject.callsArgWith(1, NoSuchKeyError) + @S3PersistorManager.copyFile @bucketName, @sourceKey, @destKey, (err)=> + expect(err instanceof @Errors.NotFoundError).to.equal true + done() + describe "deleteDirectory", -> beforeEach -> From ea845f49351ccd21e5cc87ba3d049fe6c5b39ec9 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 30 Jan 2019 16:28:37 +0000 Subject: [PATCH 291/555] bulk upgrade to 1.1.12, moved to npm rather than git --- services/filestore/Jenkinsfile | 10 +- services/filestore/Makefile | 6 +- services/filestore/buildscript.txt | 5 +- services/filestore/docker-compose.ci.yml | 13 +- services/filestore/docker-compose.yml | 12 +- services/filestore/npm-shrinkwrap.json | 966 +++++++++-------------- services/filestore/package.json | 7 +- 7 files changed, 429 insertions(+), 590 deletions(-) diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index 1ab9a075e1..35bd318ab2 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -48,8 +48,11 @@ pipeline { } } - stage('Package and publish build') { + stage('Package and docker push') { steps { + sh 'echo ${BUILD_NUMBER} > build_number.txt' + sh 'touch build.tar.gz' // Avoid tar 
warning about files changing during read + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar' withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) { sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}' @@ -60,9 +63,12 @@ pipeline { } } - stage('Publish build number') { + stage('Publish to s3') { steps { sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt' + withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { + s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz") + } withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { // The deployment process uses this file to figure out the latest build s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest") diff --git a/services/filestore/Makefile b/services/filestore/Makefile index d39712d9fe..db33518816 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.10 +# Version: 1.1.12 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -13,7 +13,6 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ MOCHA_GREP=${MOCHA_GREP} \ docker-compose ${DOCKER_COMPOSE_FLAGS} - clean: docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) @@ -40,6 +39,9 @@ build: --tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ . +tar: + $(DOCKER_COMPOSE) up tar + publish: docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index 041e0b4745..0ba90519b3 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -1,9 +1,8 @@ ---script-version=1.1.10 filestore +--language=coffeescript --node-version=6.9.5 --acceptance-creds=None ---language=coffeescript --dependencies=mongo,redis --docker-repos=gcr.io/overleaf-ops ---kube=false --build-target=docker +--script-version=1.1.12 diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 5ab90e1825..36b52f8f8b 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.10 +# Version: 1.1.12 version: "2" @@ -11,6 +11,7 @@ services: user: node command: npm run test:unit:_run + test_acceptance: build: . image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER @@ -26,6 +27,16 @@ services: user: node command: npm run test:acceptance:_run + + + tar: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + volumes: + - ./:/tmp/build/ + command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . 
+ user: root + redis: image: redis diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index aeceafb3f3..8bb7857cb6 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.10 +# Version: 1.1.12 version: "2" @@ -33,6 +33,16 @@ services: - redis command: npm run test:acceptance + + + tar: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + volumes: + - ./:/tmp/build/ + command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . + user: root + redis: image: redis diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 0de5dd3f0a..7caf791bae 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -29,6 +29,21 @@ "from": "@google-cloud/common@>=0.26.0 <0.27.0", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz" }, + "nan": { + "version": "2.12.1", + "from": "nan@>=2.11.1 <3.0.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.12.1.tgz" + }, + "readable-stream": { + "version": "3.1.1", + "from": "readable-stream@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.1.1.tgz" + }, + "string_decoder": { + "version": "1.2.0", + "from": "string_decoder@>=1.1.1 <2.0.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.2.0.tgz" + }, "through2": { "version": "3.0.0", "from": "through2@>=3.0.0 <4.0.0", @@ -47,14 +62,19 @@ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz" }, "@google-cloud/trace-agent": { - "version": "3.5.0", + "version": "3.5.2", "from": "@google-cloud/trace-agent@>=3.2.0 <4.0.0", - "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.5.0.tgz", + "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.5.2.tgz", "dependencies": { "@google-cloud/common": { - "version": "0.28.0", - "from": "@google-cloud/common@>=0.28.0 <0.29.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.28.0.tgz" + "version": "0.30.2", + "from": "@google-cloud/common@>=0.30.0 <0.31.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.30.2.tgz" + }, + "google-auth-library": { + "version": "3.0.1", + "from": "google-auth-library@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.0.1.tgz" }, "uuid": { "version": "3.3.2", @@ -118,28 +138,6 @@ "from": "@sindresorhus/is@>=0.13.0 <0.14.0", "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.13.0.tgz" }, - "@sinonjs/commons": { - "version": "1.3.0", - "from": "@sinonjs/commons@>=1.2.0 <2.0.0", - "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.3.0.tgz" - }, - "@sinonjs/formatio": { - "version": "3.0.0", - "from": "@sinonjs/formatio@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.0.0.tgz", - "dependencies": { - "@sinonjs/samsam": { - "version": "2.1.0", - "from": "@sinonjs/samsam@2.1.0", - "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-2.1.0.tgz" - } - } - }, - "@sinonjs/samsam": { - "version": "2.1.2", - "from": "@sinonjs/samsam@>=2.1.2 <3.0.0", - "resolved": 
"https://registry.npmjs.org/@sinonjs/samsam/-/samsam-2.1.2.tgz" - }, "@types/caseless": { "version": "0.12.1", "from": "@types/caseless@*", @@ -166,9 +164,9 @@ "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz" }, "@types/node": { - "version": "10.12.18", + "version": "10.12.20", "from": "@types/node@*", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.18.tgz" + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.20.tgz" }, "@types/request": { "version": "2.48.1", @@ -181,14 +179,9 @@ "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz" }, "@types/tough-cookie": { - "version": "2.3.4", + "version": "2.3.5", "from": "@types/tough-cookie@*", - "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.4.tgz" - }, - "abbrev": { - "version": "1.1.1", - "from": "abbrev@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz" + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.5.tgz" }, "accept-encoding": { "version": "0.1.0", @@ -210,52 +203,30 @@ "from": "agent-base@>=4.1.0 <5.0.0", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.1.tgz" }, - "ansi-regex": { - "version": "0.2.1", - "from": "ansi-regex@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz" - }, - "ansi-styles": { - "version": "1.1.0", - "from": "ansi-styles@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.1.0.tgz" - }, - "argparse": { - "version": "0.1.16", - "from": "argparse@>=0.1.11 <0.2.0", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-0.1.16.tgz", - "dependencies": { - "underscore": { - "version": "1.7.0", - "from": "underscore@>=1.7.0 <1.8.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz" - }, - "underscore.string": { - "version": "2.4.0", - "from": "underscore.string@>=2.4.0 <2.5.0", - "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.4.0.tgz" - } - } + "ajv": { + "version": "6.7.0", + "from": "ajv@>=6.5.5 <7.0.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.7.0.tgz" }, "array-flatten": { "version": "1.1.1", "from": "array-flatten@1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" }, - "array-from": { - "version": "2.1.1", - "from": "array-from@>=2.1.1 <3.0.0", - "resolved": "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz" - }, "arrify": { "version": "1.0.1", "from": "arrify@>=1.0.1 <2.0.0", "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz" }, - "assertion-error": { - "version": "1.1.0", - "from": "assertion-error@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz" + "asn1": { + "version": "0.2.4", + "from": "asn1@>=0.2.3 <0.3.0", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz" + }, + "assert-plus": { + "version": "1.0.0", + "from": "assert-plus@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" }, "async": { "version": "0.2.10", @@ -267,10 +238,15 @@ "from": "async-listener@>=0.6.0 <0.7.0", "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz" }, + "asynckit": { + "version": "0.4.0", + "from": "asynckit@>=0.4.0 <0.5.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" + }, "aws-sdk": { - "version": "2.366.0", + "version": "2.315.0", "from": "aws-sdk@>=2.1.39 
<3.0.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.366.0.tgz", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.315.0.tgz", "dependencies": { "uuid": { "version": "3.1.0", @@ -284,10 +260,20 @@ "from": "aws-sign@>=0.2.0 <0.3.0", "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.1.tgz" }, + "aws-sign2": { + "version": "0.7.0", + "from": "aws-sign2@>=0.7.0 <0.8.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz" + }, + "aws4": { + "version": "1.8.0", + "from": "aws4@>=1.8.0 <2.0.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz" + }, "axios": { "version": "0.18.0", "from": "axios@>=0.18.0 <0.19.0", - "resolved": "http://registry.npmjs.org/axios/-/axios-0.18.0.tgz" + "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.0.tgz" }, "balanced-match": { "version": "1.0.0", @@ -299,6 +285,11 @@ "from": "base64-js@>=1.0.2 <2.0.0", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz" }, + "bcrypt-pbkdf": { + "version": "1.0.2", + "from": "bcrypt-pbkdf@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz" + }, "best-encoding": { "version": "0.1.1", "from": "best-encoding@>=0.1.1 <0.2.0", @@ -310,9 +301,9 @@ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz" }, "bindings": { - "version": "1.3.1", + "version": "1.4.0", "from": "bindings@>=1.2.1 <2.0.0", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.3.1.tgz" + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.4.0.tgz" }, "bintrees": { "version": "1.0.1", @@ -322,24 +313,7 @@ "bl": { "version": "0.7.0", "from": "bl@>=0.7.0 <0.8.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz", - "dependencies": { - "isarray": { - "version": "0.0.1", - "from": "isarray@0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" - }, - "readable-stream": { - "version": "1.0.34", - "from": "readable-stream@>=1.0.2 <1.1.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz" - }, - "string_decoder": { - "version": "0.10.31", - "from": "string_decoder@>=0.10.0 <0.11.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" - } - } + "resolved": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz" }, "body-parser": { "version": "1.18.3", @@ -391,41 +365,16 @@ "from": "caseless@>=0.3.0 <0.4.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz" }, - "chai": { - "version": "4.2.0", - "from": "chai@latest", - "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz" - }, - "chalk": { - "version": "0.5.1", - "from": "chalk@>=0.5.0 <0.6.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-0.5.1.tgz" - }, - "check-error": { - "version": "1.0.2", - "from": "check-error@>=1.0.2 <2.0.0", - "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz" - }, "coffee-script": { - "version": "1.7.1", - "from": "coffee-script@>=1.7.1 <1.8.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz" - }, - "colors": { - "version": "0.6.2", - "from": "colors@>=0.6.2 <0.7.0", - "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz" + "version": "1.12.4", + "from": "coffee-script@1.12.4", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.4.tgz" }, "combined-stream": { "version": "0.0.7", "from": "combined-stream@>=0.0.4 <0.1.0", "resolved": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz" }, - "commander": { - "version": "2.0.0", - "from": "commander@2.0.0", - "resolved": "http://registry.npmjs.org/commander/-/commander-2.0.0.tgz" - }, "concat-map": { "version": "0.0.1", "from": "concat-map@0.0.1", @@ -476,21 +425,16 @@ "from": "cryptiles@>=0.1.0 <0.2.0", "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz" }, - "dateformat": { - "version": "1.0.2-1.2.3", - "from": "dateformat@1.0.2-1.2.3", - "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz" + "dashdash": { + "version": "1.14.1", + "from": "dashdash@>=1.12.0 <2.0.0", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz" }, "debug": { "version": "2.6.9", "from": "debug@2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" }, - "deep-eql": { - "version": "3.0.1", - "from": "deep-eql@>=3.0.1 <4.0.0", - "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz" - }, "delay": { "version": "4.1.0", "from": "delay@>=4.0.1 <5.0.0", @@ -511,11 +455,6 @@ "from": "destroy@>=1.0.4 <1.1.0", "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz" }, - "diff": { - "version": "1.0.7", - "from": "diff@1.0.7", - "resolved": "https://registry.npmjs.org/diff/-/diff-1.0.7.tgz" - }, "dtrace-provider": { "version": "0.6.0", "from": "dtrace-provider@>=0.6.0 <0.7.0", @@ -525,7 +464,24 @@ "duplexify": { "version": "3.6.1", "from": "duplexify@>=3.6.0 <4.0.0", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz" + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz", + "dependencies": { + "readable-stream": { + "version": "2.3.6", + "from": "readable-stream@>=2.0.0 <3.0.0", + "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz" + }, + "string_decoder": { + "version": "1.1.1", + "from": "string_decoder@>=1.1.1 <1.2.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + } + } + }, + "ecc-jsbn": { + "version": "0.1.2", + "from": "ecc-jsbn@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz" }, "ecdsa-sig-formatter": { "version": "1.0.10", @@ -577,36 +533,58 @@ "from": "escape-string-regexp@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" }, - "esprima": { - "version": "1.0.4", - "from": "esprima@>=1.0.2 <1.1.0", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz" - }, "etag": { "version": "1.8.1", "from": "etag@>=1.8.1 <1.9.0", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz" }, - "eventemitter2": { - "version": "0.4.14", - "from": "eventemitter2@>=0.4.13 <0.5.0", - "resolved": "http://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz" - }, "events": { "version": "1.1.1", "from": "events@1.1.1", "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz" }, - "exit": { - "version": "0.1.2", - "from": "exit@>=0.1.1 <0.2.0", - "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz" - }, "express": { - "version": "4.16.4", + "version": "4.16.3", "from": "express@>=4.2.0 <5.0.0", - "resolved": "https://registry.npmjs.org/express/-/express-4.16.4.tgz", + "resolved": "https://registry.npmjs.org/express/-/express-4.16.3.tgz", "dependencies": { + "body-parser": { + "version": "1.18.2", + "from": "body-parser@1.18.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.2.tgz" + }, + "iconv-lite": 
{ + "version": "0.4.19", + "from": "iconv-lite@0.4.19", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz" + }, + "qs": { + "version": "6.5.1", + "from": "qs@6.5.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz" + }, + "raw-body": { + "version": "2.3.2", + "from": "raw-body@2.3.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.2.tgz", + "dependencies": { + "depd": { + "version": "1.1.1", + "from": "depd@1.1.1", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.1.tgz" + }, + "http-errors": { + "version": "1.6.2", + "from": "http-errors@1.6.2", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.2.tgz" + }, + "setprototypeof": { + "version": "1.0.3", + "from": "setprototypeof@1.0.3", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.3.tgz" + } + } + }, "statuses": { "version": "1.4.0", "from": "statuses@>=1.4.0 <1.5.0", @@ -616,9 +594,34 @@ }, "extend": { "version": "3.0.2", - "from": "extend@>=3.0.1 <4.0.0", + "from": "extend@>=3.0.2 <3.1.0", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" }, + "extsprintf": { + "version": "1.3.0", + "from": "extsprintf@1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz" + }, + "fast-deep-equal": { + "version": "2.0.1", + "from": "fast-deep-equal@>=2.0.1 <3.0.0", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz" + }, + "fast-json-stable-stringify": { + "version": "2.0.0", + "from": "fast-json-stable-stringify@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz" + }, + "fast-text-encoding": { + "version": "1.0.0", + "from": "fast-text-encoding@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz" + }, + "file-uri-to-path": { + "version": "1.0.0", + "from": "file-uri-to-path@1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz" + }, "finalhandler": { "version": "1.1.1", "from": "finalhandler@1.1.1", @@ -636,32 +639,10 @@ "from": "findit2@>=2.2.3 <3.0.0", "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz" }, - "findup-sync": { - "version": "0.1.3", - "from": "findup-sync@>=0.1.2 <0.2.0", - "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz", - "dependencies": { - "glob": { - "version": "3.2.11", - "from": "glob@>=3.2.9 <3.3.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz" - }, - "lodash": { - "version": "2.4.2", - "from": "lodash@>=2.4.1 <2.5.0", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" - }, - "minimatch": { - "version": "0.3.0", - "from": "minimatch@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz" - } - } - }, "follow-redirects": { - "version": "1.6.0", + "version": "1.6.1", "from": "follow-redirects@>=1.3.0 <2.0.0", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.6.0.tgz", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.6.1.tgz", "dependencies": { "debug": { "version": "3.1.0", @@ -708,24 +689,19 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" }, "gaxios": { - "version": "1.0.4", - "from": "gaxios@>=1.0.2 <2.0.0", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.0.4.tgz" + "version": "1.2.7", + "from": "gaxios@>=1.0.4 <2.0.0", + "resolved": 
"https://registry.npmjs.org/gaxios/-/gaxios-1.2.7.tgz" }, "gcp-metadata": { "version": "0.9.3", "from": "gcp-metadata@>=0.9.0 <0.10.0", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz" }, - "get-func-name": { - "version": "2.0.0", - "from": "get-func-name@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz" - }, - "getobject": { - "version": "0.1.0", - "from": "getobject@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/getobject/-/getobject-0.1.0.tgz" + "getpass": { + "version": "0.1.7", + "from": "getpass@>=0.1.1 <0.2.0", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz" }, "gettemporaryfilepath": { "version": "0.0.1", @@ -747,11 +723,6 @@ "version": "0.7.0", "from": "gcp-metadata@>=0.7.0 <0.8.0", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz" - }, - "lru-cache": { - "version": "5.1.1", - "from": "lru-cache@>=5.0.0 <6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz" } } }, @@ -761,212 +732,31 @@ "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.3.tgz" }, "graceful-fs": { - "version": "4.1.15", + "version": "4.1.11", "from": "graceful-fs@>=4.1.2 <5.0.0", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.15.tgz" - }, - "growl": { - "version": "1.7.0", - "from": "growl@>=1.7.0 <1.8.0", - "resolved": "https://registry.npmjs.org/growl/-/growl-1.7.0.tgz" - }, - "grunt": { - "version": "0.4.5", - "from": "grunt@>=0.4.5 <0.5.0", - "resolved": "https://registry.npmjs.org/grunt/-/grunt-0.4.5.tgz", - "dependencies": { - "async": { - "version": "0.1.22", - "from": "async@>=0.1.22 <0.2.0", - "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz" - }, - "coffee-script": { - "version": "1.3.3", - "from": "coffee-script@>=1.3.3 <1.4.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.3.3.tgz" - }, - "glob": { - "version": "3.1.21", - "from": "glob@>=3.1.21 <3.2.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz" - }, - "graceful-fs": { - "version": "1.2.3", - "from": "graceful-fs@>=1.2.0 <1.3.0", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz" - }, - "iconv-lite": { - "version": "0.2.11", - "from": "iconv-lite@>=0.2.11 <0.3.0", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz" - }, - "inherits": { - "version": "1.0.2", - "from": "inherits@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz" - }, - "minimatch": { - "version": "0.2.14", - "from": "minimatch@>=0.2.12 <0.3.0", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" - } - } - }, - "grunt-bunyan": { - "version": "0.5.0", - "from": "grunt-bunyan@>=0.5.0 <0.6.0", - "resolved": "https://registry.npmjs.org/grunt-bunyan/-/grunt-bunyan-0.5.0.tgz", - "dependencies": { - "lodash": { - "version": "2.4.2", - "from": "lodash@>=2.4.1 <2.5.0", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" - } - } - }, - "grunt-contrib-clean": { - "version": "0.6.0", - "from": "grunt-contrib-clean@>=0.6.0 <0.7.0", - "resolved": "https://registry.npmjs.org/grunt-contrib-clean/-/grunt-contrib-clean-0.6.0.tgz" - }, - "grunt-contrib-coffee": { - "version": "0.11.1", - "from": "grunt-contrib-coffee@>=0.11.0 <0.12.0", - "resolved": "https://registry.npmjs.org/grunt-contrib-coffee/-/grunt-contrib-coffee-0.11.1.tgz", - "dependencies": { - "lodash": { - "version": "2.4.2", - 
"from": "lodash@>=2.4.1 <2.5.0", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" - } - } - }, - "grunt-execute": { - "version": "0.2.2", - "from": "grunt-execute@>=0.2.2 <0.3.0", - "resolved": "https://registry.npmjs.org/grunt-execute/-/grunt-execute-0.2.2.tgz" - }, - "grunt-legacy-log": { - "version": "0.1.3", - "from": "grunt-legacy-log@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/grunt-legacy-log/-/grunt-legacy-log-0.1.3.tgz", - "dependencies": { - "lodash": { - "version": "2.4.2", - "from": "lodash@>=2.4.1 <2.5.0", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" - }, - "underscore.string": { - "version": "2.3.3", - "from": "underscore.string@>=2.3.3 <2.4.0", - "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz" - } - } - }, - "grunt-legacy-log-utils": { - "version": "0.1.1", - "from": "grunt-legacy-log-utils@>=0.1.1 <0.2.0", - "resolved": "https://registry.npmjs.org/grunt-legacy-log-utils/-/grunt-legacy-log-utils-0.1.1.tgz", - "dependencies": { - "lodash": { - "version": "2.4.2", - "from": "lodash@>=2.4.1 <2.5.0", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" - }, - "underscore.string": { - "version": "2.3.3", - "from": "underscore.string@>=2.3.3 <2.4.0", - "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz" - } - } - }, - "grunt-legacy-util": { - "version": "0.2.0", - "from": "grunt-legacy-util@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/grunt-legacy-util/-/grunt-legacy-util-0.2.0.tgz", - "dependencies": { - "async": { - "version": "0.1.22", - "from": "async@>=0.1.22 <0.2.0", - "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz" - } - } - }, - "grunt-mocha-test": { - "version": "0.11.0", - "from": "grunt-mocha-test@>=0.11.0 <0.12.0", - "resolved": "https://registry.npmjs.org/grunt-mocha-test/-/grunt-mocha-test-0.11.0.tgz", - "dependencies": { - "fs-extra": { - "version": "0.9.1", - "from": "fs-extra@>=0.9.1 <0.10.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.9.1.tgz" - }, - "glob": { - "version": "3.2.3", - "from": "glob@3.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.3.tgz" - }, - "graceful-fs": { - "version": "2.0.3", - "from": "graceful-fs@>=2.0.0 <2.1.0", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-2.0.3.tgz" - }, - "jsonfile": { - "version": "1.1.1", - "from": "jsonfile@>=1.1.0 <1.2.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-1.1.1.tgz" - }, - "minimatch": { - "version": "0.2.14", - "from": "minimatch@>=0.2.11 <0.3.0", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz" - }, - "mkdirp": { - "version": "0.5.1", - "from": "mkdirp@>=0.5.0 <0.6.0", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz" - }, - "mocha": { - "version": "1.20.1", - "from": "mocha@>=1.20.0 <1.21.0", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-1.20.1.tgz", - "dependencies": { - "mkdirp": { - "version": "0.3.5", - "from": "mkdirp@0.3.5", - "resolved": "http://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" - } - } - }, - "ncp": { - "version": "0.5.1", - "from": "ncp@>=0.5.1 <0.6.0", - "resolved": "https://registry.npmjs.org/ncp/-/ncp-0.5.1.tgz" - } - } + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz" }, "gtoken": { - "version": "2.3.0", + "version": "2.3.2", "from": "gtoken@>=2.3.0 <3.0.0", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.0.tgz", + 
"resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.2.tgz", "dependencies": { "mime": { "version": "2.4.0", "from": "mime@>=2.2.0 <3.0.0", "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.0.tgz" - }, - "pify": { - "version": "3.0.0", - "from": "pify@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz" } } }, - "has-ansi": { - "version": "0.1.0", - "from": "has-ansi@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-0.1.0.tgz" + "har-schema": { + "version": "2.0.0", + "from": "har-schema@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz" + }, + "har-validator": { + "version": "5.1.3", + "from": "har-validator@>=5.1.0 <5.2.0", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz" }, "has-flag": { "version": "3.0.0", @@ -984,30 +774,30 @@ "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz" }, "heapdump": { - "version": "0.3.12", + "version": "0.3.9", "from": "heapdump@>=0.3.2 <0.4.0", - "resolved": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.12.tgz" + "resolved": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.9.tgz" }, "hex2dec": { - "version": "1.1.1", + "version": "1.1.2", "from": "hex2dec@>=1.0.1 <2.0.0", - "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.1.tgz" + "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz" }, "hoek": { "version": "0.7.6", "from": "hoek@>=0.7.0 <0.8.0", "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz" }, - "hooker": { - "version": "0.2.3", - "from": "hooker@>=0.2.3 <0.3.0", - "resolved": "https://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz" - }, "http-errors": { "version": "1.6.3", "from": "http-errors@>=1.6.3 <1.7.0", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" }, + "http-signature": { + "version": "1.2.0", + "from": "http-signature@>=1.2.0 <1.3.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz" + }, "https-proxy-agent": { "version": "2.2.1", "from": "https-proxy-agent@>=2.2.1 <3.0.0", @@ -1060,43 +850,46 @@ "from": "is-buffer@>=1.1.5 <2.0.0", "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz" }, + "is-typedarray": { + "version": "1.0.0", + "from": "is-typedarray@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" + }, "isarray": { "version": "1.0.0", "from": "isarray@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" }, - "jade": { - "version": "0.26.3", - "from": "jade@0.26.3", - "resolved": "https://registry.npmjs.org/jade/-/jade-0.26.3.tgz", - "dependencies": { - "commander": { - "version": "0.6.1", - "from": "commander@0.6.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-0.6.1.tgz" - }, - "mkdirp": { - "version": "0.3.0", - "from": "mkdirp@0.3.0", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.0.tgz" - } - } + "isstream": { + "version": "0.1.2", + "from": "isstream@>=0.1.0 <0.2.0", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" }, "jmespath": { "version": "0.15.0", "from": "jmespath@0.15.0", "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz" }, - "js-yaml": { - "version": "2.0.5", - "from": "js-yaml@>=2.0.5 <2.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz" + "jsbn": { + "version": "0.1.1", + "from": "jsbn@>=0.1.0 <0.2.0", + "resolved": 
"https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz" }, "json-bigint": { "version": "0.3.0", "from": "json-bigint@>=0.3.0 <0.4.0", "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz" }, + "json-schema": { + "version": "0.2.3", + "from": "json-schema@0.2.3", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz" + }, + "json-schema-traverse": { + "version": "0.4.1", + "from": "json-schema-traverse@>=0.4.1 <0.5.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" + }, "json-stringify-safe": { "version": "5.0.1", "from": "json-stringify-safe@5.0.1", @@ -1107,20 +900,20 @@ "from": "jsonfile@>=2.1.0 <3.0.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz" }, - "just-extend": { - "version": "3.0.0", - "from": "just-extend@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-3.0.0.tgz" + "jsprim": { + "version": "1.4.1", + "from": "jsprim@>=1.2.2 <2.0.0", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz" }, "jwa": { - "version": "1.1.6", - "from": "jwa@>=1.1.5 <2.0.0", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.1.6.tgz" + "version": "1.2.0", + "from": "jwa@>=1.2.0 <2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.2.0.tgz" }, "jws": { - "version": "3.1.5", + "version": "3.2.1", "from": "jws@>=3.1.5 <4.0.0", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.1.5.tgz" + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.1.tgz" }, "klaw": { "version": "1.3.1", @@ -1139,47 +932,77 @@ } } }, - "lodash": { - "version": "0.9.2", - "from": "lodash@>=0.9.2 <0.10.0", - "resolved": "http://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz" - }, - "lodash.get": { - "version": "4.4.2", - "from": "lodash.get@>=4.4.2 <5.0.0", - "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz" - }, "lodash.pickby": { "version": "4.6.0", "from": "lodash.pickby@>=4.6.0 <5.0.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz" }, "logger-sharelatex": { - "version": "1.5.9", - "from": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.9", - "resolved": "git+https://github.com/sharelatex/logger-sharelatex.git#e8e1b95052f62e107336053e4a983f81cdbdf589", + "version": "1.6.0", + "from": "logger-sharelatex@1.6.0", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.6.0.tgz", "dependencies": { - "coffee-script": { - "version": "1.12.4", - "from": "coffee-script@1.12.4", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.4.tgz" + "caseless": { + "version": "0.12.0", + "from": "caseless@>=0.12.0 <0.13.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz" + }, + "combined-stream": { + "version": "1.0.7", + "from": "combined-stream@>=1.0.6 <1.1.0", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz" + }, + "delayed-stream": { + "version": "1.0.0", + "from": "delayed-stream@>=1.0.0 <1.1.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + }, + "forever-agent": { + "version": "0.6.1", + "from": "forever-agent@>=0.6.1 <0.7.0", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + }, + "form-data": { + "version": "2.3.3", + "from": "form-data@>=2.3.2 <2.4.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz" + }, + "oauth-sign": { + "version": "0.9.0", + "from": "oauth-sign@>=0.9.0 
<0.10.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz" + }, + "request": { + "version": "2.88.0", + "from": "request@>=2.88.0 <3.0.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz" + }, + "safe-buffer": { + "version": "5.1.2", + "from": "safe-buffer@>=5.1.2 <6.0.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" + }, + "tunnel-agent": { + "version": "0.6.0", + "from": "tunnel-agent@>=0.6.0 <0.7.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz" + }, + "uuid": { + "version": "3.3.2", + "from": "uuid@>=3.3.2 <4.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" } } }, - "lolex": { - "version": "3.0.0", - "from": "lolex@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/lolex/-/lolex-3.0.0.tgz" - }, "long": { "version": "4.0.0", "from": "long@>=4.0.0 <5.0.0", "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz" }, "lru-cache": { - "version": "2.7.3", - "from": "lru-cache@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz" + "version": "5.1.1", + "from": "lru-cache@>=5.0.0 <6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz" }, "lsmod": { "version": "1.0.0", @@ -1212,9 +1035,9 @@ "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" }, "metrics-sharelatex": { - "version": "2.0.12", - "from": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.12", - "resolved": "git+https://github.com/sharelatex/metrics-sharelatex.git#3ac1621ef049e2f2d88a83b3a41011333d609662", + "version": "2.1.1", + "from": "metrics-sharelatex@2.1.1", + "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.1.1.tgz", "dependencies": { "coffee-script": { "version": "1.6.0", @@ -1234,14 +1057,14 @@ "resolved": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz" }, "mime-db": { - "version": "1.37.0", - "from": "mime-db@>=1.37.0 <1.38.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.37.0.tgz" + "version": "1.36.0", + "from": "mime-db@>=1.36.0 <1.37.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.36.0.tgz" }, "mime-types": { - "version": "2.1.21", + "version": "2.1.20", "from": "mime-types@>=2.1.18 <2.2.0", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.21.tgz" + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.20.tgz" }, "minimatch": { "version": "3.0.4", @@ -1253,11 +1076,6 @@ "from": "minimist@0.0.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" }, - "mkdirp": { - "version": "0.3.5", - "from": "mkdirp@>=0.3.5 <0.4.0", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" - }, "mocha": { "version": "5.2.0", "from": "mocha@5.2.0", @@ -1331,9 +1149,10 @@ } }, "nan": { - "version": "2.11.1", - "from": "nan@>=2.11.1 <3.0.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.11.1.tgz" + "version": "2.11.0", + "from": "nan@>=2.0.8 <3.0.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.11.0.tgz", + "optional": true }, "ncp": { "version": "2.0.0", @@ -1346,28 +1165,6 @@ "from": "negotiator@0.6.1", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz" }, - "nise": { - "version": "1.4.6", - "from": "nise@>=1.4.6 <2.0.0", - "resolved": "https://registry.npmjs.org/nise/-/nise-1.4.6.tgz", - "dependencies": { - "isarray": { - "version": "0.0.1", - "from": "isarray@0.0.1", - "resolved": 
"https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" - }, - "lolex": { - "version": "2.7.5", - "from": "lolex@>=2.3.2 <3.0.0", - "resolved": "https://registry.npmjs.org/lolex/-/lolex-2.7.5.tgz" - }, - "path-to-regexp": { - "version": "1.7.0", - "from": "path-to-regexp@>=1.7.0 <2.0.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz" - } - } - }, "node-fetch": { "version": "2.3.0", "from": "node-fetch@>=2.2.0 <3.0.0", @@ -1415,11 +1212,6 @@ "from": "node-uuid@>=1.4.1 <1.5.0", "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz" }, - "nopt": { - "version": "1.0.10", - "from": "nopt@>=1.0.10 <1.1.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz" - }, "oauth-sign": { "version": "0.2.0", "from": "oauth-sign@>=0.2.0 <0.3.0", @@ -1475,10 +1267,10 @@ "from": "path-to-regexp@0.1.7", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" }, - "pathval": { - "version": "1.1.0", - "from": "pathval@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz" + "performance-now": { + "version": "2.1.0", + "from": "performance-now@>=2.1.0 <3.0.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz" }, "pify": { "version": "4.0.1", @@ -1512,9 +1304,14 @@ }, "proxy-addr": { "version": "2.0.4", - "from": "proxy-addr@>=2.0.4 <2.1.0", + "from": "proxy-addr@>=2.0.3 <2.1.0", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz" }, + "psl": { + "version": "1.1.31", + "from": "psl@>=1.1.24 <2.0.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.31.tgz" + }, "punycode": { "version": "1.3.2", "from": "punycode@1.3.2", @@ -1553,9 +1350,16 @@ "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.3.tgz" }, "readable-stream": { - "version": "2.3.6", - "from": "readable-stream@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz" + "version": "1.0.34", + "from": "readable-stream@>=1.0.2 <1.1.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "dependencies": { + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + } + } }, "recluster": { "version": "0.3.7", @@ -1601,12 +1405,13 @@ "require-like": { "version": "0.1.2", "from": "require-like@0.1.2", - "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz" + "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", + "dev": true }, "resolve": { - "version": "1.9.0", + "version": "1.10.0", "from": "resolve@>=1.5.0 <2.0.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.9.0.tgz" + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.10.0.tgz" }, "response": { "version": "0.14.0", @@ -1622,7 +1427,7 @@ }, "retry-axios": { "version": "0.3.2", - "from": "retry-axios@>=0.3.2 <0.4.0", + "from": "retry-axios@0.3.2", "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz" }, "retry-request": { @@ -1636,9 +1441,9 @@ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" }, "safe-buffer": { - "version": "5.1.2", - "from": "safe-buffer@5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" + "version": "5.1.1", + "from": "safe-buffer@5.1.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz" }, "safe-json-stringify": { "version": "1.2.0", @@ -1654,7 
+1459,8 @@ "sandboxed-module": { "version": "2.0.3", "from": "sandboxed-module@latest", - "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz" + "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz", + "dev": true }, "sax": { "version": "1.2.1", @@ -1690,8 +1496,8 @@ }, "settings-sharelatex": { "version": "1.1.0", - "from": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0", - "resolved": "git+https://github.com/sharelatex/settings-sharelatex.git#93f63d029b52fef8825c3a401b2b6a7ba29b4750", + "from": "settings-sharelatex@1.1.0", + "resolved": "https://registry.npmjs.org/settings-sharelatex/-/settings-sharelatex-1.1.0.tgz", "dependencies": { "coffee-script": { "version": "1.6.0", @@ -1701,31 +1507,9 @@ } }, "shimmer": { - "version": "1.2.0", + "version": "1.2.1", "from": "shimmer@>=1.2.0 <2.0.0", - "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.0.tgz" - }, - "sigmund": { - "version": "1.0.1", - "from": "sigmund@>=1.0.0 <1.1.0", - "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" - }, - "sinon": { - "version": "7.1.1", - "from": "sinon@latest", - "resolved": "https://registry.npmjs.org/sinon/-/sinon-7.1.1.tgz", - "dependencies": { - "diff": { - "version": "3.5.0", - "from": "diff@>=3.5.0 <4.0.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz" - }, - "supports-color": { - "version": "5.5.0", - "from": "supports-color@>=5.5.0 <6.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" - } - } + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz" }, "sntp": { "version": "0.1.4", @@ -1742,6 +1526,11 @@ "from": "split@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz" }, + "sshpk": { + "version": "1.16.1", + "from": "sshpk@>=1.7.0 <2.0.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz" + }, "stack-trace": { "version": "0.0.9", "from": "stack-trace@0.0.9", @@ -1760,7 +1549,19 @@ "stream-browserify": { "version": "2.0.1", "from": "stream-browserify@>=2.0.1 <3.0.0", - "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.1.tgz" + "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.1.tgz", + "dependencies": { + "readable-stream": { + "version": "2.3.6", + "from": "readable-stream@>=2.0.2 <3.0.0", + "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz" + }, + "string_decoder": { + "version": "1.1.1", + "from": "string_decoder@>=1.1.1 <1.2.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + } + } }, "stream-buffers": { "version": "0.2.6", @@ -1778,19 +1579,9 @@ "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz" }, "string_decoder": { - "version": "1.1.1", - "from": "string_decoder@>=1.1.1 <1.2.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" - }, - "strip-ansi": { - "version": "0.3.0", - "from": "strip-ansi@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.3.0.tgz" - }, - "supports-color": { - "version": "0.2.0", - "from": "supports-color@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-0.2.0.tgz" + "version": "0.10.31", + "from": "string_decoder@>=0.10.0 <0.11.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" }, "symbol-observable": { "version": "1.2.0", @@ 
-1804,21 +1595,16 @@ }, "teeny-request": { "version": "3.11.3", - "from": "teeny-request@>=3.11.1 <4.0.0", + "from": "teeny-request@>=3.6.0 <4.0.0", "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", "dependencies": { "uuid": { "version": "3.3.2", - "from": "uuid@>=3.3.2 <4.0.0", + "from": "uuid@^3.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" } } }, - "text-encoding": { - "version": "0.6.4", - "from": "text-encoding@>=0.6.4 <0.7.0", - "resolved": "https://registry.npmjs.org/text-encoding/-/text-encoding-0.6.4.tgz" - }, "through": { "version": "2.3.8", "from": "through@>=2.0.0 <3.0.0", @@ -1827,22 +1613,41 @@ "through2": { "version": "2.0.5", "from": "through2@>=2.0.3 <3.0.0", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz" + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "dependencies": { + "readable-stream": { + "version": "2.3.6", + "from": "readable-stream@^2.1.5", + "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz" + }, + "string_decoder": { + "version": "1.1.1", + "from": "string_decoder@~1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + } + } }, - "timekeeper": { - "version": "1.0.0", - "from": "timekeeper@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-1.0.0.tgz" + "tough-cookie": { + "version": "2.4.3", + "from": "tough-cookie@>=2.4.3 <2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", + "dependencies": { + "punycode": { + "version": "1.4.1", + "from": "punycode@>=1.4.1 <2.0.0", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" + } + } }, "tunnel-agent": { "version": "0.2.0", "from": "tunnel-agent@>=0.2.0 <0.3.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz" }, - "type-detect": { - "version": "4.0.8", - "from": "type-detect@>=4.0.5 <5.0.0", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" + "tweetnacl": { + "version": "0.14.5", + "from": "tweetnacl@>=0.14.0 <0.15.0", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz" }, "type-is": { "version": "1.6.16", @@ -1854,16 +1659,23 @@ "from": "underscore@>=1.5.2 <1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz" }, - "underscore.string": { - "version": "2.2.1", - "from": "underscore.string@>=2.2.1 <2.3.0", - "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz" - }, "unpipe": { "version": "1.0.0", "from": "unpipe@1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" }, + "uri-js": { + "version": "4.2.2", + "from": "uri-js@>=4.2.2 <5.0.0", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", + "dependencies": { + "punycode": { + "version": "2.1.1", + "from": "punycode@>=2.1.0 <3.0.0", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz" + } + } + }, "url": { "version": "0.10.3", "from": "url@0.10.3", @@ -1884,10 +1696,10 @@ "from": "vary@>=1.1.2 <1.2.0", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz" }, - "which": { - "version": "1.0.9", - "from": "which@>=1.0.5 <1.1.0", - "resolved": "https://registry.npmjs.org/which/-/which-1.0.9.tgz" + "verror": { + "version": "1.10.0", + "from": "verror@1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz" }, "wrappy": { "version": "1.0.2", diff --git 
a/services/filestore/package.json b/services/filestore/package.json index 158f653aee..a9163666cd 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -25,13 +25,12 @@ "async": "~0.2.10", "aws-sdk": "^2.1.39", "body-parser": "^1.2.0", - "coffee-script": "~1.7.1", "express": "^4.2.0", "fs-extra": "^1.0.0", "heapdump": "^0.3.2", "knox": "~0.9.1", - "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.9", - "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v2.0.12", + "logger-sharelatex": "^1.6.0", + "metrics-sharelatex": "^2.1.1", "mocha": "5.2.0", "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", @@ -41,7 +40,7 @@ "request": "2.14.0", "response": "0.14.0", "rimraf": "2.2.8", - "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0", + "settings-sharelatex": "^1.1.0", "stream-browserify": "^2.0.1", "stream-buffers": "~0.2.5", "underscore": "~1.5.2" From ca9231b04ef64e25e9dd7306e82c2d5bc5fea864 Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 5 Feb 2019 11:19:02 +0000 Subject: [PATCH 292/555] set enable conversions to false by default but enable in compse --- services/filestore/config/settings.defaults.coffee | 2 +- services/filestore/docker-compose.ci.yml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 7c5b01ef90..77bc1bfb34 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -50,7 +50,7 @@ settings = # Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"] convertCommandPrefix: [] - enableConversions: false + enableConversions: if process.env['ENABLE_CONVERSIONS'] == 'true' then true else false # Filestore health check # ---------------------- diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 36b52f8f8b..5a7200e4f1 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -21,6 +21,7 @@ services: MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} + ENABLE_CONVERSIONS: true depends_on: - mongo - redis From 3fccf79ca802828f112835c8a1c7f647e1ac1bcc Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 5 Feb 2019 11:32:02 +0000 Subject: [PATCH 293/555] cleanup --- services/filestore/app.coffee | 2 +- .../app/coffee/FSPersistorManager.coffee | 6 +-- .../app/coffee/HealthCheckController.coffee | 1 - .../app/coffee/S3PersistorManager.coffee | 3 +- services/filestore/docker-compose.ci.yml | 2 +- .../coffee/FSPersistorManagerTests.coffee | 27 +++++++----- .../coffee/S3PersistorManagerTests.coffee | 44 +++++++++++-------- 7 files changed, 48 insertions(+), 37 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index c1484142b1..b4fff5b698 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -94,7 +94,7 @@ app.post "/shutdown", (req, res)-> app.get '/status', (req, res)-> if appIsOk - res.send('filestore sharelatex up - hello james') + res.send('filestore sharelatex up') else logger.log "app is not ok - shutting down" res.send("server is being shut down", 500) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 733202e4cd..353e2ef099 100644 --- 
a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -49,13 +49,13 @@ module.exports = sourceStream.on 'error', (err) -> logger.err err:err, location:location, name:name, "Error reading from file" if err.code == 'ENOENT' - callback new Errors.NotFoundError(err.message), null + return callback new Errors.NotFoundError(err.message), null else - callback err, null + return callback err, null sourceStream.on 'readable', () -> # This can be called multiple times, but the callback wrapper # ensures the callback is only called once - callback null, sourceStream + return callback null, sourceStream copyFile: (location, fromName, toName, callback = (err)->)-> diff --git a/services/filestore/app/coffee/HealthCheckController.coffee b/services/filestore/app/coffee/HealthCheckController.coffee index 7b7a80bfc8..db3f111c5e 100644 --- a/services/filestore/app/coffee/HealthCheckController.coffee +++ b/services/filestore/app/coffee/HealthCheckController.coffee @@ -15,7 +15,6 @@ checkCanStoreFiles = (callback)-> req = {params:{}, query:{}, headers:{}} req.params.project_id = settings.health_check.project_id req.params.file_id = settings.health_check.file_id - console.log settings myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer(initialSize: 100) res = { send: (code) -> diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 0c55f8b0e6..2b183730d6 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -91,7 +91,8 @@ module.exports = else if res.statusCode not in [200, 206] logger.log bucketName:bucketName, key:key, "error getting file from s3: #{res.statusCode}" return callback new Error("Got non-200 response from S3: #{res.statusCode}"), null - callback null, res + else + return callback null, res s3Stream.on 'error', (err) -> logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" callback err diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 5a7200e4f1..e7ac6e84a7 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -21,7 +21,7 @@ services: MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} - ENABLE_CONVERSIONS: true + ENABLE_CONVERSIONS: "true" depends_on: - mongo - redis diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index c4a7d83d06..7df7adfd8b 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -34,6 +34,8 @@ describe "FSPersistorManagerTests", -> err:-> "response":response "rimraf":@Rimraf + "./Errors": @Errors = + NotFoundError: sinon.stub() @location = "/tmp" @name1 = "530f2407e7ef165704000007/530f838b46d9a9e859000008" @name1Filtered ="530f2407e7ef165704000007_530f838b46d9a9e859000008" @@ -119,32 +121,33 @@ describe "FSPersistorManagerTests", -> describe "error conditions", -> - beforeEach -> - @fakeCode = 'ENOENT' - @Fs.createReadStream.returns( - on: (key, callback) => - err = new Error() - err.message = "this is from a test" - err.code = @fakeCode - callback(err, null) - ) - describe "when the file does not exist", -> beforeEach -> @fakeCode = 'ENOENT' - + @Fs.createReadStream.returns( + on: (key, callback) 
=> + err = new Error() + err.code = @fakeCode + callback(err, null) + ) it "should give a NotFoundError", (done) -> @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> expect(res).to.equal null expect(err).to.not.equal null - expect(err.name == "NotFoundError").to.equal true + expect(err instanceof @Errors.NotFoundError).to.equal true done() describe "when some other error happens", -> beforeEach -> @fakeCode = 'SOMETHINGHORRIBLE' + @Fs.createReadStream.returns( + on: (key, callback) => + err = new Error() + err.code = @fakeCode + callback(err, null) + ) it "should give an Error", (done) -> @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index e552886995..1eee09dc29 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -39,6 +39,8 @@ describe "S3PersistorManagerTests", -> "logger-sharelatex": log:-> err:-> + "./Errors": @Errors = + NotFoundError: sinon.stub() @key = "my/key" @bucketName = "my-bucket" @error = "my errror" @@ -108,44 +110,50 @@ describe "S3PersistorManagerTests", -> describe "error conditions", -> - beforeEach -> - @fakeResponse = - statusCode: 500 - @stubbedKnoxClient.get.returns( - on: (key, callback) => - if key == 'response' - callback(@fakeResponse) - end: -> - ) - describe "when the file doesn't exist", -> beforeEach -> - @bucketName = "mybucket" - @key = "somekey" - @fakeResponse.statusCode = 404 + @fakeResponse = + statusCode: 404 + @stubbedKnoxClient.get.returns( + on: (key, callback) => + if key == 'response' + callback(@fakeResponse) + end: -> + ) it "should produce a NotFoundError", (done) -> @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback expect(stream).to.equal null expect(err).to.not.equal null - expect(err.name == "NotFoundError").to.equal true + expect(err instanceof @Errors.NotFoundError).to.equal true done() it "should have bucket and key in the Error message", (done) -> @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback - expect(err.message).to.not.equal null - err.message.should.match(new RegExp(".*#{@bucketName}.*")) - err.message.should.match(new RegExp(".*#{@key}.*")) + error_message = @Errors.NotFoundError.lastCall.args[0] + expect(error_message).to.not.equal null + error_message.should.match(new RegExp(".*#{@bucketName}.*")) + error_message.should.match(new RegExp(".*#{@key}.*")) done() describe "when the S3 service produces an error", -> + beforeEach -> + @fakeResponse = + statusCode: 500 + @stubbedKnoxClient.get.returns( + on: (key, callback) => + if key == 'response' + callback(@fakeResponse) + end: -> + ) it "should produce an error", (done) -> @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback expect(stream).to.equal null expect(err).to.not.equal null expect(err instanceof Error).to.equal true + @Errors.NotFoundError.called.should.equal false done() describe "sendFile", -> @@ -308,4 +316,4 @@ describe "S3PersistorManagerTests", -> @stubbedKnoxClient.list.callsArgWith(1, null, data) @S3PersistorManager.directorySize @bucketName, @key, (err, totalSize)=> totalSize.should.equal 3072 - done() + done() \ No newline at end of file From 5eec00db516492ceb2587595068406f3feacdffa Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 5 
Feb 2019 14:06:51 +0000 Subject: [PATCH 294/555] don't run conversions test when disabled --- services/filestore/app/coffee/HealthCheckController.coffee | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/filestore/app/coffee/HealthCheckController.coffee b/services/filestore/app/coffee/HealthCheckController.coffee index db3f111c5e..643e15adb3 100644 --- a/services/filestore/app/coffee/HealthCheckController.coffee +++ b/services/filestore/app/coffee/HealthCheckController.coffee @@ -33,6 +33,8 @@ checkCanStoreFiles = (callback)-> callback(err) checkFileConvert = (callback)-> + if !settings.enableConversions + return callback() imgPath = path.join(settings.path.uploadFolder, "/tiny.pdf") async.waterfall [ (cb)-> From 0f48c77e7e608e8f3d81fd84504beb48886a8cae Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 5 Feb 2019 17:19:33 +0000 Subject: [PATCH 295/555] enable conversions for local docker compose --- services/filestore/docker-compose.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 8bb7857cb6..60d387bf51 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -27,6 +27,7 @@ services: MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} + ENABLE_CONVERSIONS: "true" user: node depends_on: - mongo From 2adae20368d4be3b443a09e324fd458eac0436ad Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Wed, 6 Feb 2019 11:43:11 +0000 Subject: [PATCH 296/555] remove a console.log --- services/filestore/app/coffee/LocalFileWriter.coffee | 1 - 1 file changed, 1 deletion(-) diff --git a/services/filestore/app/coffee/LocalFileWriter.coffee b/services/filestore/app/coffee/LocalFileWriter.coffee index 72422b7696..7046da15e7 100644 --- a/services/filestore/app/coffee/LocalFileWriter.coffee +++ b/services/filestore/app/coffee/LocalFileWriter.coffee @@ -53,5 +53,4 @@ module.exports = if !key? 
key = uuid.v1() key = key.replace(/\//g,"-") - console.log Settings.path.uploadFolder, key path.join(Settings.path.uploadFolder, key) From 37b039abf00c19e56657b4f50c5a3fb7d911027d Mon Sep 17 00:00:00 2001 From: Henry Oswald Date: Tue, 5 Mar 2019 17:20:16 +0000 Subject: [PATCH 297/555] add sentry to settings.defaults.coffee --- services/filestore/config/settings.defaults.coffee | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 77bc1bfb34..550cfd2694 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -52,6 +52,9 @@ settings = enableConversions: if process.env['ENABLE_CONVERSIONS'] == 'true' then true else false + sentry: + dsn: process.env.SENTRY_DSN + # Filestore health check # ---------------------- # Project and file details to check in persistor when calling /health_check From b9d3b1a842a4c9864e620a52ef5c0565fee0db3a Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 15 Mar 2019 15:35:28 +0000 Subject: [PATCH 298/555] fix beginShutdown to use server.close() --- services/filestore/app.coffee | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index b4fff5b698..3e927a2b51 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -117,7 +117,7 @@ beginShutdown = () -> process.exit 1 , 120*1000 killTimer.unref?() # prevent timer from keeping process alive - app.close () -> + server.close () -> logger.log "closed all connections" Metrics.close() process.disconnect?() @@ -128,7 +128,7 @@ port = settings.internal.filestore.port or 3009 host = "0.0.0.0" if !module.parent # Called directly - app.listen port, host, (error) -> + server = app.listen port, host, (error) -> logger.info "Filestore starting up, listening on #{host}:#{port}" From c59a3db4e8fb21e13b53a885fc469187f3e5c819 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 2 May 2019 02:04:59 +0200 Subject: [PATCH 299/555] [FSPersistorManager] fix the stream opening for node10+ Attaching a `readable` listener causes the stream to hang otherwise. 
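
The change described in this commit message can be read as a condensed CoffeeScript sketch (names and the error type follow the patch that comes next; error handling is abbreviated): open the file explicitly so that a missing file surfaces as an `ENOENT` error in the callback, then hand the already-open descriptor to `createReadStream`, which ignores its path argument when `opts.fd` is set. This avoids attaching a `readable` listener just to learn that the stream opened successfully.

    fs = require "fs"
    Errors = require "./Errors"

    # Sketch of the fd-based flow this patch introduces (abbreviated).
    getFileStream = (path, opts, callback) ->
      fs.open path, 'r', (err, fd) ->
        if err?
          # Map a missing file onto the service's NotFoundError type
          if err.code == 'ENOENT'
            return callback new Errors.NotFoundError(err.message), null
          return callback err, null
        # createReadStream ignores the path argument when opts.fd is given
        opts.fd = fd
        callback null, fs.createReadStream(null, opts)
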
Signed-off-by: Jakob Ackermann --- .../app/coffee/FSPersistorManager.coffee | 22 ++++++------- .../coffee/FSPersistorManagerTests.coffee | 33 +++++++++---------- 2 files changed, 25 insertions(+), 30 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 353e2ef099..7e2875fc28 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -41,20 +41,18 @@ module.exports = callback(err) # opts may be {start: Number, end: Number} - getFileStream: (location, name, opts, _callback = (err, res)->) -> - callback = _.once _callback + getFileStream: (location, name, opts, callback = (err, res)->) -> filteredName = filterName name logger.log location:location, name:filteredName, "getting file" - sourceStream = fs.createReadStream "#{location}/#{filteredName}", opts - sourceStream.on 'error', (err) -> - logger.err err:err, location:location, name:name, "Error reading from file" - if err.code == 'ENOENT' - return callback new Errors.NotFoundError(err.message), null - else - return callback err, null - sourceStream.on 'readable', () -> - # This can be called multiple times, but the callback wrapper - # ensures the callback is only called once + fs.open "#{location}/#{filteredName}", 'r', (err, fd) -> + if err? + logger.err err:err, location:location, name:name, "Error reading from file" + if err.code == 'ENOENT' + return callback new Errors.NotFoundError(err.message), null + else + return callback err, null + opts.fd = fd + sourceStream = fs.createReadStream null, opts return callback null, sourceStream diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index 7df7adfd8b..4cfde16f70 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -19,6 +19,7 @@ describe "FSPersistorManagerTests", -> rmdir:sinon.stub() exists:sinon.stub() readdir:sinon.stub() + open:sinon.stub() openSync:sinon.stub() fstatSync:sinon.stub() closeSync:sinon.stub() @@ -103,20 +104,21 @@ describe "FSPersistorManagerTests", -> @opts = {} it "should use correct file location", (done) -> - @Fs.createReadStream.returns({on: ->}) @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res) => - @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal true + @Fs.open.calledWith("#{@location}/#{@name1Filtered}").should.equal true done() describe "with start and end options", -> beforeEach -> - @opts = {start: 0, end: 8} + @fd = 2019 + @opts_in = {start: 0, end: 8} + @opts = {start: 0, end: 8, fd: @fd} + @Fs.open.callsArgWith(2, null, @fd) it 'should pass the options to createReadStream', (done) -> - @Fs.createReadStream.returns({on: ->}) - @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> - @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}", @opts).should.equal true + @FSPersistorManager.getFileStream @location, @name1, @opts_in, (err,res)=> + @Fs.createReadStream.calledWith(null, @opts).should.equal true done() describe "error conditions", -> @@ -125,12 +127,10 @@ describe "FSPersistorManagerTests", -> beforeEach -> @fakeCode = 'ENOENT' - @Fs.createReadStream.returns( - on: (key, callback) => - err = new Error() - err.code = @fakeCode - callback(err, null) - ) + err = new Error() + err.code = @fakeCode + 
@Fs.open.callsArgWith(2, err, null) + it "should give a NotFoundError", (done) -> @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> expect(res).to.equal null @@ -142,12 +142,9 @@ describe "FSPersistorManagerTests", -> beforeEach -> @fakeCode = 'SOMETHINGHORRIBLE' - @Fs.createReadStream.returns( - on: (key, callback) => - err = new Error() - err.code = @fakeCode - callback(err, null) - ) + err = new Error() + err.code = @fakeCode + @Fs.open.callsArgWith(2, err, null) it "should give an Error", (done) -> @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> From a79adcd325add5cf704b028780d49dbc0eb4164c Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 2 May 2019 01:26:35 +0200 Subject: [PATCH 300/555] [logging] do not overwrite the logger name Signed-off-by: Jakob Ackermann --- .../app/coffee/FSPersistorManager.coffee | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 353e2ef099..ef3effbaf2 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -44,10 +44,10 @@ module.exports = getFileStream: (location, name, opts, _callback = (err, res)->) -> callback = _.once _callback filteredName = filterName name - logger.log location:location, name:filteredName, "getting file" + logger.log location:location, filteredName:filteredName, "getting file" sourceStream = fs.createReadStream "#{location}/#{filteredName}", opts sourceStream.on 'error', (err) -> - logger.err err:err, location:location, name:name, "Error reading from file" + logger.err err:err, location:location, filteredName:name, "Error reading from file" if err.code == 'ENOENT' return callback new Errors.NotFoundError(err.message), null else @@ -76,10 +76,10 @@ module.exports = deleteFile: (location, name, callback)-> filteredName = filterName name - logger.log location:location, name:filteredName, "delete file" + logger.log location:location, filteredName:filteredName, "delete file" fs.unlink "#{location}/#{filteredName}", (err) -> if err? - logger.err err:err, location:location, name:filteredName, "Error on delete." + logger.err err:err, location:location, filteredName:filteredName, "Error on delete." callback err else callback() @@ -88,24 +88,24 @@ module.exports = filteredName = filterName name.replace(/\/$/,'') rimraf "#{location}/#{filteredName}", (err) -> if err? - logger.err err:err, location:location, name:filteredName, "Error on rimraf rmdir." + logger.err err:err, location:location, filteredName:filteredName, "Error on rimraf rmdir." 
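# Context for the field rename in this patch: the bunyan-backed logger keeps
# "name" as one of its own core record fields (the logger's name), so logging a
# payload key called "name" overwrites it in the output; moving the value to a
# "filteredName" key is what keeps the logger name intact.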
callback err else callback() checkIfFileExists:(location, name, callback = (err,exists)->)-> filteredName = filterName name - logger.log location:location, name:filteredName, "checking if file exists" + logger.log location:location, filteredName:filteredName, "checking if file exists" fs.exists "#{location}/#{filteredName}", (exists) -> - logger.log location:location, name:filteredName, exists:exists, "checked if file exists" + logger.log location:location, filteredName:filteredName, exists:exists, "checked if file exists" callback null, exists directorySize:(location, name, callback)-> filteredName = filterName name.replace(/\/$/,'') - logger.log location:location, name:filteredName, "get project size in file system" + logger.log location:location, filteredName:filteredName, "get project size in file system" fs.readdir "#{location}/#{filteredName}", (err, files) -> if err? - logger.err err:err, location:location, name:filteredName, "something went wrong listing prefix in aws" + logger.err err:err, location:location, filteredName:filteredName, "something went wrong listing prefix in aws" return callback(err) totalSize = 0 _.each files, (entry)-> From 32a54a7e375917406ab0aebc26eb84b3ca69d059 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timoth=C3=A9e=20Alby?= Date: Tue, 7 May 2019 17:22:35 +0200 Subject: [PATCH 301/555] Update README.md --- services/filestore/README.md | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/services/filestore/README.md b/services/filestore/README.md index cd126ab053..2772b71494 100644 --- a/services/filestore/README.md +++ b/services/filestore/README.md @@ -1,11 +1,9 @@ -filestore-sharelatex +overleaf/filestore ==================== An API for CRUD operations on binary files stored in S3 -[![Build Status](https://travis-ci.org/sharelatex/filestore-sharelatex.png?branch=master)](https://travis-ci.org/sharelatex/filestore-sharelatex) - -filestore acts as a proxy between the CLSIs and (currently) Amazon S3 storage, presenting a RESTful HTTP interface to the CLSIs on port 3009 by default. Urls are mapped to node functions in https://github.com/sharelatex/filestore-sharelatex/blob/master/app.coffee . URLs are of the form: +filestore acts as a proxy between the CLSIs and (currently) Amazon S3 storage, presenting a RESTful HTTP interface to the CLSIs on port 3009 by default. Urls are mapped to node functions in https://github.com/overleaf/filestore/blob/master/app.coffee . URLs are of the form: * `/project/:project_id/file/:file_id` * `/template/:template_id/v/:version/:format` @@ -22,4 +20,4 @@ License The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file. -Copyright (c) ShareLaTeX, 2014. +Copyright (c) Overleaf, 2014-2019. 
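
The URL scheme described in the README maps directly onto plain HTTP calls against the service, which listens on port 3009 by default. A small usage sketch with the request module; the localhost URL and the project and file ids are illustrative placeholders, not values defined anywhere in the repository:

    request = require "request"
    fs = require "fs"

    # Illustrative ids; real ones are issued by the web application.
    fileUrl = "http://localhost:3009/project/example_project_id/file/example_file_id"

    # Store a file: POST streams the raw bytes to the project/file key.
    fs.createReadStream("figure.png").pipe request.post(fileUrl)

    # Fetch it back: GET streams the stored bytes out again.
    request.get(fileUrl).pipe fs.createWriteStream("figure-copy.png")
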
From f081546ec0de3564719cc62fb37e29a038091c2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timoth=C3=A9e=20Alby?= Date: Tue, 7 May 2019 17:37:07 +0100 Subject: [PATCH 302/555] update Git URL in Jenkinsfile --- services/filestore/Jenkinsfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index 35bd318ab2..dd741ce239 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -4,10 +4,10 @@ pipeline { agent any environment { - GIT_PROJECT = "filestore-sharelatex" + GIT_PROJECT = "filestore" JENKINS_WORKFLOW = "filestore-sharelatex" TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline" - GIT_API_URL = "https://api.github.com/repos/sharelatex/${GIT_PROJECT}/statuses/$GIT_COMMIT" + GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT" } triggers { From 3575c89d03216d537ba1e2fbdb0d05e3e7d944e0 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Thu, 13 Jun 2019 16:57:49 -0400 Subject: [PATCH 303/555] Return file size on HEAD request This will be used by the file preview feature when it gets partial content. --- services/filestore/app.coffee | 13 +-- .../app/coffee/FSPersistorManager.coffee | 15 +++- .../app/coffee/FileController.coffee | 24 ++++-- .../filestore/app/coffee/FileHandler.coffee | 3 + .../app/coffee/S3PersistorManager.coffee | 26 +++++- .../acceptance/coffee/SendingFileTest.coffee | 9 ++- .../coffee/FSPersistorManagerTests.coffee | 29 +++++++ .../unit/coffee/FileControllerTests.coffee | 37 ++++++++- .../test/unit/coffee/FileHandlerTests.coffee | 1 - .../coffee/S3PersistorManagerTests.coffee | 81 ++++++++++++------- 10 files changed, 191 insertions(+), 47 deletions(-) diff --git a/services/filestore/app.coffee b/services/filestore/app.coffee index 3e927a2b51..6c21686826 100644 --- a/services/filestore/app.coffee +++ b/services/filestore/app.coffee @@ -63,22 +63,23 @@ app.use (req, res, next) -> Metrics.injectMetricsRoute(app) +app.head "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFileHead app.get "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile app.post "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.insertFile +app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, bodyParser.json(), fileController.copyFile +app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile -app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, bodyParser.json(), fileController.copyFile -app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile - +app.head "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFileHead app.get "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFile app.get "/template/:template_id/v/:version/:format/:sub_type", keyBuilder.templateFileKey, fileController.getFile app.post "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.insertFile +app.head "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFileHead app.get "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFile app.post "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.insertFile - 
-app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, bodyParser.json(), fileController.copyFile -app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile +app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, bodyParser.json(), fileController.copyFile +app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile app.get "/project/:project_id/size", keyBuilder.publicProjectKey, fileController.directorySize diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 353e2ef099..b33d0d85bf 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -1,5 +1,6 @@ logger = require("logger-sharelatex") fs = require("fs") +path = require("path") LocalFileWriter = require("./LocalFileWriter") Errors = require('./Errors') rimraf = require("rimraf") @@ -35,7 +36,7 @@ module.exports = if err? logger.err location:location, target:target, fsPath:fsPath, err:err, "something went wrong writing stream to disk" return callback err - @sendFile location, target, fsPath, (err) -> + @sendFile location, target, fsPath, (err) -> # delete the temporary file created above and return the original error LocalFileWriter.deleteFile fsPath, () -> callback(err) @@ -57,6 +58,18 @@ module.exports = # ensures the callback is only called once return callback null, sourceStream + getFileSize: (location, filename, callback) -> + fullPath = path.join(location, filterName(filename)) + fs.stat fullPath, (err, stats) -> + if err? + if err.code == 'ENOENT' + logger.log({location:location, filename:filename}, "file not found") + callback(new Errors.NotFoundError(err.message)) + else + logger.err({err:err, location:location, filename:filename}, "failed to stat file") + callback(err) + return + callback(null, stats.size) copyFile: (location, fromName, toName, callback = (err)->)-> filteredFromName=filterName fromName diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.coffee index 60dcc207b7..f98dbd1e49 100644 --- a/services/filestore/app/coffee/FileController.coffee +++ b/services/filestore/app/coffee/FileController.coffee @@ -21,7 +21,7 @@ module.exports = FileController = style: style, } metrics.inc "getFile" - logger.log key:key, bucket:bucket, format:format, style: style, "reciving request to get file" + logger.log key:key, bucket:bucket, format:format, style: style, "receiving request to get file" if req.headers.range? range = FileController._get_range(req.headers.range) options.start = range.start @@ -41,10 +41,24 @@ module.exports = FileController = logger.log key:key, bucket:bucket, format:format, style:style, "sending file to response" fileStream.pipe res + getFileHead: (req, res) -> + {key, bucket} = req + metrics.inc("getFileSize") + logger.log({ key: key, bucket: bucket }, "receiving request to get file metadata") + FileHandler.getFileSize bucket, key, (err, fileSize) -> + if err? 
+ if err instanceof Errors.NotFoundError + res.status(404).end() + else + res.status(500).end() + return + res.set("Content-Length", fileSize) + res.status(200).end() + insertFile: (req, res)-> metrics.inc "insertFile" {key, bucket} = req - logger.log key:key, bucket:bucket, "reciving request to insert file" + logger.log key:key, bucket:bucket, "receiving request to insert file" FileHandler.insertFile bucket, key, req, (err)-> if err? logger.log err: err, key: key, bucket: bucket, "error inserting file" @@ -57,7 +71,7 @@ module.exports = FileController = {key, bucket} = req oldProject_id = req.body.source.project_id oldFile_id = req.body.source.file_id - logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "reciving request to copy file" + logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "receiving request to copy file" PersistorManager.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)-> if err? if err instanceof Errors.NotFoundError @@ -71,7 +85,7 @@ module.exports = FileController = deleteFile: (req, res)-> metrics.inc "deleteFile" {key, bucket} = req - logger.log key:key, bucket:bucket, "reciving request to delete file" + logger.log key:key, bucket:bucket, "receiving request to delete file" FileHandler.deleteFile bucket, key, (err)-> if err? logger.log err:err, key:key, bucket:bucket, "something went wrong deleting file" @@ -90,7 +104,7 @@ module.exports = FileController = directorySize: (req, res)-> metrics.inc "projectSize" {project_id, bucket} = req - logger.log project_id:project_id, bucket:bucket, "reciving request to project size" + logger.log project_id:project_id, bucket:bucket, "receiving request to project size" FileHandler.getDirectorySize bucket, project_id, (err, size)-> if err? logger.log err: err, project_id: project_id, bucket: bucket, "error inserting file" diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.coffee index 6189c8a906..cb8d78a0fe 100644 --- a/services/filestore/app/coffee/FileHandler.coffee +++ b/services/filestore/app/coffee/FileHandler.coffee @@ -31,6 +31,9 @@ module.exports = FileHandler = else @_getConvertedFile bucket, key, opts, callback + getFileSize: (bucket, key, callback) -> + PersistorManager.getFileSize(bucket, key, callback) + _getStandardFile: (bucket, key, opts, callback)-> PersistorManager.getFileStream bucket, key, opts, (err, fileStream)-> if err? and !(err instanceof Errors.NotFoundError) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 2b183730d6..50486a0ce3 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -91,12 +91,36 @@ module.exports = else if res.statusCode not in [200, 206] logger.log bucketName:bucketName, key:key, "error getting file from s3: #{res.statusCode}" return callback new Error("Got non-200 response from S3: #{res.statusCode}"), null - else + else return callback null, res s3Stream.on 'error', (err) -> logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" callback err + getFileSize: (bucketName, key, callback) -> + logger.log({ bucketName: bucketName, key: key }, "getting file size from S3") + s3.headObject { Bucket: bucketName, Key: key }, (err, data) -> + if err? 
+ if err.statusCode in [403, 404] + # S3 returns a 403 instead of a 404 when the user doesn't have + # permission to list the bucket contents. + logger.log({ + bucketName: bucketName, + key: key + }, "file not found in s3") + callback( + new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}") + ) + else + logger.err({ + bucketName: bucketName, + key: key, + err: err + }, "error performing S3 HeadObject") + callback(err) + return + callback(null, data.ContentLength) + copyFile: (bucketName, sourceKey, destKey, callback)-> logger.log bucketName:bucketName, sourceKey:sourceKey, destKey: destKey, "copying file in s3" source = bucketName + '/' + sourceKey diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee index cee06e82a3..b77afb866b 100644 --- a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee @@ -32,7 +32,7 @@ describe "Filestore", -> - it "should send a 200 for status endpoing", (done)-> + it "should send a 200 for status endpoint", (done)-> request "#{@filestoreUrl}/status", (err, response, body)-> response.statusCode.should.equal 200 body.indexOf("filestore").should.not.equal -1 @@ -59,6 +59,13 @@ describe "Filestore", -> response.statusCode.should.equal 404 done() + it "should return the file size on a HEAD request", (done) -> + expectedLength = Buffer.byteLength(@constantFileContent) + request.head @fileUrl, (err, res) => + expect(res.statusCode).to.equal(200) + expect(res.headers['content-length']).to.equal(expectedLength.toString()) + done() + it "should be able get the file back", (done)-> @timeout(1000 * 10) request.get @fileUrl, (err, response, body)=> diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee index 7df7adfd8b..a08fc86682 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee @@ -22,6 +22,7 @@ describe "FSPersistorManagerTests", -> openSync:sinon.stub() fstatSync:sinon.stub() closeSync:sinon.stub() + stat:sinon.stub() @Rimraf = sinon.stub() @LocalFileWriter = writeStream: sinon.stub() @@ -156,7 +157,35 @@ describe "FSPersistorManagerTests", -> expect(err instanceof Error).to.equal true done() + describe "getFileSize", -> + it "should return the file size", (done) -> + expectedFileSize = 75382 + @Fs.stat.yields(new Error("fs.stat got unexpected arguments")) + @Fs.stat.withArgs("#{@location}/#{@name1Filtered}") + .yields(null, { size: expectedFileSize }) + @FSPersistorManager.getFileSize @location, @name1, (err, fileSize) => + if err? 
+ return done(err) + expect(fileSize).to.equal(expectedFileSize) + done() + + it "should throw a NotFoundError if the file does not exist", (done) -> + error = new Error() + error.code = "ENOENT" + @Fs.stat.yields(error) + + @FSPersistorManager.getFileSize @location, @name1, (err, fileSize) => + expect(err).to.be.instanceof(@Errors.NotFoundError) + done() + + it "should rethrow any other error", (done) -> + error = new Error() + @Fs.stat.yields(error) + + @FSPersistorManager.getFileSize @location, @name1, (err, fileSize) => + expect(err).to.equal(error) + done() describe "copyFile", -> beforeEach -> diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.coffee index 09398b8c01..821aadb68d 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.coffee +++ b/services/filestore/test/unit/coffee/FileControllerTests.coffee @@ -20,6 +20,7 @@ describe "FileController", -> user_files:"user_files" @FileHandler = getFile: sinon.stub() + getFileSize: sinon.stub() deleteFile: sinon.stub() insertFile: sinon.stub() getDirectorySize: sinon.stub() @@ -49,7 +50,8 @@ describe "FileController", -> file_id:@file_id headers: {} @res = - setHeader: -> + set: sinon.stub().returnsThis() + status: sinon.stub().returnsThis() @fileStream = {} describe "getFile", -> @@ -89,6 +91,39 @@ describe "FileController", -> done() @controller.getFile @req, @res + describe "getFileHead", -> + it "should return the file size in a Content-Length header", (done) -> + expectedFileSize = 84921 + @FileHandler.getFileSize.yields( + new Error("FileHandler.getFileSize: unexpected arguments") + ) + @FileHandler.getFileSize.withArgs(@bucket, @key).yields(null, expectedFileSize) + + @res.end = () => + expect(@res.status.lastCall.args[0]).to.equal(200) + expect(@res.set.calledWith("Content-Length", expectedFileSize)).to.equal(true) + done() + + @controller.getFileHead(@req, @res) + + it "should return a 404 is the file is not found", (done) -> + @FileHandler.getFileSize.yields(new @Errors.NotFoundError()) + + @res.end = () => + expect(@res.status.lastCall.args[0]).to.equal(404) + done() + + @controller.getFileHead(@req, @res) + + it "should return a 500 on internal errors", (done) -> + @FileHandler.getFileSize.yields(new Error()) + + @res.end = () => + expect(@res.status.lastCall.args[0]).to.equal(500) + done() + + @controller.getFileHead(@req, @res) + describe "insertFile", -> it "should send bucket name key and res to PersistorManager", (done)-> diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.coffee index 50b8a17524..754366195e 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.coffee +++ b/services/filestore/test/unit/coffee/FileHandlerTests.coffee @@ -108,7 +108,6 @@ describe "FileHandler", -> @handler._getConvertedFile.called.should.equal true done() - describe "_getStandardFile", -> beforeEach -> diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 1eee09dc29..2554461fd7 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -26,11 +26,13 @@ describe "S3PersistorManagerTests", -> @knox = createClient: sinon.stub().returns(@stubbedKnoxClient) @stubbedS3Client = - copyObject:sinon.stub() + copyObject: sinon.stub() + headObject: sinon.stub() @awsS3 = 
sinon.stub().returns @stubbedS3Client @LocalFileWriter = writeStream: sinon.stub() deleteFile: sinon.stub() + @request = sinon.stub() @requires = "knox": @knox "aws-sdk/clients/s3": @awsS3 @@ -39,15 +41,16 @@ describe "S3PersistorManagerTests", -> "logger-sharelatex": log:-> err:-> + "request": @request "./Errors": @Errors = NotFoundError: sinon.stub() @key = "my/key" @bucketName = "my-bucket" @error = "my errror" + @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires describe "getFileStream", -> beforeEach -> - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires @opts = {} it "should use correct key", (done)-> @@ -74,7 +77,6 @@ describe "S3PersistorManagerTests", -> describe "with supplied auth", -> beforeEach -> - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires @credentials = auth_key: "that_key" auth_secret: "that_secret" @@ -156,10 +158,45 @@ describe "S3PersistorManagerTests", -> @Errors.NotFoundError.called.should.equal false done() + describe "getFileSize", -> + it "should obtain the file size from S3", (done) -> + expectedFileSize = 123 + @stubbedS3Client.headObject.yields(new Error( + "s3Client.headObject got unexpected arguments" + )) + @stubbedS3Client.headObject.withArgs({ + Bucket: @bucketName + Key: @key + }).yields(null, { ContentLength: expectedFileSize }) + + @S3PersistorManager.getFileSize @bucketName, @key, (err, fileSize) => + if err? + return done(err) + expect(fileSize).to.equal(expectedFileSize) + done() + + [403, 404].forEach (statusCode) -> + it "should throw NotFoundError when S3 responds with #{statusCode}", (done) -> + error = new Error() + error.statusCode = statusCode + @stubbedS3Client.headObject.yields(error) + + @S3PersistorManager.getFileSize @bucketName, @key, (err, fileSize) => + expect(err).to.be.an.instanceof(@Errors.NotFoundError) + done() + + it "should rethrow any other error", (done) -> + error = new Error() + @stubbedS3Client.headObject.yields(error) + @stubbedS3Client.headObject.yields(error) + + @S3PersistorManager.getFileSize @bucketName, @key, (err, fileSize) => + expect(err).to.equal(error) + done() + describe "sendFile", -> beforeEach -> - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires @stubbedKnoxClient.putFile.returns on:-> it "should put file with knox", (done)-> @@ -183,7 +220,6 @@ describe "S3PersistorManagerTests", -> @fsPath = "to/some/where" @origin = on:-> - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires @S3PersistorManager.sendFile = sinon.stub().callsArgWith(3) it "should send stream to LocalFileWriter", (done)-> @@ -211,7 +247,6 @@ describe "S3PersistorManagerTests", -> beforeEach -> @sourceKey = "my/key" @destKey = "my/dest/key" - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires it "should use AWS SDK to copy file", (done)-> @stubbedS3Client.copyObject.callsArgWith(1, @error) @@ -229,9 +264,6 @@ describe "S3PersistorManagerTests", -> describe "deleteDirectory", -> - beforeEach -> - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires - it "should list the contents passing them onto multi delete", (done)-> data = Contents: [{Key:"1234"}, {Key: "456"}] @@ -244,9 +276,7 @@ describe "S3PersistorManagerTests", -> describe "deleteFile", -> it "should use correct options", (done)-> - @request = sinon.stub().callsArgWith(1) - @requires["request"] = @request - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + 
@request.callsArgWith(1) @S3PersistorManager.deleteFile @bucketName, @key, (err)=> opts = @request.args[0][0] @@ -257,9 +287,7 @@ describe "S3PersistorManagerTests", -> done() it "should return the error", (done)-> - @request = sinon.stub().callsArgWith(1, @error) - @requires["request"] = @request - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @request.callsArgWith(1, @error) @S3PersistorManager.deleteFile @bucketName, @key, (err)=> err.should.equal @error @@ -268,9 +296,7 @@ describe "S3PersistorManagerTests", -> describe "checkIfFileExists", -> it "should use correct options", (done)-> - @request = sinon.stub().callsArgWith(1, null, statusCode:200) - @requires["request"] = @request - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @request.callsArgWith(1, null, statusCode:200) @S3PersistorManager.checkIfFileExists @bucketName, @key, (err)=> opts = @request.args[0][0] @@ -281,25 +307,21 @@ describe "S3PersistorManagerTests", -> done() it "should return true for a 200", (done)-> - @request = sinon.stub().callsArgWith(1, null, statusCode:200) - @requires["request"] = @request - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @request.callsArgWith(1, null, statusCode:200) + @S3PersistorManager.checkIfFileExists @bucketName, @key, (err, exists)=> exists.should.equal true done() it "should return false for a non 200", (done)-> - @request = sinon.stub().callsArgWith(1, null, statusCode:404) - @requires["request"] = @request - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @request.callsArgWith(1, null, statusCode:404) + @S3PersistorManager.checkIfFileExists @bucketName, @key, (err, exists)=> exists.should.equal false done() it "should return the error", (done)-> - @request = sinon.stub().callsArgWith(1, @error, {}) - @requires["request"] = @request - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires + @request.callsArgWith(1, @error, {}) @S3PersistorManager.checkIfFileExists @bucketName, @key, (err)=> err.should.equal @error @@ -307,13 +329,10 @@ describe "S3PersistorManagerTests", -> describe "directorySize", -> - beforeEach -> - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires - it "should sum directory files size", (done) -> data = Contents: [ {Size: 1024}, {Size: 2048} ] @stubbedKnoxClient.list.callsArgWith(1, null, data) @S3PersistorManager.directorySize @bucketName, @key, (err, totalSize)=> totalSize.should.equal 3072 - done() \ No newline at end of file + done() From f2521a29b9d8035782647cfbf6f84c6794837ae3 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Tue, 18 Jun 2019 08:25:14 -0400 Subject: [PATCH 304/555] Use AWS SDK for getFileStream() The AWS SDK has a retry strategy to deal with rate limiting or transient unavailability of S3. We hope it will reduce our error rates. 
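
The retry behaviour this message relies on can also be made explicit when the client is constructed. A sketch of the client setup used in the patch with the retry knobs spelled out; maxRetries and retryDelayOptions are standard aws-sdk client options, and the values shown are illustrative rather than anything the patch sets:

    awsS3 = require "aws-sdk/clients/s3"
    settings = require "settings-sharelatex"

    # Same credential wiring as the patch, plus an explicit retry budget; the SDK
    # retries throttled and 5xx responses with exponential backoff between attempts.
    s3 = new awsS3({
      credentials: {
        accessKeyId: settings.filestore.s3.key
        secretAccessKey: settings.filestore.s3.secret
      }
      maxRetries: 3                     # illustrative retry budget
      retryDelayOptions: { base: 100 }  # illustrative base backoff in ms
    })
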
--- .../app/coffee/S3PersistorManager.coffee | 62 +++-- .../coffee/S3PersistorManagerTests.coffee | 228 +++++++++--------- 2 files changed, 155 insertions(+), 135 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 50486a0ce3..940ba90a95 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -26,12 +26,22 @@ buildDefaultOptions = (bucketName, method, key)-> uri:"https://#{bucketName}.s3.amazonaws.com/#{key}" } -s3 = new awsS3({ +defaultS3Client = new awsS3({ credentials: accessKeyId: settings.filestore.s3.key, secretAccessKey: settings.filestore.s3.secret }) +getS3Client = (credentials) -> + if credentials? + return new awsS3({ + credentials: + accessKeyId: credentials.auth_key + secretAccessKey: credentials.auth_secret + }) + else + return defaultS3Client + module.exports = sendFile: (bucketName, key, fsPath, callback)-> @@ -71,34 +81,39 @@ module.exports = # opts may be {start: Number, end: Number} getFileStream: (bucketName, key, opts, callback = (err, res)->)-> opts = opts || {} - headers = {} - if opts.start? and opts.end? - headers['Range'] = "bytes=#{opts.start}-#{opts.end}" - callback = _.once callback + callback = _.once(callback) logger.log bucketName:bucketName, key:key, "getting file from s3" - s3Client = knox.createClient - key: opts.credentials?.auth_key || settings.filestore.s3.key - secret: opts.credentials?.auth_secret || settings.filestore.s3.secret - bucket: bucketName - s3Stream = s3Client.get(key, headers) - s3Stream.end() - s3Stream.on 'response', (res) -> - if res.statusCode in [403, 404] + + s3 = getS3Client(opts.credentials) + s3Params = { + Bucket: bucketName + Key: key + } + if opts.start? and opts.end? + s3Params['Range'] = "bytes=#{opts.start}-#{opts.end}" + request = s3.getObject(s3Params) + + request.on 'httpHeaders', (statusCode, headers, response, statusMessage) => + if statusCode in [403, 404] # S3 returns a 403 instead of a 404 when the user doesn't have # permission to list the bucket contents. - logger.log bucketName:bucketName, key:key, "file not found in s3" - return callback new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}"), null - else if res.statusCode not in [200, 206] - logger.log bucketName:bucketName, key:key, "error getting file from s3: #{res.statusCode}" - return callback new Error("Got non-200 response from S3: #{res.statusCode}"), null - else - return callback null, res - s3Stream.on 'error', (err) -> - logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" - callback err + logger.log({ bucketName: bucketName, key: key }, "file not found in s3") + return callback(new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}"), null) + if statusCode not in [200, 206] + logger.log({bucketName: bucketName, key: key }, "error getting file from s3: #{statusCode}") + return callback(new Error("Got non-200 response from S3: #{statusCode} #{statusMessage}"), null) + stream = response.httpResponse.getUnbufferedStream() + callback(null, stream) + + request.on 'error', (err) => + logger.err({ err: err, bucketName: bucketName, key: key }, "error getting file stream from s3") + callback(err) + + request.send() getFileSize: (bucketName, key, callback) -> logger.log({ bucketName: bucketName, key: key }, "getting file size from S3") + s3 = getS3Client() s3.headObject { Bucket: bucketName, Key: key }, (err, data) -> if err? 
if err.statusCode in [403, 404] @@ -125,6 +140,7 @@ module.exports = logger.log bucketName:bucketName, sourceKey:sourceKey, destKey: destKey, "copying file in s3" source = bucketName + '/' + sourceKey # use the AWS SDK instead of knox due to problems with error handling (https://github.com/Automattic/knox/issues/114) + s3 = getS3Client() s3.copyObject {Bucket: bucketName, Key: destKey, CopySource: source}, (err) -> if err? if err.code is 'NoSuchKey' diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 2554461fd7..5244fcb8f2 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -17,18 +17,27 @@ describe "S3PersistorManagerTests", -> key: "this_key" stores: user_files:"sl_user_files" - @stubbedKnoxClient = + @knoxClient = putFile:sinon.stub() copyFile:sinon.stub() list: sinon.stub() deleteMultiple: sinon.stub() get: sinon.stub() @knox = - createClient: sinon.stub().returns(@stubbedKnoxClient) - @stubbedS3Client = + createClient: sinon.stub().returns(@knoxClient) + @s3EventHandlers = {} + @s3Request = + on: sinon.stub().callsFake (event, callback) => + @s3EventHandlers[event] = callback + send: sinon.stub() + @s3Response = + httpResponse: + getUnbufferedStream: sinon.stub() + @s3Client = copyObject: sinon.stub() headObject: sinon.stub() - @awsS3 = sinon.stub().returns @stubbedS3Client + getObject: sinon.stub().returns(@s3Request) + @awsS3 = sinon.stub().returns(@s3Client) @LocalFileWriter = writeStream: sinon.stub() deleteFile: sinon.stub() @@ -50,121 +59,116 @@ describe "S3PersistorManagerTests", -> @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires describe "getFileStream", -> - beforeEach -> - @opts = {} + describe "success", -> + beforeEach () -> + @expectedStream = { expectedStream: true } + @s3Request.send.callsFake () => + @s3EventHandlers.httpHeaders(200, {}, @s3Response, "OK") + @s3Response.httpResponse.getUnbufferedStream.returns(@expectedStream) - it "should use correct key", (done)-> - @stubbedKnoxClient.get.returns( - on:-> - end:-> - ) - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback - @stubbedKnoxClient.get.calledWith(@key).should.equal true - done() + it "returns a stream", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => + if err? + return done(err) + expect(stream).to.equal(@expectedStream) + done() - it "should use default auth", (done)-> - @stubbedKnoxClient.get.returns( - on:-> - end:-> - ) - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback - clientParams = - key: @settings.filestore.s3.key - secret: @settings.filestore.s3.secret - bucket: @bucketName - @knox.createClient.calledWith(clientParams).should.equal true - done() + it "sets the AWS client up with credentials from settings", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => + if err? 
+ return done(err) + expect(@awsS3.lastCall.args).to.deep.equal([{ + credentials: + accessKeyId: @settings.filestore.s3.key + secretAccessKey: @settings.filestore.s3.secret + }]) + done() - describe "with supplied auth", -> - beforeEach -> - @credentials = - auth_key: "that_key" - auth_secret: "that_secret" - @opts = - credentials: @credentials + it "fetches the right key from the right bucket", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => + if err? + return done(err) + expect(@s3Client.getObject.lastCall.args).to.deep.equal([{ + Bucket: @bucketName, + Key: @key + }]) + done() - it "should use supplied auth", (done)-> - @stubbedKnoxClient.get.returns( - on:-> - end:-> - ) - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback - clientParams = - key: @credentials.auth_key - secret: @credentials.auth_secret - bucket: @bucketName - @knox.createClient.calledWith(clientParams).should.equal true - done() + it "accepts alternative credentials", (done) -> + accessKeyId = "that_key" + secret = "that_secret" + opts = { + credentials: + auth_key: accessKeyId + auth_secret: secret + } + @S3PersistorManager.getFileStream @bucketName, @key, opts, (err, stream) => + if err? + return done(err) + expect(@awsS3.lastCall.args).to.deep.equal([{ + credentials: + accessKeyId: accessKeyId + secretAccessKey: secret + }]) + expect(stream).to.equal(@expectedStream) + done() - describe "with start and end options", -> - beforeEach -> - @opts = - start: 0 - end: 8 - it "should pass headers to the knox.Client.get()", (done) -> - @stubbedKnoxClient.get.returns( - on:-> - end:-> - ) - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err)=> # empty callback - @stubbedKnoxClient.get.calledWith(@key, {'Range': 'bytes=0-8'}).should.equal true - done() - - describe "error conditions", -> + it "accepts byte range", (done) -> + start = 0 + end = 8 + opts = { start: start, end: end } + @S3PersistorManager.getFileStream @bucketName, @key, opts, (err, stream) => + if err? 
+ return done(err) + expect(@s3Client.getObject.lastCall.args).to.deep.equal([{ + Bucket: @bucketName + Key: @key + Range: "bytes=#{start}-#{end}" + }]) + expect(stream).to.equal(@expectedStream) + done() + describe "errors", -> describe "when the file doesn't exist", -> - beforeEach -> - @fakeResponse = - statusCode: 404 - @stubbedKnoxClient.get.returns( - on: (key, callback) => - if key == 'response' - callback(@fakeResponse) - end: -> - ) + @s3Request.send.callsFake () => + @s3EventHandlers.httpHeaders(404, {}, @s3Response, "Not found") - it "should produce a NotFoundError", (done) -> - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback - expect(stream).to.equal null - expect(err).to.not.equal null - expect(err instanceof @Errors.NotFoundError).to.equal true + it "returns a NotFoundError that indicates the bucket and key", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => + expect(err).to.be.instanceof(@Errors.NotFoundError) + errMsg = @Errors.NotFoundError.lastCall.args[0] + expect(errMsg).to.match(new RegExp(".*#{@bucketName}.*")) + expect(errMsg).to.match(new RegExp(".*#{@key}.*")) done() - it "should have bucket and key in the Error message", (done) -> - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback - error_message = @Errors.NotFoundError.lastCall.args[0] - expect(error_message).to.not.equal null - error_message.should.match(new RegExp(".*#{@bucketName}.*")) - error_message.should.match(new RegExp(".*#{@key}.*")) + describe "when S3 encounters an unkown error", -> + beforeEach -> + @s3Request.send.callsFake () => + @s3EventHandlers.httpHeaders(500, {}, @s3Response, "Internal server error") + + it "returns an error", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => + expect(err).to.be.instanceof(Error) done() - describe "when the S3 service produces an error", -> + describe "when the S3 request errors out before receiving HTTP headers", -> beforeEach -> - @fakeResponse = - statusCode: 500 - @stubbedKnoxClient.get.returns( - on: (key, callback) => - if key == 'response' - callback(@fakeResponse) - end: -> - ) + @s3Request.send.callsFake () => + @s3EventHandlers.error(new Error("connection failed")) - it "should produce an error", (done) -> - @S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback - expect(stream).to.equal null - expect(err).to.not.equal null - expect(err instanceof Error).to.equal true - @Errors.NotFoundError.called.should.equal false + it "returns an error", (done) -> + @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => + expect(err).to.be.instanceof(Error) done() describe "getFileSize", -> it "should obtain the file size from S3", (done) -> expectedFileSize = 123 - @stubbedS3Client.headObject.yields(new Error( + @s3Client.headObject.yields(new Error( "s3Client.headObject got unexpected arguments" )) - @stubbedS3Client.headObject.withArgs({ + @s3Client.headObject.withArgs({ Bucket: @bucketName Key: @key }).yields(null, { ContentLength: expectedFileSize }) @@ -179,7 +183,7 @@ describe "S3PersistorManagerTests", -> it "should throw NotFoundError when S3 responds with #{statusCode}", (done) -> error = new Error() error.statusCode = statusCode - @stubbedS3Client.headObject.yields(error) + @s3Client.headObject.yields(error) @S3PersistorManager.getFileSize @bucketName, @key, (err, fileSize) => expect(err).to.be.an.instanceof(@Errors.NotFoundError) @@ 
-187,8 +191,8 @@ describe "S3PersistorManagerTests", -> it "should rethrow any other error", (done) -> error = new Error() - @stubbedS3Client.headObject.yields(error) - @stubbedS3Client.headObject.yields(error) + @s3Client.headObject.yields(error) + @s3Client.headObject.yields(error) @S3PersistorManager.getFileSize @bucketName, @key, (err, fileSize) => expect(err).to.equal(error) @@ -197,21 +201,21 @@ describe "S3PersistorManagerTests", -> describe "sendFile", -> beforeEach -> - @stubbedKnoxClient.putFile.returns on:-> + @knoxClient.putFile.returns on:-> it "should put file with knox", (done)-> @LocalFileWriter.deleteFile.callsArgWith(1) - @stubbedKnoxClient.putFile.callsArgWith(2, @error) + @knoxClient.putFile.callsArgWith(2, @error) @S3PersistorManager.sendFile @bucketName, @key, @fsPath, (err)=> - @stubbedKnoxClient.putFile.calledWith(@fsPath, @key).should.equal true + @knoxClient.putFile.calledWith(@fsPath, @key).should.equal true err.should.equal @error done() it "should delete the file and pass the error with it", (done)-> @LocalFileWriter.deleteFile.callsArgWith(1) - @stubbedKnoxClient.putFile.callsArgWith(2, @error) + @knoxClient.putFile.callsArgWith(2, @error) @S3PersistorManager.sendFile @bucketName, @key, @fsPath, (err)=> - @stubbedKnoxClient.putFile.calledWith(@fsPath, @key).should.equal true + @knoxClient.putFile.calledWith(@fsPath, @key).should.equal true err.should.equal @error done() @@ -249,15 +253,15 @@ describe "S3PersistorManagerTests", -> @destKey = "my/dest/key" it "should use AWS SDK to copy file", (done)-> - @stubbedS3Client.copyObject.callsArgWith(1, @error) + @s3Client.copyObject.callsArgWith(1, @error) @S3PersistorManager.copyFile @bucketName, @sourceKey, @destKey, (err)=> err.should.equal @error - @stubbedS3Client.copyObject.calledWith({Bucket: @bucketName, Key: @destKey, CopySource: @bucketName + '/' + @key}).should.equal true + @s3Client.copyObject.calledWith({Bucket: @bucketName, Key: @destKey, CopySource: @bucketName + '/' + @key}).should.equal true done() it "should return a NotFoundError object if the original file does not exist", (done)-> NoSuchKeyError = {code: "NoSuchKey"} - @stubbedS3Client.copyObject.callsArgWith(1, NoSuchKeyError) + @s3Client.copyObject.callsArgWith(1, NoSuchKeyError) @S3PersistorManager.copyFile @bucketName, @sourceKey, @destKey, (err)=> expect(err instanceof @Errors.NotFoundError).to.equal true done() @@ -267,10 +271,10 @@ describe "S3PersistorManagerTests", -> it "should list the contents passing them onto multi delete", (done)-> data = Contents: [{Key:"1234"}, {Key: "456"}] - @stubbedKnoxClient.list.callsArgWith(1, null, data) - @stubbedKnoxClient.deleteMultiple.callsArgWith(1) + @knoxClient.list.callsArgWith(1, null, data) + @knoxClient.deleteMultiple.callsArgWith(1) @S3PersistorManager.deleteDirectory @bucketName, @key, (err)=> - @stubbedKnoxClient.deleteMultiple.calledWith(["1234","456"]).should.equal true + @knoxClient.deleteMultiple.calledWith(["1234","456"]).should.equal true done() describe "deleteFile", -> @@ -332,7 +336,7 @@ describe "S3PersistorManagerTests", -> it "should sum directory files size", (done) -> data = Contents: [ {Size: 1024}, {Size: 2048} ] - @stubbedKnoxClient.list.callsArgWith(1, null, data) + @knoxClient.list.callsArgWith(1, null, data) @S3PersistorManager.directorySize @bucketName, @key, (err, totalSize)=> totalSize.should.equal 3072 done() From f865762c293b3746a36ebd6df7284c327b271954 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Wed, 19 Jun 2019 13:25:18 +0100 Subject: [PATCH 305/555] update 
logger and metrics --- services/filestore/Makefile | 6 +- services/filestore/buildscript.txt | 2 +- services/filestore/docker-compose.ci.yml | 6 +- services/filestore/docker-compose.yml | 7 +- services/filestore/npm-shrinkwrap.json | 515 ++++++++++++++++------- services/filestore/package.json | 4 +- 6 files changed, 381 insertions(+), 159 deletions(-) diff --git a/services/filestore/Makefile b/services/filestore/Makefile index db33518816..75286c139a 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.12 +# Version: 1.1.21 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -26,7 +26,9 @@ test: test_unit test_acceptance test_unit: @[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit -test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run +test_acceptance: test_clean test_acceptance_pre_run test_acceptance_run + +test_acceptance_run: @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance test_clean: diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index 0ba90519b3..51452eb242 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -5,4 +5,4 @@ filestore --dependencies=mongo,redis --docker-repos=gcr.io/overleaf-ops --build-target=docker ---script-version=1.1.12 +--script-version=1.1.21 diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index e7ac6e84a7..d2bcca9ec6 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.12 +# Version: 1.1.21 version: "2" @@ -10,6 +10,8 @@ services: image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER user: node command: npm run test:unit:_run + environment: + NODE_ENV: test test_acceptance: @@ -21,7 +23,7 @@ services: MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} - ENABLE_CONVERSIONS: "true" + NODE_ENV: test depends_on: - mongo - redis diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 60d387bf51..234b93e236 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.12 +# Version: 1.1.21 version: "2" @@ -13,6 +13,7 @@ services: working_dir: /app environment: MOCHA_GREP: ${MOCHA_GREP} + NODE_ENV: test command: npm run test:unit user: node @@ -27,7 +28,8 @@ services: MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} - ENABLE_CONVERSIONS: "true" + LOG_LEVEL: ERROR + NODE_ENV: test user: node depends_on: - mongo @@ -50,3 +52,4 @@ services: mongo: image: mongo:3.4 + diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 7caf791bae..66670c6e69 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -3,19 +3,19 @@ "version": "0.1.4", "dependencies": { "@google-cloud/common": { - "version": "0.27.0", - "from": "@google-cloud/common@>=0.27.0 <0.28.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.27.0.tgz" + "version": "0.32.1", + "from": "@google-cloud/common@>=0.32.0 <0.33.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz" }, "@google-cloud/debug-agent": { - "version": "3.0.1", + "version": "3.2.0", "from": "@google-cloud/debug-agent@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.2.0.tgz", "dependencies": { "coffeescript": { - "version": "2.3.2", + "version": "2.4.1", "from": "coffeescript@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.3.2.tgz" + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.4.1.tgz" } } }, @@ -29,15 +29,47 @@ "from": "@google-cloud/common@>=0.26.0 <0.27.0", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz" }, + "@google-cloud/promisify": { + "version": "0.3.1", + "from": "@google-cloud/promisify@>=0.3.0 <0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz" + }, + "arrify": { + "version": "1.0.1", + "from": "arrify@>=1.0.1 <2.0.0", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz" + }, + "gcp-metadata": { + "version": "0.9.3", + "from": "gcp-metadata@>=0.9.0 <0.10.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz" + }, + "google-auth-library": { + "version": "2.0.2", + "from": "google-auth-library@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", + "dependencies": { + "gcp-metadata": { + "version": "0.7.0", + "from": "gcp-metadata@>=0.7.0 <0.8.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz" + } + } + }, "nan": { - "version": "2.12.1", + "version": "2.14.0", "from": "nan@>=2.11.1 <3.0.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.12.1.tgz" + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz" }, "readable-stream": { - "version": "3.1.1", + "version": "3.4.0", "from": "readable-stream@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.1.1.tgz" + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz" + }, + "semver": { + "version": "5.7.0", + "from": "semver@>=5.5.0 <6.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz" }, "string_decoder": { "version": "1.2.0", @@ -45,37 +77,27 @@ "resolved": 
"https://registry.npmjs.org/string_decoder/-/string_decoder-1.2.0.tgz" }, "through2": { - "version": "3.0.0", + "version": "3.0.1", "from": "through2@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz" } } }, "@google-cloud/projectify": { - "version": "0.3.2", - "from": "@google-cloud/projectify@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.2.tgz" + "version": "0.3.3", + "from": "@google-cloud/projectify@>=0.3.3 <0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz" }, "@google-cloud/promisify": { - "version": "0.3.1", - "from": "@google-cloud/promisify@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz" + "version": "0.4.0", + "from": "@google-cloud/promisify@>=0.4.0 <0.5.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz" }, "@google-cloud/trace-agent": { - "version": "3.5.2", + "version": "3.6.1", "from": "@google-cloud/trace-agent@>=3.2.0 <4.0.0", - "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.5.2.tgz", + "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.6.1.tgz", "dependencies": { - "@google-cloud/common": { - "version": "0.30.2", - "from": "@google-cloud/common@>=0.30.0 <0.31.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.30.2.tgz" - }, - "google-auth-library": { - "version": "3.0.1", - "from": "google-auth-library@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.0.1.tgz" - }, "uuid": { "version": "3.3.2", "from": "uuid@>=3.0.1 <4.0.0", @@ -134,14 +156,46 @@ "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz" }, "@sindresorhus/is": { - "version": "0.13.0", - "from": "@sindresorhus/is@>=0.13.0 <0.14.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.13.0.tgz" + "version": "0.15.0", + "from": "@sindresorhus/is@>=0.15.0 <0.16.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz" + }, + "@sinonjs/commons": { + "version": "1.4.0", + "from": "@sinonjs/commons@>=1.2.0 <2.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.4.0.tgz", + "dev": true + }, + "@sinonjs/formatio": { + "version": "3.2.1", + "from": "@sinonjs/formatio@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.2.1.tgz", + "dev": true, + "dependencies": { + "@sinonjs/samsam": { + "version": "3.3.2", + "from": "@sinonjs/samsam@>=3.1.0 <4.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-3.3.2.tgz", + "dev": true + } + } + }, + "@sinonjs/samsam": { + "version": "2.1.3", + "from": "@sinonjs/samsam@>=2.1.2 <3.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-2.1.3.tgz", + "dev": true + }, + "@sinonjs/text-encoding": { + "version": "0.7.1", + "from": "@sinonjs/text-encoding@>=0.7.1 <0.8.0", + "resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz", + "dev": true }, "@types/caseless": { - "version": "0.12.1", + "version": "0.12.2", "from": "@types/caseless@*", - "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.1.tgz" + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz" }, "@types/console-log-level": { "version": "1.4.0", @@ 
-164,9 +218,9 @@ "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz" }, "@types/node": { - "version": "10.12.20", + "version": "12.0.8", "from": "@types/node@*", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.20.tgz" + "resolved": "https://registry.npmjs.org/@types/node/-/node-12.0.8.tgz" }, "@types/request": { "version": "2.48.1", @@ -183,6 +237,11 @@ "from": "@types/tough-cookie@*", "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.5.tgz" }, + "abort-controller": { + "version": "3.0.0", + "from": "abort-controller@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz" + }, "accept-encoding": { "version": "0.1.0", "from": "accept-encoding@>=0.1.0 <0.2.0", @@ -194,29 +253,35 @@ "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz" }, "acorn": { - "version": "5.7.3", - "from": "acorn@>=5.0.3 <6.0.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.3.tgz" + "version": "6.1.1", + "from": "acorn@>=6.0.0 <7.0.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.1.1.tgz" }, "agent-base": { - "version": "4.2.1", + "version": "4.3.0", "from": "agent-base@>=4.1.0 <5.0.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.1.tgz" + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz" }, "ajv": { - "version": "6.7.0", + "version": "6.10.0", "from": "ajv@>=6.5.5 <7.0.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.7.0.tgz" + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.0.tgz" }, "array-flatten": { "version": "1.1.1", "from": "array-flatten@1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" }, + "array-from": { + "version": "2.1.1", + "from": "array-from@>=2.1.1 <3.0.0", + "resolved": "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz", + "dev": true + }, "arrify": { - "version": "1.0.1", - "from": "arrify@>=1.0.1 <2.0.0", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz" + "version": "2.0.1", + "from": "arrify@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz" }, "asn1": { "version": "0.2.4", @@ -228,6 +293,12 @@ "from": "assert-plus@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" }, + "assertion-error": { + "version": "1.1.0", + "from": "assertion-error@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "dev": true + }, "async": { "version": "0.2.10", "from": "async@>=0.2.10 <0.3.0", @@ -236,7 +307,14 @@ "async-listener": { "version": "0.6.10", "from": "async-listener@>=0.6.0 <0.7.0", - "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz" + "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz", + "dependencies": { + "semver": { + "version": "5.7.0", + "from": "semver@>=5.3.0 <6.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz" + } + } }, "asynckit": { "version": "0.4.0", @@ -271,9 +349,9 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz" }, "axios": { - "version": "0.18.0", + "version": "0.18.1", "from": "axios@>=0.18.0 <0.19.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.0.tgz" + "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz" }, "balanced-match": { "version": "1.0.0", @@ -301,9 +379,9 @@ "resolved": 
"https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz" }, "bindings": { - "version": "1.4.0", + "version": "1.5.0", "from": "bindings@>=1.2.1 <2.0.0", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz" }, "bintrees": { "version": "1.0.1", @@ -346,14 +424,15 @@ "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz" }, "builtin-modules": { - "version": "3.0.0", + "version": "3.1.0", "from": "builtin-modules@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz" }, "bunyan": { "version": "1.5.1", "from": "bunyan@1.5.1", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz" + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz", + "dev": true }, "bytes": { "version": "3.0.0", @@ -365,10 +444,22 @@ "from": "caseless@>=0.3.0 <0.4.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz" }, + "chai": { + "version": "4.2.0", + "from": "chai@4.2.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz", + "dev": true + }, + "check-error": { + "version": "1.0.2", + "from": "check-error@>=1.0.2 <2.0.0", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "dev": true + }, "coffee-script": { - "version": "1.12.4", - "from": "coffee-script@1.12.4", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.4.tgz" + "version": "1.6.0", + "from": "coffee-script@1.6.0", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" }, "combined-stream": { "version": "0.0.7", @@ -381,9 +472,9 @@ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" }, "console-log-level": { - "version": "1.4.0", + "version": "1.4.1", "from": "console-log-level@>=1.4.0 <2.0.0", - "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz" }, "content-disposition": { "version": "0.5.2", @@ -435,10 +526,16 @@ "from": "debug@2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" }, + "deep-eql": { + "version": "3.0.1", + "from": "deep-eql@>=3.0.1 <4.0.0", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "dev": true + }, "delay": { - "version": "4.1.0", + "version": "4.3.0", "from": "delay@>=4.0.1 <5.0.0", - "resolved": "https://registry.npmjs.org/delay/-/delay-4.1.0.tgz" + "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz" }, "delayed-stream": { "version": "0.0.5", @@ -455,16 +552,23 @@ "from": "destroy@>=1.0.4 <1.1.0", "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz" }, + "diff": { + "version": "3.5.0", + "from": "diff@>=3.5.0 <4.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "dev": true + }, "dtrace-provider": { "version": "0.6.0", "from": "dtrace-provider@>=0.6.0 <0.7.0", "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "dev": true, "optional": true }, "duplexify": { - "version": "3.6.1", + "version": "3.7.1", "from": "duplexify@>=3.6.0 <4.0.0", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", "dependencies": { "readable-stream": { 
"version": "2.3.6", @@ -484,9 +588,9 @@ "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz" }, "ecdsa-sig-formatter": { - "version": "1.0.10", - "from": "ecdsa-sig-formatter@1.0.10", - "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.10.tgz" + "version": "1.0.11", + "from": "ecdsa-sig-formatter@1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz" }, "ee-first": { "version": "1.1.1", @@ -514,9 +618,9 @@ "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz" }, "es6-promise": { - "version": "4.2.5", + "version": "4.2.8", "from": "es6-promise@>=4.0.3 <5.0.0", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.5.tgz" + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz" }, "es6-promisify": { "version": "5.0.0", @@ -538,6 +642,11 @@ "from": "etag@>=1.8.1 <1.9.0", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz" }, + "event-target-shim": { + "version": "5.0.1", + "from": "event-target-shim@>=5.0.0 <6.0.0", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz" + }, "events": { "version": "1.1.1", "from": "events@1.1.1", @@ -640,9 +749,9 @@ "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz" }, "follow-redirects": { - "version": "1.6.1", - "from": "follow-redirects@>=1.3.0 <2.0.0", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.6.1.tgz", + "version": "1.5.10", + "from": "follow-redirects@1.5.10", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", "dependencies": { "debug": { "version": "3.1.0", @@ -689,14 +798,20 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" }, "gaxios": { - "version": "1.2.7", - "from": "gaxios@>=1.0.4 <2.0.0", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.2.7.tgz" + "version": "1.8.4", + "from": "gaxios@>=1.2.1 <2.0.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz" }, "gcp-metadata": { - "version": "0.9.3", - "from": "gcp-metadata@>=0.9.0 <0.10.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz" + "version": "1.0.0", + "from": "gcp-metadata@>=1.0.0 <2.0.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz" + }, + "get-func-name": { + "version": "2.0.0", + "from": "get-func-name@>=2.0.0 <3.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "dev": true }, "getpass": { "version": "0.1.7", @@ -715,21 +830,21 @@ "optional": true }, "google-auth-library": { - "version": "2.0.2", - "from": "google-auth-library@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", + "version": "3.1.2", + "from": "google-auth-library@>=3.1.1 <4.0.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", "dependencies": { - "gcp-metadata": { - "version": "0.7.0", - "from": "gcp-metadata@>=0.7.0 <0.8.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz" + "semver": { + "version": "5.7.0", + "from": "semver@>=5.5.0 <6.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz" } } }, "google-p12-pem": { - "version": "1.0.3", + "version": "1.0.4", "from": "google-p12-pem@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.3.tgz" + "resolved": 
"https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz" }, "graceful-fs": { "version": "4.1.11", @@ -737,14 +852,14 @@ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz" }, "gtoken": { - "version": "2.3.2", - "from": "gtoken@>=2.3.0 <3.0.0", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.2.tgz", + "version": "2.3.3", + "from": "gtoken@>=2.3.2 <3.0.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", "dependencies": { "mime": { - "version": "2.4.0", + "version": "2.4.4", "from": "mime@>=2.2.0 <3.0.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.0.tgz" + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz" } } }, @@ -809,9 +924,9 @@ "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz" }, "ms": { - "version": "2.1.1", + "version": "2.1.2", "from": "ms@>=2.1.1 <3.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" } } }, @@ -846,9 +961,9 @@ "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz" }, "is-buffer": { - "version": "1.1.6", - "from": "is-buffer@>=1.1.5 <2.0.0", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz" + "version": "2.0.3", + "from": "is-buffer@>=2.0.2 <3.0.0", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.3.tgz" }, "is-typedarray": { "version": "1.0.0", @@ -905,15 +1020,21 @@ "from": "jsprim@>=1.2.2 <2.0.0", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz" }, + "just-extend": { + "version": "4.0.2", + "from": "just-extend@>=4.0.2 <5.0.0", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.0.2.tgz", + "dev": true + }, "jwa": { - "version": "1.2.0", - "from": "jwa@>=1.2.0 <2.0.0", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.2.0.tgz" + "version": "1.4.1", + "from": "jwa@>=1.4.1 <2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz" }, "jws": { - "version": "3.2.1", + "version": "3.2.2", "from": "jws@>=3.1.5 <4.0.0", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.1.tgz" + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz" }, "klaw": { "version": "1.3.1", @@ -932,31 +1053,54 @@ } } }, + "lodash": { + "version": "4.17.11", + "from": "lodash@>=4.17.11 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz", + "dev": true + }, + "lodash.get": { + "version": "4.4.2", + "from": "lodash.get@>=4.4.2 <5.0.0", + "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "dev": true + }, "lodash.pickby": { "version": "4.6.0", "from": "lodash.pickby@>=4.6.0 <5.0.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz" }, "logger-sharelatex": { - "version": "1.6.0", - "from": "logger-sharelatex@1.6.0", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.6.0.tgz", + "version": "1.7.0", + "from": "logger-sharelatex@1.7.0", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.7.0.tgz", "dependencies": { + "bunyan": { + "version": "1.8.12", + "from": "bunyan@1.8.12", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz" + }, "caseless": { "version": "0.12.0", "from": "caseless@>=0.12.0 <0.13.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz" }, "combined-stream": { - "version": "1.0.7", + "version": "1.0.8", "from": "combined-stream@>=1.0.6 <1.1.0", - "resolved": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz" + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" }, "delayed-stream": { "version": "1.0.0", "from": "delayed-stream@>=1.0.0 <1.1.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" }, + "dtrace-provider": { + "version": "0.8.7", + "from": "dtrace-provider@>=0.8.0 <0.9.0", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.7.tgz", + "optional": true + }, "forever-agent": { "version": "0.6.1", "from": "forever-agent@>=0.6.1 <0.7.0", @@ -994,6 +1138,12 @@ } } }, + "lolex": { + "version": "3.1.0", + "from": "lolex@>=3.0.0 <4.0.0", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-3.1.0.tgz", + "dev": true + }, "long": { "version": "4.0.0", "from": "long@>=4.0.0 <5.0.0", @@ -1035,15 +1185,10 @@ "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" }, "metrics-sharelatex": { - "version": "2.1.1", - "from": "metrics-sharelatex@2.1.1", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.1.1.tgz", + "version": "2.2.0", + "from": "metrics-sharelatex@2.2.0", + "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.2.0.tgz", "dependencies": { - "coffee-script": { - "version": "1.6.0", - "from": "coffee-script@1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" - }, "underscore": { "version": "1.6.0", "from": "underscore@>=1.6.0 <1.7.0", @@ -1123,6 +1268,12 @@ "from": "module-details-from-path@>=1.0.3 <2.0.0", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz" }, + "moment": { + "version": "2.24.0", + "from": "moment@>=2.10.6 <3.0.0", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz", + "optional": true + }, "ms": { "version": "2.0.0", "from": "ms@2.0.0", @@ -1165,15 +1316,41 @@ "from": "negotiator@0.6.1", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz" }, + "nise": { + "version": "1.5.0", + "from": "nise@>=1.4.6 <2.0.0", + "resolved": "https://registry.npmjs.org/nise/-/nise-1.5.0.tgz", + "dev": true, + "dependencies": { + "isarray": { + "version": "0.0.1", + "from": "isarray@0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "dev": true + }, + "lolex": { + "version": "4.1.0", + "from": "lolex@>=4.1.0 <5.0.0", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-4.1.0.tgz", + "dev": true + }, + "path-to-regexp": { + "version": "1.7.0", + "from": "path-to-regexp@>=1.7.0 <2.0.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", + "dev": true + } + } + }, "node-fetch": { - "version": "2.3.0", - "from": "node-fetch@>=2.2.0 <3.0.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.3.0.tgz" + "version": "2.6.0", + "from": "node-fetch@>=2.3.0 <3.0.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz" }, "node-forge": { - "version": "0.7.6", - "from": "node-forge@>=0.7.4 <0.8.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.7.6.tgz" + "version": "0.8.4", + "from": "node-forge@>=0.8.0 <0.9.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.4.tgz" }, "node-transloadit": { "version": "0.0.4", @@ -1228,14 +1405,14 @@ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz" }, "p-limit": { - "version": "2.1.0", - "from": "p-limit@>=2.0.0 <3.0.0", - "resolved": 
"https://registry.npmjs.org/p-limit/-/p-limit-2.1.0.tgz" + "version": "2.2.0", + "from": "p-limit@>=2.2.0 <3.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz" }, "p-try": { - "version": "2.0.0", + "version": "2.2.0", "from": "p-try@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" }, "parse-duration": { "version": "0.1.1", @@ -1243,9 +1420,9 @@ "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.1.tgz" }, "parse-ms": { - "version": "2.0.0", + "version": "2.1.0", "from": "parse-ms@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz" }, "parseurl": { "version": "1.3.2", @@ -1267,6 +1444,12 @@ "from": "path-to-regexp@0.1.7", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" }, + "pathval": { + "version": "1.1.0", + "from": "pathval@>=1.1.0 <2.0.0", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz", + "dev": true + }, "performance-now": { "version": "2.1.0", "from": "performance-now@>=2.1.0 <3.0.0", @@ -1293,14 +1476,21 @@ "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz" }, "prom-client": { - "version": "11.2.1", + "version": "11.5.1", "from": "prom-client@>=11.1.3 <12.0.0", - "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.2.1.tgz" + "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.1.tgz" }, "protobufjs": { "version": "6.8.8", "from": "protobufjs@>=6.8.6 <6.9.0", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz" + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", + "dependencies": { + "@types/node": { + "version": "10.14.9", + "from": "@types/node@>=10.1.0 <11.0.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.14.9.tgz" + } + } }, "proxy-addr": { "version": "2.0.4", @@ -1308,9 +1498,9 @@ "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz" }, "psl": { - "version": "1.1.31", + "version": "1.1.32", "from": "psl@>=1.1.24 <2.0.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.31.tgz" + "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.32.tgz" }, "punycode": { "version": "1.3.2", @@ -1333,9 +1523,9 @@ "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" }, "raven": { - "version": "1.2.1", - "from": "raven@>=1.1.3 <2.0.0", - "resolved": "https://registry.npmjs.org/raven/-/raven-1.2.1.tgz", + "version": "1.1.3", + "from": "raven@1.1.3", + "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", "dependencies": { "uuid": { "version": "3.0.0", @@ -1398,9 +1588,21 @@ } }, "require-in-the-middle": { - "version": "3.1.0", - "from": "require-in-the-middle@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-3.1.0.tgz" + "version": "4.0.0", + "from": "require-in-the-middle@>=4.0.0 <5.0.0", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-4.0.0.tgz", + "dependencies": { + "debug": { + "version": "4.1.1", + "from": "debug@>=4.1.1 <5.0.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz" + }, + "ms": { + "version": "2.1.2", + "from": "ms@^2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" + } + } }, "require-like": { "version": "0.1.2", @@ -1409,9 
+1611,9 @@ "dev": true }, "resolve": { - "version": "1.10.0", - "from": "resolve@>=1.5.0 <2.0.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.10.0.tgz" + "version": "1.11.0", + "from": "resolve@>=1.10.0 <2.0.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.11.0.tgz" }, "response": { "version": "0.14.0", @@ -1468,9 +1670,9 @@ "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz" }, "semver": { - "version": "5.6.0", - "from": "semver@>=5.5.0 <6.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz" + "version": "6.1.1", + "from": "semver@>=6.0.0 <7.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.1.tgz" }, "send": { "version": "0.16.2", @@ -1511,6 +1713,12 @@ "from": "shimmer@>=1.2.0 <2.0.0", "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz" }, + "sinon": { + "version": "7.1.1", + "from": "sinon@7.1.1", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-7.1.1.tgz", + "dev": true + }, "sntp": { "version": "0.1.4", "from": "sntp@>=0.1.0 <0.2.0", @@ -1583,10 +1791,11 @@ "from": "string_decoder@>=0.10.0 <0.11.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" }, - "symbol-observable": { - "version": "1.2.0", - "from": "symbol-observable@>=1.2.0 <2.0.0", - "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz" + "supports-color": { + "version": "5.5.0", + "from": "supports-color@>=5.5.0 <6.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "dev": true }, "tdigest": { "version": "0.1.1", @@ -1649,6 +1858,12 @@ "from": "tweetnacl@>=0.14.0 <0.15.0", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz" }, + "type-detect": { + "version": "4.0.8", + "from": "type-detect@>=4.0.5 <5.0.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "dev": true + }, "type-is": { "version": "1.6.16", "from": "type-is@>=1.6.16 <1.7.0", diff --git a/services/filestore/package.json b/services/filestore/package.json index a9163666cd..bcd8011f10 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -29,8 +29,8 @@ "fs-extra": "^1.0.0", "heapdump": "^0.3.2", "knox": "~0.9.1", - "logger-sharelatex": "^1.6.0", - "metrics-sharelatex": "^2.1.1", + "logger-sharelatex": "^1.7.0", + "metrics-sharelatex": "^2.2.0", "mocha": "5.2.0", "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", From 013400d7a4e5cc1380604c2d0141e7df96c17961 Mon Sep 17 00:00:00 2001 From: Shane Kilkelly Date: Wed, 19 Jun 2019 14:04:57 +0100 Subject: [PATCH 306/555] Re-add environment variable for conversions --- services/filestore/docker-compose.ci.yml | 1 + services/filestore/docker-compose.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index d2bcca9ec6..765c79aac5 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -22,6 +22,7 @@ services: REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres + ENABLE_CONVERSIONS: "true" MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test depends_on: diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 234b93e236..e282295b14 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -28,6 +28,7 @@ services: MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} + ENABLE_CONVERSIONS: "true" 
LOG_LEVEL: ERROR NODE_ENV: test user: node From 7d900b57bfe1c0ebaa9fa03f5f18f1f74b101402 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Wed, 19 Jun 2019 12:58:17 -0400 Subject: [PATCH 307/555] Fix createUnbufferedStream() function call In 49a21155f642670dfea264ac73fb60241f37cb87, I managed to incorrectly write the `createUnbufferedStream()` function from the AWS SDK as `getUnbufferedStream()` and to consistently use that naming in the unit tests. This commit fixes that. I have tested again on S3. --- services/filestore/app/coffee/S3PersistorManager.coffee | 2 +- .../filestore/test/unit/coffee/S3PersistorManagerTests.coffee | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 940ba90a95..8f1b080efd 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -102,7 +102,7 @@ module.exports = if statusCode not in [200, 206] logger.log({bucketName: bucketName, key: key }, "error getting file from s3: #{statusCode}") return callback(new Error("Got non-200 response from S3: #{statusCode} #{statusMessage}"), null) - stream = response.httpResponse.getUnbufferedStream() + stream = response.httpResponse.createUnbufferedStream() callback(null, stream) request.on 'error', (err) => diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 5244fcb8f2..8a1b2e4d49 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -32,7 +32,7 @@ describe "S3PersistorManagerTests", -> send: sinon.stub() @s3Response = httpResponse: - getUnbufferedStream: sinon.stub() + createUnbufferedStream: sinon.stub() @s3Client = copyObject: sinon.stub() headObject: sinon.stub() @@ -64,7 +64,7 @@ describe "S3PersistorManagerTests", -> @expectedStream = { expectedStream: true } @s3Request.send.callsFake () => @s3EventHandlers.httpHeaders(200, {}, @s3Response, "OK") - @s3Response.httpResponse.getUnbufferedStream.returns(@expectedStream) + @s3Response.httpResponse.createUnbufferedStream.returns(@expectedStream) it "returns a stream", (done) -> @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => From 47985892136faeac8d6c7dcc3add79ef55a3274d Mon Sep 17 00:00:00 2001 From: Nate Stemen Date: Fri, 25 Oct 2019 11:51:58 -0400 Subject: [PATCH 308/555] replace private link with public one --- services/filestore/.github/PULL_REQUEST_TEMPLATE.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/filestore/.github/PULL_REQUEST_TEMPLATE.md b/services/filestore/.github/PULL_REQUEST_TEMPLATE.md index ed25ee83c1..12bb2eeb3f 100644 --- a/services/filestore/.github/PULL_REQUEST_TEMPLATE.md +++ b/services/filestore/.github/PULL_REQUEST_TEMPLATE.md @@ -1,4 +1,7 @@ - + + + + ### Description From aba0d14eddcdecde52aa04332eb1a985701265e4 Mon Sep 17 00:00:00 2001 From: Nate Stemen Date: Fri, 25 Oct 2019 11:52:54 -0400 Subject: [PATCH 309/555] bump build script to 1.1.24 --- services/filestore/Makefile | 4 ++-- services/filestore/buildscript.txt | 4 +++- services/filestore/docker-compose.ci.yml | 2 +- services/filestore/docker-compose.yml | 2 +- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 75286c139a..e83a0696e0 100644 --- 
a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.21 +# Version: 1.1.24 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -35,7 +35,7 @@ test_clean: $(DOCKER_COMPOSE) down -v -t 0 test_acceptance_pre_run: - @[ ! -f test/acceptance/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run + @[ ! -f test/acceptance/js/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/js/scripts/pre-run build: docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ --tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index 51452eb242..dc8c156383 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -5,4 +5,6 @@ filestore --dependencies=mongo,redis --docker-repos=gcr.io/overleaf-ops --build-target=docker ---script-version=1.1.21 +--script-version=1.1.24 +--env-pass-through= +--public-repo=True diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 765c79aac5..e5557b7e91 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.21 +# Version: 1.1.24 version: "2" diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index e282295b14..b7fd2afc71 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.21 +# Version: 1.1.24 version: "2" From 2ec38068aa6542bf66b37a8a771e7af9383c9fb7 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 6 Dec 2019 14:35:13 +0000 Subject: [PATCH 310/555] add comments about aws-sdk and s3 backends --- services/filestore/app/coffee/AWSSDKPersistorManager.coffee | 5 +++++ services/filestore/app/coffee/S3PersistorManager.coffee | 4 ++++ 2 files changed, 9 insertions(+) diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee index 5be80506f5..168fc68d54 100644 --- a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.coffee @@ -1,3 +1,8 @@ +# This module is not used in production, which currently uses +# S3PersistorManager. The intention is to migrate S3PersistorManager to use the +# latest aws-sdk and delete this module so that PersistorManager would load the +# same backend for both the 's3' and 'aws-sdk' options. 
+ logger = require "logger-sharelatex" aws = require "aws-sdk" _ = require "underscore" diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 8f1b080efd..ca74bdb013 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -1,3 +1,7 @@ +# This module is the one which is used in production. It needs to be migrated +# to use aws-sdk throughout, see the comments in AWSSDKPersistorManager for +# details. The knox library is unmaintained and has bugs. + http = require('http') http.globalAgent.maxSockets = 300 https = require('https') From 86b9e4b53a349a808755b155b5050743db9127c1 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 5 Dec 2019 13:51:27 +0000 Subject: [PATCH 311/555] Rename request -> s3Request to prevent overwriting main import --- services/filestore/app/coffee/S3PersistorManager.coffee | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index ca74bdb013..4c33fd84d2 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -95,9 +95,9 @@ module.exports = } if opts.start? and opts.end? s3Params['Range'] = "bytes=#{opts.start}-#{opts.end}" - request = s3.getObject(s3Params) + s3Request = s3.getObject(s3Params) - request.on 'httpHeaders', (statusCode, headers, response, statusMessage) => + s3Request.on 'httpHeaders', (statusCode, headers, response, statusMessage) => if statusCode in [403, 404] # S3 returns a 403 instead of a 404 when the user doesn't have # permission to list the bucket contents. 
@@ -109,11 +109,11 @@ module.exports = stream = response.httpResponse.createUnbufferedStream() callback(null, stream) - request.on 'error', (err) => + s3Request.on 'error', (err) => logger.err({ err: err, bucketName: bucketName, key: key }, "error getting file stream from s3") callback(err) - request.send() + s3Request.send() getFileSize: (bucketName, key, callback) -> logger.log({ bucketName: bucketName, key: key }, "getting file size from S3") From 96457597ac30dc23cc723072ae0fbb45c8701f81 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 5 Dec 2019 13:55:08 +0000 Subject: [PATCH 312/555] Add fake s3 server and initial config --- services/filestore/config/settings.defaults.coffee | 1 + services/filestore/docker-compose.ci.yml | 11 +++++++++++ services/filestore/docker-compose.yml | 12 ++++++++++++ 3 files changed, 24 insertions(+) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 550cfd2694..29010e85ee 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -16,6 +16,7 @@ settings = s3: key: process.env['AWS_KEY'] secret: process.env['AWS_SECRET'] + endpoint: process.env['AWS_S3_ENDPOINT'] stores: user_files: process.env['AWS_S3_USER_FILES_BUCKET_NAME'] template_files: process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME'] diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index e5557b7e91..790109f70b 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -25,9 +25,16 @@ services: ENABLE_CONVERSIONS: "true" MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test + AWS_KEY: fake + AWS_SECRET: fake + AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files + AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files + AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files + AWS_S3_ENDPOINT: http://fakes3:9090 depends_on: - mongo - redis + - fakes3 user: node command: npm run test:acceptance:_run @@ -46,3 +53,7 @@ services: mongo: image: mongo:3.4 + + fakes3: + image: adobe/s3mock + command: --initialBuckets=fake_user_files,fake_template_files,fake_public_files diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index b7fd2afc71..cd03a9daf4 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -31,10 +31,17 @@ services: ENABLE_CONVERSIONS: "true" LOG_LEVEL: ERROR NODE_ENV: test + AWS_KEY: fake + AWS_SECRET: fake + AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files + AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files + AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files + AWS_S3_ENDPOINT: http://fakes3:9090 user: node depends_on: - mongo - redis + - fakes3 command: npm run test:acceptance @@ -53,4 +60,9 @@ services: mongo: image: mongo:3.4 + fakes3: + image: adobe/s3mock + environment: + - initialBuckets=fake_user_files,fake_template_files,fake_public_files + From c01603b1e71afedeb2471c358e296c3677412a56 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 5 Dec 2019 14:25:25 +0000 Subject: [PATCH 313/555] Support custom S3 endpoints --- .../app/coffee/S3PersistorManager.coffee | 60 ++++++++++++------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 4c33fd84d2..5ee9e25865 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ 
b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -16,10 +16,15 @@ LocalFileWriter = require("./LocalFileWriter") Errors = require("./Errors") _ = require("underscore") awsS3 = require "aws-sdk/clients/s3" +URL = require('url') thirtySeconds = 30 * 1000 buildDefaultOptions = (bucketName, method, key)-> + if settings.filestore.s3.endpoint + endpoint = "#{settings.filestore.s3.endpoint}/#{bucketName}" + else + endpoint = "https://#{bucketName}.s3.amazonaws.com" return { aws: key: settings.filestore.s3.key @@ -27,32 +32,47 @@ buildDefaultOptions = (bucketName, method, key)-> bucket: bucketName method: method timeout: thirtySeconds - uri:"https://#{bucketName}.s3.amazonaws.com/#{key}" + uri:"#{endpoint}/#{key}" } -defaultS3Client = new awsS3({ - credentials: - accessKeyId: settings.filestore.s3.key, - secretAccessKey: settings.filestore.s3.secret -}) +getS3Options = (credentials) -> + options = + credentials: + accessKeyId: credentials.auth_key + secretAccessKey: credentials.auth_secret + + if settings.filestore.s3.endpoint + options.endpoint = settings.filestore.s3.endpoint + options.sslEnabled = false + + return options + +defaultS3Client = new awsS3(getS3Options({ + auth_key: settings.filestore.s3.key, + auth_secret: settings.filestore.s3.secret +})) getS3Client = (credentials) -> if credentials? - return new awsS3({ - credentials: - accessKeyId: credentials.auth_key - secretAccessKey: credentials.auth_secret - }) + return new awsS3(getS3Options(credentials)) else return defaultS3Client +getKnoxClient = (bucketName) => + options = + key: settings.filestore.s3.key + secret: settings.filestore.s3.secret + bucket: bucketName + if settings.filestore.s3.endpoint + endpoint = URL.parse(settings.filestore.s3.endpoint) + options.endpoint = endpoint.hostname + options.port = endpoint.port + return knox.createClient(options) + module.exports = sendFile: (bucketName, key, fsPath, callback)-> - s3Client = knox.createClient - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret - bucket: bucketName + s3Client = getKnoxClient(bucketName) putEventEmiter = s3Client.putFile fsPath, key, (err, res)-> if err? logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath,"something went wrong uploading file to s3" @@ -171,10 +191,7 @@ module.exports = _callback = () -> logger.log key: key, bucketName: bucketName, "deleting directory" - s3Client = knox.createClient - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret - bucket: bucketName + s3Client = getKnoxClient(bucketName) s3Client.list prefix:key, (err, data)-> if err? logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in aws" @@ -200,10 +217,7 @@ module.exports = directorySize:(bucketName, key, callback)-> logger.log bucketName:bucketName, key:key, "get project size in s3" - s3Client = knox.createClient - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret - bucket: bucketName + s3Client = getKnoxClient(bucketName) s3Client.list prefix:key, (err, data)-> if err? 
logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in aws" From 1d1106bc679e344653c5feef96dca4a3124d6a38 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 5 Dec 2019 15:25:35 +0000 Subject: [PATCH 314/555] Add metric for s3 egress --- .../app/coffee/S3PersistorManager.coffee | 5 +++ services/filestore/docker-compose.ci.yml | 1 + services/filestore/docker-compose.yml | 1 + .../acceptance/coffee/SendingFileTest.coffee | 33 ++++++++++++++----- 4 files changed, 32 insertions(+), 8 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 5ee9e25865..14ab1bffe5 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -9,6 +9,7 @@ https.globalAgent.maxSockets = 300 settings = require("settings-sharelatex") request = require("request") logger = require("logger-sharelatex") +metrics = require("metrics-sharelatex") fs = require("fs") knox = require("knox") path = require("path") @@ -73,7 +74,9 @@ module.exports = sendFile: (bucketName, key, fsPath, callback)-> s3Client = getKnoxClient(bucketName) + uploaded = 0 putEventEmiter = s3Client.putFile fsPath, key, (err, res)-> + metrics.count 's3.egress', uploaded if err? logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath,"something went wrong uploading file to s3" return callback(err) @@ -88,6 +91,8 @@ module.exports = putEventEmiter.on "error", (err)-> logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error emmited on put of file" callback err + putEventEmiter.on "progress", (progress)-> + uploaded = progress.written sendStream: (bucketName, key, readStream, callback)-> logger.log bucketName:bucketName, key:key, "sending file to s3" diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 790109f70b..d7b97ac011 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -25,6 +25,7 @@ services: ENABLE_CONVERSIONS: "true" MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test + USE_PROM_METRICS: "true" AWS_KEY: fake AWS_SECRET: fake AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index cd03a9daf4..c2c2ed565d 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -31,6 +31,7 @@ services: ENABLE_CONVERSIONS: "true" LOG_LEVEL: ERROR NODE_ENV: test + USE_PROM_METRICS: "true" AWS_KEY: fake AWS_SECRET: fake AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee index b77afb866b..fb03697cb0 100644 --- a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee @@ -10,8 +10,14 @@ request = require("request") settings = require("settings-sharelatex") FilestoreApp = require "./FilestoreApp" -describe "Filestore", -> +getMetric = (filestoreUrl, metric, cb) -> + request.get "#{filestoreUrl}/metrics", (err, res) -> + expect(res.statusCode).to.equal 200 + metricRegex = new RegExp("^#{metric}{[^}]+} ([0-9]+)$", "m") + cb(parseInt(metricRegex.exec(res.body)?[1] || '0')) + +describe "Filestore", -> before (done)-> @localFileReadPath = "/tmp/filestore_acceptence_tests_file_read.txt" @localFileWritePath = 
"/tmp/filestore_acceptence_tests_file_write.txt" @@ -27,10 +33,10 @@ describe "Filestore", -> beforeEach (done)-> FilestoreApp.ensureRunning => - fs.unlink @localFileWritePath, -> - done() - - + fs.unlink @localFileWritePath, => + getMetric @filestoreUrl, 's3_egress', (metric) => + @previousEgress = metric + done() it "should send a 200 for status endpoint", (done)-> request "#{@filestoreUrl}/status", (err, response, body)-> @@ -59,6 +65,11 @@ describe "Filestore", -> response.statusCode.should.equal 404 done() + it 'should record an egress metric for the upload', (done) -> + getMetric @filestoreUrl, 's3_egress', (metric) => + expect(metric - @previousEgress).to.equal @constantFileContent.length + done() + it "should return the file size on a HEAD request", (done) -> expectedLength = Buffer.byteLength(@constantFileContent) request.head @fileUrl, (err, res) => @@ -128,11 +139,17 @@ describe "Filestore", -> @file_id = Math.random() @fileUrl = "#{@filestoreUrl}/project/acceptence_tests/file/#{@file_id}" @localFileReadPath = __dirname + '/../../fixtures/test.pdf' + fs.stat @localFileReadPath, (err, stat) => + @localFileSize = stat.size + writeStream = request.post(@fileUrl) - writeStream = request.post(@fileUrl) + writeStream.on "end", done + fs.createReadStream(@localFileReadPath).pipe writeStream - writeStream.on "end", done - fs.createReadStream(@localFileReadPath).pipe writeStream + it 'should record an egress metric for the upload', (done) -> + getMetric @filestoreUrl, 's3_egress', (metric) => + expect(metric - @previousEgress).to.equal @localFileSize + done() it "should be able get the file back", (done)-> @timeout(1000 * 10) From 48aa14159163217deb75181e4d7435f325e163e5 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 5 Dec 2019 15:46:14 +0000 Subject: [PATCH 315/555] Add metric for s3 ingress --- .../app/coffee/S3PersistorManager.coffee | 3 ++ .../acceptance/coffee/SendingFileTest.coffee | 38 ++++++++++++++++--- .../coffee/S3PersistorManagerTests.coffee | 1 + 3 files changed, 37 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index 14ab1bffe5..b575001fb3 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -132,6 +132,9 @@ module.exports = logger.log({bucketName: bucketName, key: key }, "error getting file from s3: #{statusCode}") return callback(new Error("Got non-200 response from S3: #{statusCode} #{statusMessage}"), null) stream = response.httpResponse.createUnbufferedStream() + stream.on 'data', (data) -> + metrics.count 's3.ingress', data.byteLength + callback(null, stream) s3Request.on 'error', (err) => diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee index fb03697cb0..cd1fa167f5 100644 --- a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee @@ -9,6 +9,7 @@ fs = require("fs") request = require("request") settings = require("settings-sharelatex") FilestoreApp = require "./FilestoreApp" +async = require('async') getMetric = (filestoreUrl, metric, cb) -> @@ -33,10 +34,19 @@ describe "Filestore", -> beforeEach (done)-> FilestoreApp.ensureRunning => - fs.unlink @localFileWritePath, => - getMetric @filestoreUrl, 's3_egress', (metric) => - @previousEgress = metric - done() + async.parallel [ + (cb) => + 
fs.unlink @localFileWritePath, () -> + cb() + (cb) => + getMetric @filestoreUrl, 's3_egress', (metric) => + @previousEgress = metric + cb() + (cb) => + getMetric @filestoreUrl, 's3_ingress', (metric) => + @previousIngress = metric + cb() + ], done it "should send a 200 for status endpoint", (done)-> request "#{@filestoreUrl}/status", (err, response, body)-> @@ -83,7 +93,14 @@ describe "Filestore", -> body.should.equal @constantFileContent done() - it "should be able to get back the first 8 bytes of the file", (done) -> + it "should record an ingress metric when downloading the file", (done)-> + @timeout(1000 * 10) + request.get @fileUrl, () => + getMetric @filestoreUrl, 's3_ingress', (metric) => + expect(metric - @previousIngress).to.equal @constantFileContent.length + done() + + it "should be able to get back the first 9 bytes of the file", (done) -> @timeout(1000 * 10) options = uri: @fileUrl @@ -93,6 +110,17 @@ describe "Filestore", -> body.should.equal 'hello wor' done() + it "should record an ingress metric for a partial download", (done)-> + @timeout(1000 * 10) + options = + uri: @fileUrl + headers: + 'Range': 'bytes=0-8' + request.get options, ()=> + getMetric @filestoreUrl, 's3_ingress', (metric) => + expect(metric - @previousIngress).to.equal 9 + done() + it "should be able to get back bytes 4 through 10 of the file", (done) -> @timeout(1000 * 10) options = diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee index 8a1b2e4d49..a5ab5c2932 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee @@ -62,6 +62,7 @@ describe "S3PersistorManagerTests", -> describe "success", -> beforeEach () -> @expectedStream = { expectedStream: true } + @expectedStream.on = sinon.stub() @s3Request.send.callsFake () => @s3EventHandlers.httpHeaders(200, {}, @s3Response, "OK") @s3Response.httpResponse.createUnbufferedStream.returns(@expectedStream) From 6f326d56505f53106908b607d139729176fa2c93 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 9 Dec 2019 17:41:20 +0000 Subject: [PATCH 316/555] Use SSL setting based on url protocol --- services/filestore/app/coffee/S3PersistorManager.coffee | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.coffee index b575001fb3..89522a4643 100644 --- a/services/filestore/app/coffee/S3PersistorManager.coffee +++ b/services/filestore/app/coffee/S3PersistorManager.coffee @@ -43,8 +43,9 @@ getS3Options = (credentials) -> secretAccessKey: credentials.auth_secret if settings.filestore.s3.endpoint + endpoint = URL.parse(settings.filestore.s3.endpoint) options.endpoint = settings.filestore.s3.endpoint - options.sslEnabled = false + options.sslEnabled = endpoint.protocol == 'https' return options From cf684dcd9865bd6f9e49b6b8ceb7c2c2f6aa8d0b Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 10 Dec 2019 11:38:19 +0000 Subject: [PATCH 317/555] Fix fakes3 configuration in ci yml --- services/filestore/docker-compose.ci.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index d7b97ac011..c1929c03ca 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -57,4 +57,6 @@ services: fakes3: image: 
adobe/s3mock - command: --initialBuckets=fake_user_files,fake_template_files,fake_public_files + environment: + - initialBuckets=fake_user_files,fake_template_files,fake_public_files + From 237c4113cd0b4158b8445d19d4d0d1b4a9fa07af Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 10 Dec 2019 16:11:44 +0000 Subject: [PATCH 318/555] Ensure fakes3 is healthy before running tests --- services/filestore/docker-compose.ci.yml | 13 +++++++++---- services/filestore/docker-compose.yml | 13 +++++++++---- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index c1929c03ca..42c6ae37b5 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -3,7 +3,7 @@ # https://github.com/sharelatex/sharelatex-dev-environment # Version: 1.1.24 -version: "2" +version: "2.1" services: test_unit: @@ -33,9 +33,12 @@ services: AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files AWS_S3_ENDPOINT: http://fakes3:9090 depends_on: - - mongo - - redis - - fakes3 + mongo: + condition: service_healthy + redis: + condition: service_healthy + fakes3: + condition: service_healthy user: node command: npm run test:acceptance:_run @@ -59,4 +62,6 @@ services: image: adobe/s3mock environment: - initialBuckets=fake_user_files,fake_template_files,fake_public_files + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:9090"] diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index c2c2ed565d..65f18f4d78 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -3,7 +3,7 @@ # https://github.com/sharelatex/sharelatex-dev-environment # Version: 1.1.24 -version: "2" +version: "2.1" services: test_unit: @@ -40,9 +40,12 @@ services: AWS_S3_ENDPOINT: http://fakes3:9090 user: node depends_on: - - mongo - - redis - - fakes3 + mongo: + condition: service_healthy + redis: + condition: service_healthy + fakes3: + condition: service_healthy command: npm run test:acceptance @@ -65,5 +68,7 @@ services: image: adobe/s3mock environment: - initialBuckets=fake_user_files,fake_template_files,fake_public_files + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:9090"] From 56b38af678c8061eaddc88a5b211c4a5c57537b3 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 10 Dec 2019 17:43:34 +0000 Subject: [PATCH 319/555] Explicitly wait for S3 in acceptance tests --- .../acceptance/coffee/FilestoreApp.coffee | 23 ++++++++++++++++++- .../acceptance/coffee/SendingFileTest.coffee | 4 +++- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/services/filestore/test/acceptance/coffee/FilestoreApp.coffee b/services/filestore/test/acceptance/coffee/FilestoreApp.coffee index 818e90ec6f..1b4cc38834 100644 --- a/services/filestore/test/acceptance/coffee/FilestoreApp.coffee +++ b/services/filestore/test/acceptance/coffee/FilestoreApp.coffee @@ -2,6 +2,9 @@ app = require('../../../app') require("logger-sharelatex").logger.level("info") logger = require("logger-sharelatex") Settings = require("settings-sharelatex") +request = require('request') + +S3_TRIES = 30 module.exports = running: false @@ -21,4 +24,22 @@ module.exports = logger.log("filestore running in dev mode") for callback in @callbacks - callback() \ No newline at end of file + callback() + + waitForS3: (callback, tries) -> + return callback() unless Settings.filestore.s3?.endpoint + tries = 1 unless tries + + request.get 
"#{Settings.filestore.s3.endpoint}/", (err, response) => + console.log(err, response?.statusCode, tries) + if !err && [200, 404].includes(response?.statusCode) + return callback() + + if tries == S3_TRIES + return callback('timed out waiting for S3') + + setTimeout( + () => + @waitForS3 callback, tries + 1 + 1000 + ) diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee index cd1fa167f5..4e9443fd88 100644 --- a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee +++ b/services/filestore/test/acceptance/coffee/SendingFileTest.coffee @@ -29,8 +29,10 @@ describe "Filestore", -> "there are 3 lines in all" ].join("\n") - fs.writeFile(@localFileReadPath, @constantFileContent, done) @filestoreUrl = "http://localhost:#{settings.internal.filestore.port}" + fs.writeFile @localFileReadPath, @constantFileContent, (err) -> + return done(err) if err + FilestoreApp.waitForS3(done) beforeEach (done)-> FilestoreApp.ensureRunning => From c5e1584fcc9b94e40eb19d5fe929ca9bbd2f535f Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 2 May 2019 02:04:59 +0200 Subject: [PATCH 320/555] [FSPersistorManager] fix the stream opening for node10+ Attaching a `readable` listener causes the stream to hang otherwise. Signed-off-by: Jakob Ackermann --- services/filestore/app/coffee/FSPersistorManager.coffee | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 61e497a4fb..7d17f6d10a 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -48,10 +48,10 @@ module.exports = fs.open "#{location}/#{filteredName}", 'r', (err, fd) -> if err? logger.err err:err, location:location, filteredName:name, "Error reading from file" - if err.code == 'ENOENT' - return callback new Errors.NotFoundError(err.message), null - else - return callback err, null + if err.code == 'ENOENT' + return callback new Errors.NotFoundError(err.message), null + else + return callback err, null opts.fd = fd sourceStream = fs.createReadStream null, opts return callback null, sourceStream From ed97fcfcd00099758738bc9c8083e19dde9e94fd Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 2 May 2019 01:26:35 +0200 Subject: [PATCH 321/555] [logging] do not overwrite the logger name Signed-off-by: Jakob Ackermann --- services/filestore/app/coffee/FSPersistorManager.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.coffee index 7d17f6d10a..38e30f284f 100644 --- a/services/filestore/app/coffee/FSPersistorManager.coffee +++ b/services/filestore/app/coffee/FSPersistorManager.coffee @@ -36,7 +36,7 @@ module.exports = if err? 
logger.err location:location, target:target, fsPath:fsPath, err:err, "something went wrong writing stream to disk" return callback err - @sendFile location, target, fsPath, (err) -> + @sendFile location, target, fsPath, (err) -> # delete the temporary file created above and return the original error LocalFileWriter.deleteFile fsPath, () -> callback(err) From c9106eff95e5e77e0abf29321e19e2d3eca764c5 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Fri, 13 Dec 2019 15:46:35 +0000 Subject: [PATCH 322/555] Upgrade Docker image to node 10 --- services/filestore/Dockerfile | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index 2845544ae6..5a18b41699 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -1,4 +1,4 @@ -FROM node:6.9.5 as app +FROM node:10.17.0-jessie as app WORKDIR /app @@ -9,10 +9,9 @@ RUN npm install --quiet COPY . /app - RUN npm run compile:all -FROM node:6.9.5 +FROM node:10.17.0-jessie COPY --from=app /app /app From 1798efd4be149e7bcc4490d3cb2cef7e8846d074 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 10:15:52 +0000 Subject: [PATCH 323/555] Install eslint and prettier --- services/filestore/npm-shrinkwrap.json | 4655 ++++++++++++++++++++---- services/filestore/package.json | 14 + 2 files changed, 4000 insertions(+), 669 deletions(-) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 66670c6e69..413c41dca4 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -1,1581 +1,4248 @@ { "name": "filestore-sharelatex", "version": "0.1.4", + "lockfileVersion": 1, + "requires": true, "dependencies": { + "@babel/code-frame": { + "version": "7.5.5", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.5.5.tgz", + "integrity": "sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw==", + "dev": true, + "requires": { + "@babel/highlight": "^7.0.0" + } + }, + "@babel/generator": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.7.4.tgz", + "integrity": "sha512-m5qo2WgdOJeyYngKImbkyQrnUN1mPceaG5BV+G0E3gWsa4l/jCSryWJdM2x8OuGAOyh+3d5pVYfZWCiNFtynxg==", + "dev": true, + "requires": { + "@babel/types": "^7.7.4", + "jsesc": "^2.5.1", + "lodash": "^4.17.13", + "source-map": "^0.5.0" + }, + "dependencies": { + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true + } + } + }, + "@babel/helper-function-name": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz", + "integrity": "sha512-AnkGIdiBhEuiwdoMnKm7jfPfqItZhgRaZfMg1XX3bS25INOnLPjPG1Ppnajh8eqgt5kPJnfqrRHqFqmjKDZLzQ==", + "dev": true, + "requires": { + "@babel/helper-get-function-arity": "^7.7.4", + "@babel/template": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-get-function-arity": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz", + "integrity": 
"sha512-QTGKEdCkjgzgfJ3bAyRwF4yyT3pg+vDgan8DSivq1eS0gwi+KGKE5x8kRcbeFTb/673mkO5SN1IZfmCfA5o+EA==", + "dev": true, + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-split-export-declaration": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz", + "integrity": "sha512-guAg1SXFcVr04Guk9eq0S4/rWS++sbmyqosJzVs8+1fH5NI+ZcmkaSkc7dmtAFbHFva6yRJnjW3yAcGxjueDug==", + "dev": true, + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/highlight": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.5.0.tgz", + "integrity": "sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ==", + "dev": true, + "requires": { + "chalk": "^2.0.0", + "esutils": "^2.0.2", + "js-tokens": "^4.0.0" + } + }, + "@babel/parser": { + "version": "7.7.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.7.5.tgz", + "integrity": "sha512-KNlOe9+/nk4i29g0VXgl8PEXIRms5xKLJeuZ6UptN0fHv+jDiriG+y94X6qAgWTR0h3KaoM1wK5G5h7MHFRSig==", + "dev": true + }, + "@babel/template": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.7.4.tgz", + "integrity": "sha512-qUzihgVPguAzXCK7WXw8pqs6cEwi54s3E+HrejlkuWO6ivMKx9hZl3Y2fSXp9i5HgyWmj7RKP+ulaYnKM4yYxw==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "@babel/parser": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/traverse": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.7.4.tgz", + "integrity": "sha512-P1L58hQyupn8+ezVA2z5KBm4/Zr4lCC8dwKCMYzsa5jFMDMQAzaBNy9W5VjB+KAmBjb40U7a/H6ao+Xo+9saIw==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.5.5", + "@babel/generator": "^7.7.4", + "@babel/helper-function-name": "^7.7.4", + "@babel/helper-split-export-declaration": "^7.7.4", + "@babel/parser": "^7.7.4", + "@babel/types": "^7.7.4", + "debug": "^4.1.0", + "globals": "^11.1.0", + "lodash": "^4.17.13" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true + }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, + "@babel/types": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.7.4.tgz", + "integrity": "sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA==", + "dev": true, + "requires": { + "esutils": "^2.0.2", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + }, + "dependencies": { + "lodash": { + "version": "4.17.15", + "resolved": 
"https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + } + } + }, "@google-cloud/common": { "version": "0.32.1", - "from": "@google-cloud/common@>=0.32.0 <0.33.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz" + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", + "integrity": "sha1-ajLDQBcs6j22Z00ODjTnh0CgBz8=", + "requires": { + "@google-cloud/projectify": "^0.3.3", + "@google-cloud/promisify": "^0.4.0", + "@types/request": "^2.48.1", + "arrify": "^2.0.0", + "duplexify": "^3.6.0", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^3.1.1", + "pify": "^4.0.1", + "retry-request": "^4.0.0", + "teeny-request": "^3.11.3" + } }, "@google-cloud/debug-agent": { "version": "3.2.0", - "from": "@google-cloud/debug-agent@>=3.0.0 <4.0.0", "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.2.0.tgz", + "integrity": "sha1-2qdjWhaYpWY31dxXzhED536uKdM=", + "requires": { + "@google-cloud/common": "^0.32.0", + "@sindresorhus/is": "^0.15.0", + "acorn": "^6.0.0", + "coffeescript": "^2.0.0", + "console-log-level": "^1.4.0", + "extend": "^3.0.1", + "findit2": "^2.2.3", + "gcp-metadata": "^1.0.0", + "lodash.pickby": "^4.6.0", + "p-limit": "^2.2.0", + "pify": "^4.0.1", + "semver": "^6.0.0", + "source-map": "^0.6.1", + "split": "^1.0.0" + }, "dependencies": { "coffeescript": { "version": "2.4.1", - "from": "coffeescript@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.4.1.tgz" + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.4.1.tgz", + "integrity": "sha1-gV/TN98KNNSedKmKbr6pw+eTD3A=" } } }, "@google-cloud/profiler": { "version": "0.2.3", - "from": "@google-cloud/profiler@>=0.2.3 <0.3.0", "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-0.2.3.tgz", + "integrity": "sha1-Fj3738Mwuug1X+RuHlvgZTV7H1w=", + "requires": { + "@google-cloud/common": "^0.26.0", + "@types/console-log-level": "^1.4.0", + "@types/semver": "^5.5.0", + "bindings": "^1.2.1", + "console-log-level": "^1.4.0", + "delay": "^4.0.1", + "extend": "^3.0.1", + "gcp-metadata": "^0.9.0", + "nan": "^2.11.1", + "parse-duration": "^0.1.1", + "pify": "^4.0.0", + "pretty-ms": "^4.0.0", + "protobufjs": "~6.8.6", + "semver": "^5.5.0", + "teeny-request": "^3.3.0" + }, "dependencies": { "@google-cloud/common": { "version": "0.26.2", - "from": "@google-cloud/common@>=0.26.0 <0.27.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz" + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz", + "integrity": "sha1-nFTiRxqEqgMelaJIJJduCA8lVkU=", + "requires": { + "@google-cloud/projectify": "^0.3.2", + "@google-cloud/promisify": "^0.3.0", + "@types/duplexify": "^3.5.0", + "@types/request": "^2.47.0", + "arrify": "^1.0.1", + "duplexify": "^3.6.0", + "ent": "^2.2.0", + "extend": "^3.0.1", + "google-auth-library": "^2.0.0", + "pify": "^4.0.0", + "retry-request": "^4.0.0", + "through2": "^3.0.0" + } }, "@google-cloud/promisify": { "version": "0.3.1", - "from": "@google-cloud/promisify@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz" + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz", + "integrity": "sha1-9kHm2USo4KBe4MsQkd+mAIm+zbo=" }, "arrify": { "version": "1.0.1", - "from": 
"arrify@>=1.0.1 <2.0.0", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz" + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" }, "gcp-metadata": { "version": "0.9.3", - "from": "gcp-metadata@>=0.9.0 <0.10.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz" + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz", + "integrity": "sha1-H510lfdGChRSZIHynhFZbdVj3SY=", + "requires": { + "gaxios": "^1.0.2", + "json-bigint": "^0.3.0" + } }, "google-auth-library": { "version": "2.0.2", - "from": "google-auth-library@>=2.0.0 <3.0.0", "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", + "integrity": "sha1-ejFdIDZ0Svavyth7IQ7mY4tA9Xs=", + "requires": { + "axios": "^0.18.0", + "gcp-metadata": "^0.7.0", + "gtoken": "^2.3.0", + "https-proxy-agent": "^2.2.1", + "jws": "^3.1.5", + "lru-cache": "^5.0.0", + "semver": "^5.5.0" + }, "dependencies": { "gcp-metadata": { "version": "0.7.0", - "from": "gcp-metadata@>=0.7.0 <0.8.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz" + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz", + "integrity": "sha1-bDXbtSvaMqQnu5yY9UI33dG1QG8=", + "requires": { + "axios": "^0.18.0", + "extend": "^3.0.1", + "retry-axios": "0.3.2" + } } } }, "nan": { "version": "2.14.0", - "from": "nan@>=2.11.1 <3.0.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz" + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", + "integrity": "sha1-eBj3IgJ7JFmobwKV1DTR/CM2xSw=" }, "readable-stream": { "version": "3.4.0", - "from": "readable-stream@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz" + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", + "integrity": "sha1-pRwmdUZY4KPCHb9ZFjvUW6b0R/w=", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } }, "semver": { "version": "5.7.0", - "from": "semver@>=5.5.0 <6.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz" + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz", + "integrity": "sha1-eQp89v6lRZuslhELKbYEEtyP+Ws=" }, "string_decoder": { "version": "1.2.0", - "from": "string_decoder@>=1.1.1 <2.0.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.2.0.tgz" + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.2.0.tgz", + "integrity": "sha1-/obnOLGVRK/nBGkkOyoe6SQOro0=", + "requires": { + "safe-buffer": "~5.1.0" + } }, "through2": { "version": "3.0.1", - "from": "through2@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz" + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz", + "integrity": "sha1-OSducTwzAu3544jdnIEt07glvVo=", + "requires": { + "readable-stream": "2 || 3" + } } } }, "@google-cloud/projectify": { "version": "0.3.3", - "from": "@google-cloud/projectify@>=0.3.3 <0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz" + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", + "integrity": "sha1-vekQPVCyCj6jM334xng6dm5w1B0=" }, "@google-cloud/promisify": { "version": "0.4.0", - "from": "@google-cloud/promisify@>=0.4.0 <0.5.0", - "resolved": 
"https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz" + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz", + "integrity": "sha1-T7/PTYW7ai5MzwWqY9KxDWyarZs=" }, "@google-cloud/trace-agent": { "version": "3.6.1", - "from": "@google-cloud/trace-agent@>=3.2.0 <4.0.0", "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.6.1.tgz", + "integrity": "sha1-W+dEE5TQ6ldY8o25IqUAT/PwO+w=", + "requires": { + "@google-cloud/common": "^0.32.1", + "builtin-modules": "^3.0.0", + "console-log-level": "^1.4.0", + "continuation-local-storage": "^3.2.1", + "extend": "^3.0.0", + "gcp-metadata": "^1.0.0", + "hex2dec": "^1.0.1", + "is": "^3.2.0", + "methods": "^1.1.1", + "require-in-the-middle": "^4.0.0", + "semver": "^6.0.0", + "shimmer": "^1.2.0", + "uuid": "^3.0.1" + }, "dependencies": { "uuid": { "version": "3.3.2", - "from": "uuid@>=3.0.1 <4.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha1-G0r0lV6zB3xQHCOHL8ZROBFYcTE=" } } }, "@protobufjs/aspromise": { "version": "1.1.2", - "from": "@protobufjs/aspromise@>=1.1.2 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78=" }, "@protobufjs/base64": { "version": "1.1.2", - "from": "@protobufjs/base64@>=1.1.2 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha1-TIVzDlm5ofHzSQR9vyQpYDS7JzU=" }, "@protobufjs/codegen": { "version": "2.0.4", - "from": "@protobufjs/codegen@>=2.0.4 <3.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha1-fvN/DQEPsCitGtWXIuUG2SYoFcs=" }, "@protobufjs/eventemitter": { "version": "1.1.0", - "from": "@protobufjs/eventemitter@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A=" }, "@protobufjs/fetch": { "version": "1.1.0", - "from": "@protobufjs/fetch@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=", + "requires": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } }, "@protobufjs/float": { "version": "1.0.2", - "from": "@protobufjs/float@>=1.0.2 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=" }, "@protobufjs/inquire": { "version": "1.1.0", - "from": "@protobufjs/inquire@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=" }, "@protobufjs/path": { "version": "1.1.2", - "from": "@protobufjs/path@>=1.1.2 <2.0.0", - "resolved": 
"https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=" }, "@protobufjs/pool": { "version": "1.1.0", - "from": "@protobufjs/pool@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=" }, "@protobufjs/utf8": { "version": "1.1.0", - "from": "@protobufjs/utf8@>=1.1.0 <2.0.0", - "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz" + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=" }, "@sindresorhus/is": { "version": "0.15.0", - "from": "@sindresorhus/is@>=0.15.0 <0.16.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz" + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz", + "integrity": "sha1-lpFbqgXmpqHRN7rfSYTT/AWCC7Y=" }, "@sinonjs/commons": { "version": "1.4.0", - "from": "@sinonjs/commons@>=1.2.0 <2.0.0", "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.4.0.tgz", - "dev": true + "integrity": "sha1-ez7C2Wr0gdegMhJS57HJRyTsWng=", + "dev": true, + "requires": { + "type-detect": "4.0.8" + } }, "@sinonjs/formatio": { "version": "3.2.1", - "from": "@sinonjs/formatio@>=3.0.0 <4.0.0", "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.2.1.tgz", + "integrity": "sha1-UjEPL5vLxnvawYyUrUkBuV/eJn4=", "dev": true, + "requires": { + "@sinonjs/commons": "^1", + "@sinonjs/samsam": "^3.1.0" + }, "dependencies": { "@sinonjs/samsam": { "version": "3.3.2", - "from": "@sinonjs/samsam@>=3.1.0 <4.0.0", "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-3.3.2.tgz", - "dev": true + "integrity": "sha1-Y5QuPV6wt59t4775q/rRX7S2QBs=", + "dev": true, + "requires": { + "@sinonjs/commons": "^1.0.2", + "array-from": "^2.1.1", + "lodash": "^4.17.11" + } } } }, "@sinonjs/samsam": { "version": "2.1.3", - "from": "@sinonjs/samsam@>=2.1.2 <3.0.0", "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-2.1.3.tgz", + "integrity": "sha1-Ys8qm2JO3HlRNBNf43/Cro6ja+M=", "dev": true }, "@sinonjs/text-encoding": { "version": "0.7.1", - "from": "@sinonjs/text-encoding@>=0.7.1 <0.8.0", "resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz", + "integrity": "sha1-jaXGUwkVZT86Hzj9XxAdjD+AecU=", "dev": true }, "@types/caseless": { "version": "0.12.2", - "from": "@types/caseless@*", - "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz" + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz", + "integrity": "sha1-9l09Y4ngHutFi9VNyPUrlalGO8g=" }, "@types/console-log-level": { "version": "1.4.0", - "from": "@types/console-log-level@>=1.4.0 <2.0.0", - "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", + "integrity": "sha1-7/ccQa689RyLpa2LBdfVQkviuPM=" }, "@types/duplexify": { "version": "3.6.0", - "from": "@types/duplexify@>=3.5.0 <4.0.0", - "resolved": "https://registry.npmjs.org/@types/duplexify/-/duplexify-3.6.0.tgz" + "resolved": "https://registry.npmjs.org/@types/duplexify/-/duplexify-3.6.0.tgz", + "integrity": "sha1-38grZL06IWj1vSZESvFlvwI33Ng=", + "requires": { + "@types/node": "*" + } + }, + 
"@types/eslint-visitor-keys": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@types/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz", + "integrity": "sha512-OCutwjDZ4aFS6PB1UZ988C4YgwlBHJd6wCeQqaLdmadZ/7e+w79+hbMUFC1QXDNCmdyoRfAFdm0RypzwR+Qpag==", + "dev": true }, "@types/form-data": { "version": "2.2.1", - "from": "@types/form-data@*", - "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-2.2.1.tgz" + "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-2.2.1.tgz", + "integrity": "sha1-7is7jqoRwJOCiZU2BrdFtzjFSx4=", + "requires": { + "@types/node": "*" + } + }, + "@types/json-schema": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.3.tgz", + "integrity": "sha512-Il2DtDVRGDcqjDtE+rF8iqg1CArehSK84HZJCT7AMITlyXRBpuPhqGLDQMowraqqu1coEaimg4ZOqggt6L6L+A==", + "dev": true }, "@types/long": { "version": "4.0.0", - "from": "@types/long@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz" + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz", + "integrity": "sha1-cZVR0jUtMBrIuB23Mqy2vcKNve8=" }, "@types/node": { "version": "12.0.8", - "from": "@types/node@*", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.0.8.tgz" + "resolved": "https://registry.npmjs.org/@types/node/-/node-12.0.8.tgz", + "integrity": "sha1-VRRmvhGyrcPz1HFWdY9hC9n2sdg=" }, "@types/request": { "version": "2.48.1", - "from": "@types/request@>=2.47.0 <3.0.0", - "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.1.tgz" + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.1.tgz", + "integrity": "sha1-5ALWkapmcPu/8ZV7FfEnAjCrQvo=", + "requires": { + "@types/caseless": "*", + "@types/form-data": "*", + "@types/node": "*", + "@types/tough-cookie": "*" + } }, "@types/semver": { "version": "5.5.0", - "from": "@types/semver@>=5.5.0 <6.0.0", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz" + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", + "integrity": "sha1-FGwqKe59O65L8vyydGNuJkyBPEU=" }, "@types/tough-cookie": { "version": "2.3.5", - "from": "@types/tough-cookie@*", - "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.5.tgz" + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.5.tgz", + "integrity": "sha1-naRO11VxmZtlw3tgybK4jbVMWF0=" + }, + "@typescript-eslint/experimental-utils": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-1.13.0.tgz", + "integrity": "sha512-zmpS6SyqG4ZF64ffaJ6uah6tWWWgZ8m+c54XXgwFtUv0jNz8aJAVx8chMCvnk7yl6xwn8d+d96+tWp7fXzTuDg==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.3", + "@typescript-eslint/typescript-estree": "1.13.0", + "eslint-scope": "^4.0.0" + }, + "dependencies": { + "eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + } + } + }, + "@typescript-eslint/parser": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-1.13.0.tgz", + "integrity": "sha512-ITMBs52PCPgLb2nGPoeT4iU3HdQZHcPaZVw+7CsFagRJHUhyeTgorEwHXhFf3e7Evzi8oujKNpHc8TONth8AdQ==", + "dev": true, + "requires": { + 
"@types/eslint-visitor-keys": "^1.0.0", + "@typescript-eslint/experimental-utils": "1.13.0", + "@typescript-eslint/typescript-estree": "1.13.0", + "eslint-visitor-keys": "^1.0.0" + } + }, + "@typescript-eslint/typescript-estree": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-1.13.0.tgz", + "integrity": "sha512-b5rCmd2e6DCC6tCTN9GSUAuxdYwCM/k/2wdjHGrIRGPSJotWMCe/dGpi66u42bhuh8q3QBzqM4TMA1GUUCJvdw==", + "dev": true, + "requires": { + "lodash.unescape": "4.0.1", + "semver": "5.5.0" + } }, "abort-controller": { "version": "3.0.0", - "from": "abort-controller@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha1-6vVNU7YrrkE46AnKIlyEOabvs5I=", + "requires": { + "event-target-shim": "^5.0.0" + } }, "accept-encoding": { "version": "0.1.0", - "from": "accept-encoding@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz" + "resolved": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz", + "integrity": "sha1-XdiLjfcfHcLlzGuVZezOHjmaMz4=" }, "accepts": { "version": "1.3.5", - "from": "accepts@>=1.3.5 <1.4.0", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz" + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz", + "integrity": "sha1-63d99gEXI6OxTopywIBcjoZ0a9I=", + "requires": { + "mime-types": "~2.1.18", + "negotiator": "0.6.1" + } }, "acorn": { "version": "6.1.1", - "from": "acorn@>=6.0.0 <7.0.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.1.1.tgz" + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.1.1.tgz", + "integrity": "sha1-fSWuBbuK0fm2mRCOEJTs14hK3B8=" + }, + "acorn-jsx": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.1.0.tgz", + "integrity": "sha512-tMUqwBWfLFbJbizRmEcWSLw6HnFzfdJs2sOJEOwwtVPMoH/0Ay+E703oZz78VSXZiiDcZrQ5XKjPIUQixhmgVw==", + "dev": true }, "agent-base": { "version": "4.3.0", - "from": "agent-base@>=4.1.0 <5.0.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz" + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", + "integrity": "sha1-gWXwHENgCbzK0LHRIvBe13Dvxu4=", + "requires": { + "es6-promisify": "^5.0.0" + } }, "ajv": { "version": "6.10.0", - "from": "ajv@>=6.5.5 <7.0.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.0.tgz" + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.0.tgz", + "integrity": "sha1-kNDVRDnaWHzX6EO/twRfUL0ivfE=", + "requires": { + "fast-deep-equal": "^2.0.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ansi-escapes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.0.tgz", + "integrity": "sha512-EiYhwo0v255HUL6eDyuLrXEkTi7WwVCLAw+SeOQ7M7qdun1z1pum4DEm/nuqIVbPvi9RPPc9k9LbyBv6H0DwVg==", + "dev": true, + "requires": { + "type-fest": "^0.8.1" + } + }, + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": 
"sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } }, "array-flatten": { "version": "1.1.1", - "from": "array-flatten@1.1.1", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" }, "array-from": { "version": "2.1.1", - "from": "array-from@>=2.1.1 <3.0.0", "resolved": "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz", + "integrity": "sha1-z+nYwmYoudxa7MYqn12PHzUsEZU=", "dev": true }, + "array-includes": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.0.tgz", + "integrity": "sha512-ONOEQoKrvXPKk7Su92Co0YMqYO32FfqJTzkKU9u2UpIXyYZIzLSvpdg4AwvSw4mSUW0czu6inK+zby6Oj6gDjQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.0" + } + }, + "array.prototype.flat": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.3.tgz", + "integrity": "sha512-gBlRZV0VSmfPIeWfuuy56XZMvbVfbEUnOXUvt3F/eUUUSyzlgLxhEX4YAEpxNAogRGehPSnfXyPtYyKAhkzQhQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1" + } + }, "arrify": { "version": "2.0.1", - "from": "arrify@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz" + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha1-yWVekzHgq81YjSp8rX6ZVvZnAfo=" }, "asn1": { "version": "0.2.4", - "from": "asn1@>=0.2.3 <0.3.0", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz" + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", + "integrity": "sha1-jSR136tVO7M+d7VOWeiAu4ziMTY=", + "requires": { + "safer-buffer": "~2.1.0" + } }, "assert-plus": { "version": "1.0.0", - "from": "assert-plus@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" }, "assertion-error": { "version": "1.1.0", - "from": "assertion-error@>=1.1.0 <2.0.0", "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha1-5gtrDo8wG9l+U3UhW9pAbIURjAs=", + "dev": true + }, + "astral-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz", + "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==", "dev": true }, "async": { "version": "0.2.10", - "from": "async@>=0.2.10 <0.3.0", - "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz" + "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "integrity": "sha1-trvgsGdLnXGXCMo43owjfLUmw9E=" }, "async-listener": { "version": "0.6.10", - "from": "async-listener@>=0.6.0 <0.7.0", "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz", + "integrity": "sha1-p8l6vlcLpgLXgic8DeYKUePhfLw=", + "requires": { + "semver": "^5.3.0", + 
"shimmer": "^1.1.0" + }, "dependencies": { "semver": { "version": "5.7.0", - "from": "semver@>=5.3.0 <6.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz" + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz", + "integrity": "sha1-eQp89v6lRZuslhELKbYEEtyP+Ws=" } } }, "asynckit": { "version": "0.4.0", - "from": "asynckit@>=0.4.0 <0.5.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" }, "aws-sdk": { "version": "2.315.0", - "from": "aws-sdk@>=2.1.39 <3.0.0", "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.315.0.tgz", + "integrity": "sha1-fzkxYq8DjL73IjdERKm8muG9u+k=", + "requires": { + "buffer": "4.9.1", + "events": "1.1.1", + "ieee754": "1.1.8", + "jmespath": "0.15.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "uuid": "3.1.0", + "xml2js": "0.4.19" + }, "dependencies": { "uuid": { "version": "3.1.0", - "from": "uuid@3.1.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz" + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz", + "integrity": "sha1-PdPT55Crwk17DToDT/q6vijrvAQ=" } } }, "aws-sign": { "version": "0.2.1", - "from": "aws-sign@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.1.tgz" + "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.1.tgz", + "integrity": "sha1-uWGyLwuqTxXsJBFA83dtbBQoVtA=" }, "aws-sign2": { "version": "0.7.0", - "from": "aws-sign2@>=0.7.0 <0.8.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz" + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" }, "aws4": { "version": "1.8.0", - "from": "aws4@>=1.8.0 <2.0.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz" + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", + "integrity": "sha1-8OAD2cqef1nHpQiUXXsu+aBKVC8=" }, "axios": { "version": "0.18.1", - "from": "axios@>=0.18.0 <0.19.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz" + "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz", + "integrity": "sha1-/z8N4ue10YDnV62YAA8Qgbh7zqM=", + "requires": { + "follow-redirects": "1.5.10", + "is-buffer": "^2.0.2" + } + }, + "babel-eslint": { + "version": "10.0.3", + "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.0.3.tgz", + "integrity": "sha512-z3U7eMY6r/3f3/JB9mTsLjyxrv0Yb1zb8PCWCLpguxfCzBIZUwy23R1t/XKewP+8mEN2Ck8Dtr4q20z6ce6SoA==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "@babel/parser": "^7.0.0", + "@babel/traverse": "^7.0.0", + "@babel/types": "^7.0.0", + "eslint-visitor-keys": "^1.0.0", + "resolve": "^1.12.0" + }, + "dependencies": { + "resolve": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.13.1.tgz", + "integrity": "sha512-CxqObCX8K8YtAhOBRg+lrcdn+LK+WYOS8tSjqSFbjtrI5PnS63QPhZl4+yKfrU9tdsbMu9Anr/amegT87M9Z6w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + } + } }, "balanced-match": { "version": "1.0.0", - "from": "balanced-match@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" }, "base64-js": { "version": "1.3.0", - "from": "base64-js@>=1.0.2 <2.0.0", - "resolved": 
"https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz" + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz", + "integrity": "sha1-yrHmEY8FEJXli1KBrqjBzSK/wOM=" }, "bcrypt-pbkdf": { "version": "1.0.2", - "from": "bcrypt-pbkdf@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "requires": { + "tweetnacl": "^0.14.3" + } }, "best-encoding": { "version": "0.1.1", - "from": "best-encoding@>=0.1.1 <0.2.0", - "resolved": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz" + "resolved": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz", + "integrity": "sha1-GVIT2rysBFgYuAe3ox+Dn63cl04=", + "requires": { + "accept-encoding": "~0.1.0" + } }, "bignumber.js": { "version": "7.2.1", - "from": "bignumber.js@>=7.0.0 <8.0.0", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz" + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", + "integrity": "sha1-gMBIdZ2CaACAfEv9Uh5Q7bulel8=" }, "bindings": { "version": "1.5.0", - "from": "bindings@>=1.2.1 <2.0.0", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz" + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha1-EDU8npRTNLwFEabZCzj7x8nFBN8=", + "requires": { + "file-uri-to-path": "1.0.0" + } }, "bintrees": { "version": "1.0.1", - "from": "bintrees@1.0.1", - "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz" + "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", + "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" }, "bl": { "version": "0.7.0", - "from": "bl@>=0.7.0 <0.8.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz" + "resolved": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz", + "integrity": "sha1-P7BnBgKsKHjrdw3CA58YNr5irls=", + "requires": { + "readable-stream": "~1.0.2" + } }, "body-parser": { "version": "1.18.3", - "from": "body-parser@>=1.2.0 <2.0.0", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.3.tgz" + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.3.tgz", + "integrity": "sha1-WykhmP/dVTs6DyDe0FkrlWlVyLQ=", + "requires": { + "bytes": "3.0.0", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "~1.1.2", + "http-errors": "~1.6.3", + "iconv-lite": "0.4.23", + "on-finished": "~2.3.0", + "qs": "6.5.2", + "raw-body": "2.3.3", + "type-is": "~1.6.16" + } + }, + "boolify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/boolify/-/boolify-1.0.1.tgz", + "integrity": "sha1-tcCeF8rNET0Rt7s+04TMASmU2Gs=", + "dev": true }, "boom": { "version": "0.3.8", - "from": "boom@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz" + "resolved": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz", + "integrity": "sha1-yM2wQUNZEnQWKMBE7Mcy0dF8Ceo=", + "requires": { + "hoek": "0.7.x" + } }, "brace-expansion": { "version": "1.1.11", - "from": "brace-expansion@>=1.1.7 <2.0.0", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha1-PH/L9SnYcibz0vUrlm/1Jx60Qd0=", + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } }, "browser-stdout": { "version": "1.3.1", - "from": "browser-stdout@1.3.1", - "resolved": 
"https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz" + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=" }, "buffer": { "version": "4.9.1", - "from": "buffer@4.9.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz" + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", + "integrity": "sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=", + "requires": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4", + "isarray": "^1.0.0" + } }, "buffer-equal-constant-time": { "version": "1.0.1", - "from": "buffer-equal-constant-time@1.0.1", - "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz" + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=" }, "builtin-modules": { "version": "3.1.0", - "from": "builtin-modules@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz" + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", + "integrity": "sha1-qtl8FRMet2tltQ7yCOdYTNdqdIQ=" }, "bunyan": { "version": "1.5.1", - "from": "bunyan@1.5.1", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz", - "dev": true + "integrity": "sha1-X259RMQ7lS9WsPQTCeOrEjkbTi0=", + "dev": true, + "requires": { + "dtrace-provider": "~0.6", + "mv": "~2", + "safe-json-stringify": "~1" + } }, "bytes": { "version": "3.0.0", - "from": "bytes@3.0.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" + }, + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "camelcase-keys": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.1.1.tgz", + "integrity": "sha512-kEPCddRFChEzO0d6w61yh0WbBiSv9gBnfZWGfXRYPlGqIdIGef6HMR6pgqVSEWCYkrp8B0AtEpEXNY+Jx0xk1A==", + "dev": true, + "requires": { + "camelcase": "^5.3.1", + "map-obj": "^4.0.0", + "quick-lru": "^4.0.1" + } }, "caseless": { "version": "0.3.0", - "from": "caseless@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz" + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz", + "integrity": "sha1-U06XkWOH07cGtk/eu6xGQ4RQk08=" }, "chai": { "version": "4.2.0", - "from": "chai@4.2.0", "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz", + "integrity": "sha1-dgqnLPION5XoSxKHfODoNzeqKeU=", + "dev": true, + "requires": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^3.0.1", + "get-func-name": "^2.0.0", + "pathval": "^1.1.0", + "type-detect": "^4.0.5" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", 
+ "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "chardet": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, "check-error": { "version": "1.0.2", - "from": "check-error@>=1.0.2 <2.0.0", "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", "dev": true }, + "cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "requires": { + "restore-cursor": "^3.1.0" + } + }, + "cli-width": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", + "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=", + "dev": true + }, + "cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, + "requires": { + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } + } + }, "coffee-script": { "version": "1.6.0", - "from": "coffee-script@1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", + "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true }, "combined-stream": { "version": "0.0.7", - "from": "combined-stream@>=0.0.4 <0.1.0", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz" + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", + "integrity": "sha1-ATfmV7qlp1QcV6w3rF/AfXO03B8=", + "requires": { + "delayed-stream": "0.0.5" + } + }, + "common-tags": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", + "integrity": 
"sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==", + "dev": true }, "concat-map": { "version": "0.0.1", - "from": "concat-map@0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, "console-log-level": { "version": "1.4.1", - "from": "console-log-level@>=1.4.0 <2.0.0", - "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz" + "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", + "integrity": "sha1-nFprue8e9lsFq6gwKLD/iUzfYwo=" + }, + "contains-path": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", + "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", + "dev": true }, "content-disposition": { "version": "0.5.2", - "from": "content-disposition@0.5.2", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz" + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", + "integrity": "sha1-DPaLud318r55YcOoUXjLhdunjLQ=" }, "content-type": { "version": "1.0.4", - "from": "content-type@>=1.0.4 <1.1.0", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz" + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", + "integrity": "sha1-4TjMdeBAxyexlm/l5fjJruJW/js=" }, "continuation-local-storage": { "version": "3.2.1", - "from": "continuation-local-storage@>=3.2.1 <4.0.0", - "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.2.1.tgz" + "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.2.1.tgz", + "integrity": "sha1-EfYT906RT+mzTJKtLSj+auHbf/s=", + "requires": { + "async-listener": "^0.6.0", + "emitter-listener": "^1.1.1" + } }, "cookie": { "version": "0.3.1", - "from": "cookie@0.3.1", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz" + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", + "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" }, "cookie-jar": { "version": "0.2.0", - "from": "cookie-jar@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz" + "resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz", + "integrity": "sha1-ZOzAasl423leS1KQy+SLo3gUAPo=" }, "cookie-signature": { "version": "1.0.6", - "from": "cookie-signature@1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz" + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + }, + "core-js": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.5.0.tgz", + "integrity": "sha512-Ifh3kj78gzQ7NAoJXeTu+XwzDld0QRIwjBLRqAMhuLhP3d2Av5wmgE9ycfnvK6NAEjTkQ1sDPeoEZAWO3Hx1Uw==", + "dev": true }, "core-util-is": { "version": "1.0.2", - "from": "core-util-is@>=1.0.0 <1.1.0", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + }, + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": 
"sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } }, "cryptiles": { "version": "0.1.3", - "from": "cryptiles@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz" + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz", + "integrity": "sha1-GlVnNPBtJLo0hirpy55wmjr7/xw=", + "requires": { + "boom": "0.3.x" + } }, "dashdash": { "version": "1.14.1", - "from": "dashdash@>=1.12.0 <2.0.0", - "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz" + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "requires": { + "assert-plus": "^1.0.0" + } }, "debug": { "version": "2.6.9", - "from": "debug@2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha1-XRKFFd8TT/Mn6QpMk/Tgd6U2NB8=", + "requires": { + "ms": "2.0.0" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true }, "deep-eql": { "version": "3.0.1", - "from": "deep-eql@>=3.0.1 <4.0.0", "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha1-38lARACtHI/gI+faHfHBR8S0RN8=", + "dev": true, + "requires": { + "type-detect": "^4.0.0" + } + }, + "deep-is": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", + "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", "dev": true }, + "define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, + "requires": { + "object-keys": "^1.0.12" + } + }, "delay": { "version": "4.3.0", - "from": "delay@>=4.0.1 <5.0.0", - "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz" + "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", + "integrity": "sha1-7+6/uPVFV5yzlrOnIkQ+yW0UxQ4=" }, "delayed-stream": { "version": "0.0.5", - "from": "delayed-stream@0.0.5", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", + "integrity": "sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8=" }, "depd": { "version": "1.1.2", - "from": "depd@>=1.1.2 <1.2.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" }, "destroy": { "version": "1.0.4", - "from": "destroy@>=1.0.4 <1.1.0", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz" + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", + "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" }, "diff": { "version": "3.5.0", - "from": "diff@>=3.5.0 <4.0.0", "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=", "dev": true }, + "dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": 
"sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true + }, + "doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "requires": { + "esutils": "^2.0.2" + } + }, "dtrace-provider": { "version": "0.6.0", - "from": "dtrace-provider@>=0.6.0 <0.7.0", "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "integrity": "sha1-CweNVReTfYcxAUUtkUZzdVe3XlE=", "dev": true, - "optional": true + "optional": true, + "requires": { + "nan": "^2.0.8" + } }, "duplexify": { "version": "3.7.1", - "from": "duplexify@>=3.6.0 <4.0.0", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", + "integrity": "sha1-Kk31MX9sz9kfhtb9JdjYoQO4gwk=", + "requires": { + "end-of-stream": "^1.0.0", + "inherits": "^2.0.1", + "readable-stream": "^2.0.0", + "stream-shift": "^1.0.0" + }, "dependencies": { "readable-stream": { "version": "2.3.6", - "from": "readable-stream@>=2.0.0 <3.0.0", - "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz" + "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha1-sRwn2IuP8fvgcGQ8+UsMea4bCq8=", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } }, "string_decoder": { "version": "1.1.1", - "from": "string_decoder@>=1.1.1 <1.2.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha1-nPFhG6YmhdcDCunkujQUnDrwP8g=", + "requires": { + "safe-buffer": "~5.1.0" + } } } }, "ecc-jsbn": { "version": "0.1.2", - "from": "ecc-jsbn@>=0.1.1 <0.2.0", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz" + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "requires": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } }, "ecdsa-sig-formatter": { "version": "1.0.11", - "from": "ecdsa-sig-formatter@1.0.11", - "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz" + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha1-rg8PothQRe8UqBfao86azQSJ5b8=", + "requires": { + "safe-buffer": "^5.0.1" + } }, "ee-first": { "version": "1.1.1", - "from": "ee-first@1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz" + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, "emitter-listener": { "version": "1.1.2", - "from": "emitter-listener@>=1.1.1 <2.0.0", - "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz", + "integrity": "sha1-VrFA6PaZI3Wz18ssqxzHQy2WMug=", + "requires": { + "shimmer": "^1.2.0" + } + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": 
true }, "encodeurl": { "version": "1.0.2", - "from": "encodeurl@>=1.0.2 <1.1.0", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" }, "end-of-stream": { "version": "1.4.1", - "from": "end-of-stream@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz" + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", + "integrity": "sha1-7SljTRm6ukY7bOa4CjchPqtx7EM=", + "requires": { + "once": "^1.4.0" + } }, "ent": { "version": "2.2.0", - "from": "ent@>=2.2.0 <3.0.0", - "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz" + "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", + "integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=" + }, + "error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "requires": { + "is-arrayish": "^0.2.1" + } + }, + "es-abstract": { + "version": "1.17.0-next.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.0-next.1.tgz", + "integrity": "sha512-7MmGr03N7Rnuid6+wyhD9sHNE2n4tFSwExnU2lQl3lIo2ShXWGePY80zYaoMOmILWv57H0amMjZGHNzzGG70Rw==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.1.4", + "is-regex": "^1.0.4", + "object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.0", + "string.prototype.trimright": "^2.1.0" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } }, "es6-promise": { "version": "4.2.8", - "from": "es6-promise@>=4.0.3 <5.0.0", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz" + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", + "integrity": "sha1-TrIVlMlyvEBVPSduUQU5FD21Pgo=" }, "es6-promisify": { "version": "5.0.0", - "from": "es6-promisify@>=5.0.0 <6.0.0", - "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz" + "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", + "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", + "requires": { + "es6-promise": "^4.0.3" + } }, "escape-html": { "version": "1.0.3", - "from": "escape-html@>=1.0.3 <1.1.0", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" }, "escape-string-regexp": { "version": "1.0.5", - "from": "escape-string-regexp@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" + }, + "eslint": { + "version": "6.7.2", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.7.2.tgz", + "integrity": 
"sha512-qMlSWJaCSxDFr8fBPvJM9kJwbazrhNcBU3+DszDW1OlEwKBBRWsJc7NJFelvwQpanHCR14cOLD41x8Eqvo3Nng==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.10.0", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^5.0.0", + "eslint-utils": "^1.4.3", + "eslint-visitor-keys": "^1.1.0", + "espree": "^6.1.2", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^5.0.0", + "globals": "^12.1.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^7.0.0", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.14", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.3", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^6.1.2", + "strip-ansi": "^5.2.0", + "strip-json-comments": "^3.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + } + }, + "eslint-config-prettier": { + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.7.0.tgz", + "integrity": "sha512-FamQVKM3jjUVwhG4hEMnbtsq7xOIDm+SY5iBPfR8gKsJoAB2IQnNF+bk1+8Fy44Nq7PPJaLvkRxILYdJWoguKQ==", + "dev": true, + "requires": { + "get-stdin": "^6.0.0" + } + }, + "eslint-config-standard": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-14.1.0.tgz", + "integrity": "sha512-EF6XkrrGVbvv8hL/kYa/m6vnvmUT+K82pJJc4JJVMM6+Qgqh0pnwprSxdduDLB9p/7bIxD+YV5O0wfb8lmcPbA==", + "dev": true + }, + "eslint-import-resolver-node": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.2.tgz", + "integrity": "sha512-sfmTqJfPSizWu4aymbPr4Iidp5yKm8yDkHp+Ir3YiTHiiDfxh69mOUsmiqW6RZ9zRXFaF64GtYmN7e+8GHBv6Q==", + "dev": true, + "requires": { + "debug": "^2.6.9", + "resolve": "^1.5.0" + } + }, + "eslint-module-utils": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.5.0.tgz", + "integrity": "sha512-kCo8pZaNz2dsAW7nCUjuVoI11EBXXpIzfNxmaoLhXoRDOnqXLC4iSGVRdZPhOitfbdEfMEfKOiENaK6wDPZEGw==", + "dev": true, + "requires": { + "debug": "^2.6.9", + "pkg-dir": "^2.0.0" + } + }, + "eslint-plugin-chai-expect": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-expect/-/eslint-plugin-chai-expect-2.1.0.tgz", + "integrity": "sha512-rd0/4mjMV6c3i0o4DKkWI4uaFN9DK707kW+/fDphaDI6HVgxXnhML9Xgt5vHnTXmSSnDhupuCFBgsEAEpchXmQ==", + "dev": true + }, + "eslint-plugin-chai-friendly": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.5.0.tgz", + "integrity": "sha512-Pxe6z8C9fP0pn2X2nGFU/b3GBOCM/5FVus1hsMwJsXP3R7RiXFl7g0ksJbsc0GxiLyidTW4mEFk77qsNn7Tk7g==", + "dev": true + }, + "eslint-plugin-es": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-2.0.0.tgz", + "integrity": "sha512-f6fceVtg27BR02EYnBhgWLFQfK6bN4Ll0nQFrBHOlCsAyxeZkn0NHns5O0YZOPrV1B3ramd6cgFwaoFLcSkwEQ==", + "dev": true, + "requires": { + "eslint-utils": "^1.4.2", + "regexpp": "^3.0.0" + }, + "dependencies": { + "regexpp": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.0.0.tgz", + "integrity": 
"sha512-Z+hNr7RAVWxznLPuA7DIh8UNX1j9CDrUQxskw9IrBE1Dxue2lyXT+shqEIeLUjrokxIP8CMy1WkjgG3rTsd5/g==", + "dev": true + } + } + }, + "eslint-plugin-import": { + "version": "2.19.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.19.1.tgz", + "integrity": "sha512-x68131aKoCZlCae7rDXKSAQmbT5DQuManyXo2sK6fJJ0aK5CWAkv6A6HJZGgqC8IhjQxYPgo6/IY4Oz8AFsbBw==", + "dev": true, + "requires": { + "array-includes": "^3.0.3", + "array.prototype.flat": "^1.2.1", + "contains-path": "^0.1.0", + "debug": "^2.6.9", + "doctrine": "1.5.0", + "eslint-import-resolver-node": "^0.3.2", + "eslint-module-utils": "^2.4.1", + "has": "^1.0.3", + "minimatch": "^3.0.4", + "object.values": "^1.1.0", + "read-pkg-up": "^2.0.0", + "resolve": "^1.12.0" + }, + "dependencies": { + "doctrine": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", + "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", + "dev": true, + "requires": { + "esutils": "^2.0.2", + "isarray": "^1.0.0" + } + } + } + }, + "eslint-plugin-mocha": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-6.2.2.tgz", + "integrity": "sha512-oNhPzfkT6Q6CJ0HMVJ2KLxEWG97VWGTmuHOoRcDLE0U88ugUyFNV9wrT2XIt5cGtqc5W9k38m4xTN34L09KhBA==", + "dev": true, + "requires": { + "ramda": "^0.26.1" + } + }, + "eslint-plugin-node": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-10.0.0.tgz", + "integrity": "sha512-1CSyM/QCjs6PXaT18+zuAXsjXGIGo5Rw630rSKwokSs2jrYURQc4R5JZpoanNCqwNmepg+0eZ9L7YiRUJb8jiQ==", + "dev": true, + "requires": { + "eslint-plugin-es": "^2.0.0", + "eslint-utils": "^1.4.2", + "ignore": "^5.1.1", + "minimatch": "^3.0.4", + "resolve": "^1.10.1", + "semver": "^6.1.0" + }, + "dependencies": { + "ignore": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.4.tgz", + "integrity": "sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==", + "dev": true + } + } + }, + "eslint-plugin-prettier": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-3.1.2.tgz", + "integrity": "sha512-GlolCC9y3XZfv3RQfwGew7NnuFDKsfI4lbvRK+PIIo23SFH+LemGs4cKwzAaRa+Mdb+lQO/STaIayno8T5sJJA==", + "dev": true, + "requires": { + "prettier-linter-helpers": "^1.0.0" + } + }, + "eslint-plugin-promise": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-4.2.1.tgz", + "integrity": "sha512-VoM09vT7bfA7D+upt+FjeBO5eHIJQBUWki1aPvB+vbNiHS3+oGIJGIeyBtKQTME6UPXXy3vV07OL1tHd3ANuDw==", + "dev": true + }, + "eslint-plugin-standard": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-standard/-/eslint-plugin-standard-4.0.1.tgz", + "integrity": "sha512-v/KBnfyaOMPmZc/dmc6ozOdWqekGp7bBGq4jLAecEfPGmfKiWS4sA8sC0LqiV9w5qmXAtXVn4M3p1jSyhY85SQ==", + "dev": true + }, + "eslint-scope": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz", + "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "eslint-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", + "integrity": 
"sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, + "eslint-visitor-keys": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz", + "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==", + "dev": true + }, + "espree": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/espree/-/espree-6.1.2.tgz", + "integrity": "sha512-2iUPuuPP+yW1PZaMSDM9eyVf8D5P0Hi8h83YtZ5bPc/zHYjII5khoixIUTMO794NOY8F/ThF1Bo8ncZILarUTA==", + "dev": true, + "requires": { + "acorn": "^7.1.0", + "acorn-jsx": "^5.1.0", + "eslint-visitor-keys": "^1.1.0" + } + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + "esquery": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.0.1.tgz", + "integrity": "sha512-SmiyZ5zIWH9VM+SRUReLS5Q8a7GxtRdxEBVZpm98rJM7Sb+A9DVCndXfkeFUd3byderg+EbDkfnevfCwynWaNA==", + "dev": true, + "requires": { + "estraverse": "^4.0.0" + } + }, + "esrecurse": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz", + "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==", + "dev": true, + "requires": { + "estraverse": "^4.1.0" + } + }, + "estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true + }, + "esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true }, "etag": { "version": "1.8.1", - "from": "etag@>=1.8.1 <1.9.0", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz" + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" }, "event-target-shim": { "version": "5.0.1", - "from": "event-target-shim@>=5.0.0 <6.0.0", - "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz" + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha1-XU0+vflYPWOlMzzi3rdICrKwV4k=" }, "events": { "version": "1.1.1", - "from": "events@1.1.1", - "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz" + "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", + "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=" }, "express": { "version": "4.16.3", - "from": "express@>=4.2.0 <5.0.0", "resolved": "https://registry.npmjs.org/express/-/express-4.16.3.tgz", + "integrity": "sha1-avilAjUNsyRuzEvs9rWjTSL37VM=", + "requires": { + "accepts": "~1.3.5", + "array-flatten": "1.1.1", + "body-parser": "1.18.2", + "content-disposition": "0.5.2", + "content-type": "~1.0.4", + "cookie": "0.3.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "~1.1.2", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.1.1", + "fresh": "0.5.2", + "merge-descriptors": "1.0.1", + "methods": "~1.1.2", + 
"on-finished": "~2.3.0", + "parseurl": "~1.3.2", + "path-to-regexp": "0.1.7", + "proxy-addr": "~2.0.3", + "qs": "6.5.1", + "range-parser": "~1.2.0", + "safe-buffer": "5.1.1", + "send": "0.16.2", + "serve-static": "1.13.2", + "setprototypeof": "1.1.0", + "statuses": "~1.4.0", + "type-is": "~1.6.16", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, "dependencies": { "body-parser": { "version": "1.18.2", - "from": "body-parser@1.18.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.2.tgz" + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.2.tgz", + "integrity": "sha1-h2eKGdhLR9hZuDGZvVm84iKxBFQ=", + "requires": { + "bytes": "3.0.0", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "~1.1.1", + "http-errors": "~1.6.2", + "iconv-lite": "0.4.19", + "on-finished": "~2.3.0", + "qs": "6.5.1", + "raw-body": "2.3.2", + "type-is": "~1.6.15" + } }, "iconv-lite": { "version": "0.4.19", - "from": "iconv-lite@0.4.19", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz" + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz", + "integrity": "sha1-90aPYBNfXl2tM5nAqBvpoWA6CCs=" }, "qs": { "version": "6.5.1", - "from": "qs@6.5.1", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz" + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", + "integrity": "sha1-NJzfbu+J7EXBLX1es/wMhwNDptg=" }, "raw-body": { "version": "2.3.2", - "from": "raw-body@2.3.2", "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.2.tgz", + "integrity": "sha1-vNYMd9Prk83gBQKVw/N5OJvIj4k=", + "requires": { + "bytes": "3.0.0", + "http-errors": "1.6.2", + "iconv-lite": "0.4.19", + "unpipe": "1.0.0" + }, "dependencies": { "depd": { "version": "1.1.1", - "from": "depd@1.1.1", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.1.tgz" + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.1.tgz", + "integrity": "sha1-V4O04cRZ8G+lyif5kfPQbnoxA1k=" }, "http-errors": { "version": "1.6.2", - "from": "http-errors@1.6.2", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.2.tgz" + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.2.tgz", + "integrity": "sha1-CgAsyFcHGSp+eUbO7cERVfYOxzY=", + "requires": { + "depd": "1.1.1", + "inherits": "2.0.3", + "setprototypeof": "1.0.3", + "statuses": ">= 1.3.1 < 2" + } }, "setprototypeof": { "version": "1.0.3", - "from": "setprototypeof@1.0.3", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.3.tgz" + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.3.tgz", + "integrity": "sha1-ZlZ+NwQ+608E2RvWWMDL77VbjgQ=" } } }, "statuses": { "version": "1.4.0", - "from": "statuses@>=1.4.0 <1.5.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", + "integrity": "sha1-u3PURtonlhBu/MG2AaJT1sRr0Ic=" } } }, "extend": { "version": "3.0.2", - "from": "extend@>=3.0.2 <3.1.0", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha1-+LETa0Bx+9jrFAr/hYsQGewpFfo=" + }, + "external-editor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, + "requires": { + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": 
"^0.0.33" + } }, "extsprintf": { "version": "1.3.0", - "from": "extsprintf@1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz" + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" }, "fast-deep-equal": { "version": "2.0.1", - "from": "fast-deep-equal@>=2.0.1 <3.0.0", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz" + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", + "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=" + }, + "fast-diff": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz", + "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==", + "dev": true }, "fast-json-stable-stringify": { "version": "2.0.0", - "from": "fast-json-stable-stringify@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", + "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=" + }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "dev": true }, "fast-text-encoding": { "version": "1.0.0", - "from": "fast-text-encoding@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz", + "integrity": "sha1-PlzoKTQJz6pxd6cbnKhOGx5vJe8=" + }, + "figures": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.1.0.tgz", + "integrity": "sha512-ravh8VRXqHuMvZt/d8GblBeqDMkdJMBdv/2KntFH+ra5MXkO7nxNKpzQ3n6QD/2da1kH0aWmNISdvhM7gl2gVg==", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "file-entry-cache": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", + "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "dev": true, + "requires": { + "flat-cache": "^2.0.1" + } }, "file-uri-to-path": { "version": "1.0.0", - "from": "file-uri-to-path@1.0.0", - "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha1-VTp7hEb/b2hDWcRF8eN6BdrMM90=" }, "finalhandler": { "version": "1.1.1", - "from": "finalhandler@1.1.1", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.1.tgz", + "integrity": "sha1-7r9O2EAHnIP0JJA4ydcDAIMBsQU=", + "requires": { + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "~2.3.0", + "parseurl": "~1.3.2", + "statuses": "~1.4.0", + "unpipe": "~1.0.0" + }, "dependencies": { "statuses": { "version": "1.4.0", - "from": "statuses@>=1.4.0 <1.5.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", + "integrity": "sha1-u3PURtonlhBu/MG2AaJT1sRr0Ic=" } } }, + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": 
true, + "requires": { + "locate-path": "^2.0.0" + } + }, "findit2": { "version": "2.2.3", - "from": "findit2@>=2.2.3 <3.0.0", - "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz" + "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", + "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" + }, + "flat-cache": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", + "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "dev": true, + "requires": { + "flatted": "^2.0.0", + "rimraf": "2.6.3", + "write": "1.0.3" + } + }, + "flatted": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.1.tgz", + "integrity": "sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg==", + "dev": true }, "follow-redirects": { "version": "1.5.10", - "from": "follow-redirects@1.5.10", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", + "integrity": "sha1-e3qfmuov3/NnhqlP9kPtB/T/Xio=", + "requires": { + "debug": "=3.1.0" + }, "dependencies": { "debug": { "version": "3.1.0", - "from": "debug@3.1.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz" + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha1-W7WgZyYotkFJVmuhaBnmFRjGcmE=", + "requires": { + "ms": "2.0.0" + } } } }, "forever-agent": { "version": "0.2.0", - "from": "forever-agent@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz" + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz", + "integrity": "sha1-4cJcetROCcOPIzh2x2/MJP+EOx8=" }, "form-data": { "version": "0.0.10", - "from": "form-data@>=0.0.3 <0.1.0", "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz", + "integrity": "sha1-2zRaU3jYau6x7V1VO4aawZLS9e0=", + "requires": { + "async": "~0.2.7", + "combined-stream": "~0.0.4", + "mime": "~1.2.2" + }, "dependencies": { "mime": { "version": "1.2.11", - "from": "mime@>=1.2.2 <1.3.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" } } }, "forwarded": { "version": "0.1.2", - "from": "forwarded@>=0.1.2 <0.2.0", - "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz" + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", + "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" }, "fresh": { "version": "0.5.2", - "from": "fresh@0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz" + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" }, "fs-extra": { "version": "1.0.0", - "from": "fs-extra@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-1.0.0.tgz", + "integrity": "sha1-zTzl9+fLYUWIP8rjGR6Yd/hYeVA=", + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^2.1.0", + "klaw": "^1.0.0" + } }, "fs.realpath": { "version": "1.0.0", - "from": "fs.realpath@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + }, + "function-bind": { + "version": "1.1.1", 
+ "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", + "dev": true }, "gaxios": { "version": "1.8.4", - "from": "gaxios@>=1.2.1 <2.0.0", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz" + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", + "integrity": "sha1-4Iw0/pPAqbZ6Ure556ZOZDX5ozk=", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.3.0" + } }, "gcp-metadata": { "version": "1.0.0", - "from": "gcp-metadata@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz", + "integrity": "sha1-UhJEAin6CZ/C98KlzcuVV16bLKY=", + "requires": { + "gaxios": "^1.0.2", + "json-bigint": "^0.3.0" + } + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true }, "get-func-name": { "version": "2.0.0", - "from": "get-func-name@>=2.0.0 <3.0.0", "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true + }, + "get-stdin": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-6.0.0.tgz", + "integrity": "sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g==", "dev": true }, "getpass": { "version": "0.1.7", - "from": "getpass@>=0.1.1 <0.2.0", - "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz" + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "requires": { + "assert-plus": "^1.0.0" + } }, "gettemporaryfilepath": { "version": "0.0.1", - "from": "gettemporaryfilepath@0.0.1", - "resolved": "https://registry.npmjs.org/gettemporaryfilepath/-/gettemporaryfilepath-0.0.1.tgz" + "resolved": "https://registry.npmjs.org/gettemporaryfilepath/-/gettemporaryfilepath-0.0.1.tgz", + "integrity": "sha1-uKLHAUu1zUFTTpg7XKFgo3RwhGk=" }, "glob": { "version": "6.0.4", - "from": "glob@>=6.0.1 <7.0.0", "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "optional": true + "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", + "requires": { + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "2 || 3", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz", + "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "globals": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-12.3.0.tgz", + "integrity": "sha512-wAfjdLgFsPZsklLJvOBUBmzYE8/CwhEqSBEMRXA3qxIiNtyqvjYurAtIfDh6chlEPUfmTY3MnZh5Hfh4q0UlIw==", + "dev": true, + "requires": { + "type-fest": "^0.8.1" + } }, "google-auth-library": { 
"version": "3.1.2", - "from": "google-auth-library@>=3.1.1 <4.0.0", "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", + "integrity": "sha1-/y+IzVzSEYpXvT1a08CTyIN/w1A=", + "requires": { + "base64-js": "^1.3.0", + "fast-text-encoding": "^1.0.0", + "gaxios": "^1.2.1", + "gcp-metadata": "^1.0.0", + "gtoken": "^2.3.2", + "https-proxy-agent": "^2.2.1", + "jws": "^3.1.5", + "lru-cache": "^5.0.0", + "semver": "^5.5.0" + }, "dependencies": { "semver": { "version": "5.7.0", - "from": "semver@>=5.5.0 <6.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz" + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz", + "integrity": "sha1-eQp89v6lRZuslhELKbYEEtyP+Ws=" } } }, "google-p12-pem": { "version": "1.0.4", - "from": "google-p12-pem@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz" + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", + "integrity": "sha1-t3+4M6Lrn388aJ4uVPCVJ293dgU=", + "requires": { + "node-forge": "^0.8.0", + "pify": "^4.0.0" + } }, "graceful-fs": { "version": "4.1.11", - "from": "graceful-fs@>=4.1.2 <5.0.0", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz" + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", + "integrity": "sha1-Dovf5NHduIVNZOBOp8AOKgJuVlg=" }, "gtoken": { "version": "2.3.3", - "from": "gtoken@>=2.3.2 <3.0.0", "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", + "integrity": "sha1-in/hVcXODEtxyIbPsoKpBg2UpkE=", + "requires": { + "gaxios": "^1.0.4", + "google-p12-pem": "^1.0.0", + "jws": "^3.1.5", + "mime": "^2.2.0", + "pify": "^4.0.0" + }, "dependencies": { "mime": { "version": "2.4.4", - "from": "mime@>=2.2.0 <3.0.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz" + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha1-vXuRE1/GsBzePpuuM9ZZtj2IV+U=" } } }, "har-schema": { "version": "2.0.0", - "from": "har-schema@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" }, "har-validator": { "version": "5.1.3", - "from": "har-validator@>=5.1.0 <5.2.0", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz" + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", + "integrity": "sha1-HvievT5JllV2de7ZiTEQ3DUPoIA=", + "requires": { + "ajv": "^6.5.5", + "har-schema": "^2.0.0" + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-ansi": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + } + } }, "has-flag": { "version": "3.0.0", - "from": "has-flag@>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" + "resolved": 
"https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true }, "hawk": { "version": "0.10.2", - "from": "hawk@>=0.10.2 <0.11.0", - "resolved": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz" + "resolved": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz", + "integrity": "sha1-mzYd7pWpMWQObVBOBWCaj8OsRdI=", + "requires": { + "boom": "0.3.x", + "cryptiles": "0.1.x", + "hoek": "0.7.x", + "sntp": "0.1.x" + } }, "he": { "version": "1.1.1", - "from": "he@1.1.1", - "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz" + "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", + "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=" }, "heapdump": { "version": "0.3.9", - "from": "heapdump@>=0.3.2 <0.4.0", - "resolved": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.9.tgz" + "resolved": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.9.tgz", + "integrity": "sha1-A8dOsN9dZ74Jgug0KbqcnSs7f3g=" }, "hex2dec": { "version": "1.1.2", - "from": "hex2dec@>=1.0.1 <2.0.0", - "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", + "integrity": "sha1-jhzkvvNqdPfVcjw/swkMKGAHczg=" }, "hoek": { "version": "0.7.6", - "from": "hoek@>=0.7.0 <0.8.0", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz" + "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz", + "integrity": "sha1-YPvZBFV1Qc0rh5Wr8wihs3cOFVo=" + }, + "hosted-git-info": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz", + "integrity": "sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg==", + "dev": true }, "http-errors": { "version": "1.6.3", - "from": "http-errors@>=1.6.3 <1.7.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", + "integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=", + "requires": { + "depd": "~1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.0", + "statuses": ">= 1.4.0 < 2" + } }, "http-signature": { "version": "1.2.0", - "from": "http-signature@>=1.2.0 <1.3.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz" + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "requires": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + } }, "https-proxy-agent": { "version": "2.2.1", - "from": "https-proxy-agent@>=2.2.1 <3.0.0", "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz", + "integrity": "sha1-UVUpcPoE1yPgTFbQQXjD+SWSu8A=", + "requires": { + "agent-base": "^4.1.0", + "debug": "^3.1.0" + }, "dependencies": { "debug": { "version": "3.2.6", - "from": "debug@>=3.1.0 <4.0.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz" + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha1-6D0X3hbYp++3cX7b5fsQE17uYps=", + "requires": { + "ms": "^2.1.1" + } }, "ms": { "version": "2.1.2", - "from": "ms@>=2.1.1 <3.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha1-0J0fNXtEP0kzgqjrPM0YOHKuYAk=" } } }, "iconv-lite": { "version": "0.4.23", - "from": "iconv-lite@0.4.23", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz" + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz", + "integrity": "sha1-KXhx9jvlB63Pv8pxXQzQ7thOmmM=", + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } }, "ieee754": { "version": "1.1.8", - "from": "ieee754@1.1.8", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz" + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz", + "integrity": "sha1-vjPUCsEO8ZJnAfbwii2G+/0a0+Q=" + }, + "ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true + }, + "import-fresh": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz", + "integrity": "sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==", + "dev": true, + "requires": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + } + }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true + }, + "indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true }, "inflight": { "version": "1.0.6", - "from": "inflight@>=1.0.4 <2.0.0", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } }, "inherits": { "version": "2.0.3", - "from": "inherits@2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + }, + "inquirer": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.1.tgz", + "integrity": "sha512-V1FFQ3TIO15det8PijPLFR9M9baSlnRs9nL7zWu1MNVA2T9YVl9ZbrHJhYs7e9X8jeMZ3lr2JH/rdHFgNCBdYw==", + "dev": true, + "requires": { + "ansi-escapes": "^4.2.1", + "chalk": "^2.4.2", + "cli-cursor": "^3.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.15", + "mute-stream": "0.0.8", + "run-async": "^2.2.0", + "rxjs": "^6.5.3", + "string-width": "^4.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + } }, "ipaddr.js": { "version": "1.8.0", - "from": "ipaddr.js@1.8.0", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.8.0.tgz" + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.8.0.tgz", + "integrity": "sha1-6qM9bd16zo9/b+DJygRA5wZzix4=" }, "is": { "version": "3.3.0", - "from": "is@>=3.2.0 <4.0.0", - "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz" + "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz", + "integrity": "sha1-Yc/23TxBk9uUo9YlggcrROVkXXk=" + }, + "is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": 
"sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true }, "is-buffer": { "version": "2.0.3", - "from": "is-buffer@>=2.0.2 <3.0.0", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.3.tgz" + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.3.tgz", + "integrity": "sha1-Ts8/z3ScvR5HJonhCaxmJhol5yU=" + }, + "is-callable": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", + "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==", + "dev": true + }, + "is-date-object": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz", + "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=", + "dev": true + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-promise": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", + "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=", + "dev": true + }, + "is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-symbol": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", + "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "dev": true, + "requires": { + "has-symbols": "^1.0.1" + } }, "is-typedarray": { "version": "1.0.0", - "from": "is-typedarray@>=1.0.0 <1.1.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" }, "isarray": { "version": "1.0.0", - "from": "isarray@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true }, "isstream": { "version": "0.1.2", - "from": "isstream@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" }, "jmespath": { "version": "0.15.0", - "from": "jmespath@0.15.0", - "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz" + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", + 
"integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=" + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } }, "jsbn": { "version": "0.1.1", - "from": "jsbn@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz" + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + }, + "jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true }, "json-bigint": { "version": "0.3.0", - "from": "json-bigint@>=0.3.0 <0.4.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz" + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", + "integrity": "sha1-DM2RLEuCcNBfBW+9E4FLU9OCWx4=", + "requires": { + "bignumber.js": "^7.0.0" + } }, "json-schema": { "version": "0.2.3", - "from": "json-schema@0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz" + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" }, "json-schema-traverse": { "version": "0.4.1", - "from": "json-schema-traverse@>=0.4.1 <0.5.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha1-afaofZUTq4u4/mO9sJecRI5oRmA=" + }, + "json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", + "dev": true }, "json-stringify-safe": { "version": "5.0.1", - "from": "json-stringify-safe@5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" }, "jsonfile": { "version": "2.4.0", - "from": "jsonfile@>=2.1.0 <3.0.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz" + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", + "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", + "requires": { + "graceful-fs": "^4.1.6" + } }, "jsprim": { "version": "1.4.1", - "from": "jsprim@>=1.2.2 <2.0.0", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz" + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", + "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "requires": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.2.3", + "verror": "1.10.0" + } }, "just-extend": { "version": "4.0.2", - "from": "just-extend@>=4.0.2 <5.0.0", "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.0.2.tgz", + "integrity": 
"sha1-8/R/ffyg+YnFVBCn68iFSwcQivw=", "dev": true }, "jwa": { "version": "1.4.1", - "from": "jwa@>=1.4.1 <2.0.0", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz" + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha1-dDwymFy56YZVUw1TZBtmyGRbA5o=", + "requires": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } }, "jws": { "version": "3.2.2", - "from": "jws@>=3.1.5 <4.0.0", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz" + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha1-ABCZ82OUaMlBQADpmZX6UvtHgwQ=", + "requires": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } }, "klaw": { "version": "1.3.1", - "from": "klaw@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz" + "resolved": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz", + "integrity": "sha1-QIhDO0azsbolnXh4XY6W9zugJDk=", + "requires": { + "graceful-fs": "^4.1.9" + } }, "knox": { "version": "0.9.2", - "from": "knox@>=0.9.1 <0.10.0", "resolved": "https://registry.npmjs.org/knox/-/knox-0.9.2.tgz", + "integrity": "sha1-NzZZNmniTwJP2vcjtqHcSv2DmnE=", + "requires": { + "debug": "^1.0.2", + "mime": "*", + "once": "^1.3.0", + "stream-counter": "^1.0.0", + "xml2js": "^0.4.4" + }, "dependencies": { "debug": { "version": "1.0.5", - "from": "debug@>=1.0.2 <2.0.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.5.tgz" + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.5.tgz", + "integrity": "sha1-9yQSF0MPmd7EwrRz6rkiKOh0wqw=", + "requires": { + "ms": "2.0.0" + } } } }, + "levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "dev": true, + "requires": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + } + }, + "load-json-file": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", + "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "strip-bom": "^3.0.0" + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + } + }, "lodash": { "version": "4.17.11", - "from": "lodash@>=4.17.11 <5.0.0", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz", + "integrity": "sha1-s56mIp72B+zYniyN8SU2iRysm40=", "dev": true }, "lodash.get": { "version": "4.4.2", - "from": "lodash.get@>=4.4.2 <5.0.0", "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", + "dev": true + }, + "lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=", + "dev": true + }, + "lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, "lodash.pickby": { "version": "4.6.0", - "from": "lodash.pickby@>=4.6.0 <5.0.0", - "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz" + "resolved": 
"https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", + "integrity": "sha1-feoh2MGNdwOifHBMFdO4SmfjOv8=" + }, + "lodash.unescape": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.unescape/-/lodash.unescape-4.0.1.tgz", + "integrity": "sha1-vyJJiGzlFM2hEvrpIYzcBlIR/Jw=", + "dev": true }, "logger-sharelatex": { "version": "1.7.0", - "from": "logger-sharelatex@1.7.0", "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.7.0.tgz", + "integrity": "sha1-XuMje84im1rITZ7SLoXa6eI3/HQ=", + "requires": { + "bunyan": "1.8.12", + "raven": "1.1.3", + "request": "2.88.0" + }, "dependencies": { "bunyan": { "version": "1.8.12", - "from": "bunyan@1.8.12", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz" + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", + "integrity": "sha1-8VDw9nSKvdcq6uhPBEA74u8RN5c=", + "requires": { + "dtrace-provider": "~0.8", + "moment": "^2.10.6", + "mv": "~2", + "safe-json-stringify": "~1" + } }, "caseless": { "version": "0.12.0", - "from": "caseless@>=0.12.0 <0.13.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz" + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" }, "combined-stream": { "version": "1.0.8", - "from": "combined-stream@>=1.0.6 <1.1.0", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha1-w9RaizT9cwYxoRCoolIGgrMdWn8=", + "requires": { + "delayed-stream": "~1.0.0" + } }, "delayed-stream": { "version": "1.0.0", - "from": "delayed-stream@>=1.0.0 <1.1.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" }, "dtrace-provider": { "version": "0.8.7", - "from": "dtrace-provider@>=0.8.0 <0.9.0", "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.7.tgz", - "optional": true + "integrity": "sha1-3JObTT4GIM/gwc2APQ0tftBP/QQ=", + "optional": true, + "requires": { + "nan": "^2.10.0" + } }, "forever-agent": { "version": "0.6.1", - "from": "forever-agent@>=0.6.1 <0.7.0", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" }, "form-data": { "version": "2.3.3", - "from": "form-data@>=2.3.2 <2.4.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz" + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha1-3M5SwF9kTymManq5Nr1yTO/786Y=", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } }, "oauth-sign": { "version": "0.9.0", - "from": "oauth-sign@>=0.9.0 <0.10.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz" + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha1-R6ewFrqmi1+g7PPe4IqFxnmsZFU=" }, "request": { "version": "2.88.0", - "from": "request@>=2.88.0 <3.0.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz" + "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", + "integrity": "sha1-nC/KT301tZLv5Xx/ClXoEFIST+8=", + "requires": { + "aws-sign2": "~0.7.0", 
+ "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.0", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.4.3", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + } }, "safe-buffer": { "version": "5.1.2", - "from": "safe-buffer@>=5.1.2 <6.0.0", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha1-mR7GnSluAxN0fVm9/St0XDX4go0=" }, "tunnel-agent": { "version": "0.6.0", - "from": "tunnel-agent@>=0.6.0 <0.7.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz" + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "requires": { + "safe-buffer": "^5.0.1" + } }, "uuid": { "version": "3.3.2", - "from": "uuid@>=3.3.2 <4.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha1-G0r0lV6zB3xQHCOHL8ZROBFYcTE=" + } + } + }, + "loglevel": { + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.6.6.tgz", + "integrity": "sha512-Sgr5lbboAUBo3eXCSPL4/KoVz3ROKquOjcctxmHIt+vol2DrqTQe3SwkKKuYhEiWB5kYa13YyopJ69deJ1irzQ==", + "dev": true + }, + "loglevel-colored-level-prefix": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/loglevel-colored-level-prefix/-/loglevel-colored-level-prefix-1.0.0.tgz", + "integrity": "sha1-akAhj9x64V/HbD0PPmdsRlOIYD4=", + "dev": true, + "requires": { + "chalk": "^1.1.3", + "loglevel": "^1.4.1" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + }, + "ansi-styles": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", + "dev": true + }, + "chalk": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "dev": true, + "requires": { + "ansi-styles": "^2.2.1", + "escape-string-regexp": "^1.0.2", + "has-ansi": "^2.0.0", + "strip-ansi": "^3.0.0", + "supports-color": "^2.0.0" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + } } } }, "lolex": { "version": "3.1.0", - "from": "lolex@>=3.0.0 <4.0.0", "resolved": "https://registry.npmjs.org/lolex/-/lolex-3.1.0.tgz", + "integrity": "sha1-Gn/rL+/XWz46f3nw4RDZR24pRDQ=", "dev": true }, "long": { "version": "4.0.0", - "from": "long@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz" + "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", + "integrity": "sha1-mntxz7fTYaGU6lVSQckvdGjVvyg=" }, "lru-cache": { "version": "5.1.1", - "from": "lru-cache@>=5.0.0 <6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz" + "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha1-HaJ+ZxAnGUdpXa9oSOhH8B2EuSA=", + "requires": { + "yallist": "^3.0.2" + } }, "lsmod": { "version": "1.0.0", - "from": "lsmod@1.0.0", - "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", + "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" }, "lynx": { "version": "0.1.1", - "from": "lynx@>=0.1.1 <0.2.0", - "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz" + "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", + "integrity": "sha1-Mxjc7xaQi4KG6Bisz9sxzXQkj50=", + "requires": { + "mersenne": "~0.0.3", + "statsd-parser": "~0.0.4" + } + }, + "make-plural": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/make-plural/-/make-plural-4.3.0.tgz", + "integrity": "sha512-xTYd4JVHpSCW+aqDof6w/MebaMVNTVYBZhbB/vi513xXdiPT92JMVCo0Jq8W2UZnzYRFeVbQiQ+I25l13JuKvA==", + "dev": true, + "requires": { + "minimist": "^1.2.0" + } + }, + "map-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", + "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==", + "dev": true }, "media-typer": { "version": "0.3.0", - "from": "media-typer@0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz" + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" }, "merge-descriptors": { "version": "1.0.1", - "from": "merge-descriptors@1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz" + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" }, "mersenne": { "version": "0.0.4", - "from": "mersenne@>=0.0.3 <0.1.0", - "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz" + "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", + "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" + }, + "messageformat": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/messageformat/-/messageformat-2.3.0.tgz", + "integrity": "sha512-uTzvsv0lTeQxYI2y1NPa1lItL5VRI8Gb93Y2K2ue5gBPyrbJxfDi/EYWxh2PKv5yO42AJeeqblS9MJSh/IEk4w==", + "dev": true, + "requires": { + "make-plural": "^4.3.0", + "messageformat-formatters": "^2.0.1", + "messageformat-parser": "^4.1.2" + } + }, + "messageformat-formatters": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/messageformat-formatters/-/messageformat-formatters-2.0.1.tgz", + "integrity": "sha512-E/lQRXhtHwGuiQjI7qxkLp8AHbMD5r2217XNe/SREbBlSawe0lOqsFb7rflZJmlQFSULNLIqlcjjsCPlB3m3Mg==", + "dev": true + }, + "messageformat-parser": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/messageformat-parser/-/messageformat-parser-4.1.2.tgz", + "integrity": "sha512-7dWuifeyldz7vhEuL96Kwq1fhZXBW+TUfbnHN4UCrCxoXQTYjHnR78eI66Gk9LaLLsAvzPNVJBaa66DRfFNaiA==", + "dev": true }, "methods": { "version": "1.1.2", - "from": "methods@>=1.1.2 <1.2.0", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" }, "metrics-sharelatex": { "version": "2.2.0", - "from": "metrics-sharelatex@2.2.0", "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.2.0.tgz", + "integrity": 
"sha1-RM9oy9FuUQYgfrZ+PvkAhaQWwqk=", + "requires": { + "@google-cloud/debug-agent": "^3.0.0", + "@google-cloud/profiler": "^0.2.3", + "@google-cloud/trace-agent": "^3.2.0", + "coffee-script": "1.6.0", + "lynx": "~0.1.1", + "prom-client": "^11.1.3", + "underscore": "~1.6.0" + }, "dependencies": { "underscore": { "version": "1.6.0", - "from": "underscore@>=1.6.0 <1.7.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz" + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", + "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" } } }, "mime": { "version": "1.4.1", - "from": "mime@1.4.1", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz" + "resolved": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz", + "integrity": "sha1-Eh+evEnjdm8xGnbh+hyAA8SwOqY=" }, "mime-db": { "version": "1.36.0", - "from": "mime-db@>=1.36.0 <1.37.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.36.0.tgz" + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.36.0.tgz", + "integrity": "sha1-UCBHjbPH/pOq17vMTc+GnEM2M5c=" }, "mime-types": { "version": "2.1.20", - "from": "mime-types@>=2.1.18 <2.2.0", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.20.tgz" + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.20.tgz", + "integrity": "sha1-kwy3GdVx6QNzhSD4RwkRVIyizBk=", + "requires": { + "mime-db": "~1.36.0" + } + }, + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true }, "minimatch": { "version": "3.0.4", - "from": "minimatch@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz" + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", + "requires": { + "brace-expansion": "^1.1.7" + } }, "minimist": { "version": "0.0.8", - "from": "minimist@0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" + }, + "mkdirp": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "dev": true, + "requires": { + "minimist": "0.0.8" + } }, "mocha": { "version": "5.2.0", - "from": "mocha@5.2.0", "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", + "integrity": "sha1-bYrlCPWRZ/lA8rWzxKYSrlDJCuY=", + "requires": { + "browser-stdout": "1.3.1", + "commander": "2.15.1", + "debug": "3.1.0", + "diff": "3.5.0", + "escape-string-regexp": "1.0.5", + "glob": "7.1.2", + "growl": "1.10.5", + "he": "1.1.1", + "minimatch": "3.0.4", + "mkdirp": "0.5.1", + "supports-color": "5.4.0" + }, "dependencies": { "commander": { "version": "2.15.1", - "from": "commander@2.15.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz" + "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", + "integrity": "sha1-30boZ9D8Kuxmo0ZitAapzK//Ww8=" }, "debug": { "version": "3.1.0", - "from": "debug@3.1.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz" + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha1-W7WgZyYotkFJVmuhaBnmFRjGcmE=", + "requires": { + "ms": "2.0.0" + } }, "diff": { "version": 
"3.5.0", - "from": "diff@3.5.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz" + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=" }, "glob": { "version": "7.1.2", - "from": "glob@7.1.2", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz" + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", + "integrity": "sha1-wZyd+aAocC1nhhI4SmVSQExjbRU=", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } }, "growl": { "version": "1.10.5", - "from": "growl@1.10.5", - "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz" + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha1-8nNdwig2dPpnR4sQGBBZNVw2nl4=" }, "mkdirp": { "version": "0.5.1", - "from": "mkdirp@0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz" + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "requires": { + "minimist": "0.0.8" + } }, "supports-color": { "version": "5.4.0", - "from": "supports-color@5.4.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz" + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", + "integrity": "sha1-HGszdALCE3YF7+GfEP7DkPb6q1Q=", + "requires": { + "has-flag": "^3.0.0" + } } } }, "module-details-from-path": { "version": "1.0.3", - "from": "module-details-from-path@>=1.0.3 <2.0.0", - "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz" + "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", + "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" }, "moment": { "version": "2.24.0", - "from": "moment@>=2.10.6 <3.0.0", "resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz", + "integrity": "sha1-DQVdU/UFKqZTyfbraLtdEr9cK1s=", "optional": true }, "ms": { "version": "2.0.0", - "from": "ms@2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "dev": true }, "mv": { "version": "2.1.1", - "from": "mv@>=2.0.0 <3.0.0", "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=", "optional": true, + "requires": { + "mkdirp": "~0.5.1", + "ncp": "~2.0.0", + "rimraf": "~2.4.0" + }, "dependencies": { "mkdirp": { "version": "0.5.1", - "from": "mkdirp@>=0.5.1 <0.6.0", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "optional": true + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "optional": true, + "requires": { + "minimist": "0.0.8" + } }, "rimraf": { "version": "2.4.5", - "from": "rimraf@>=2.4.0 <2.5.0", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", - "optional": true + "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=", + "optional": true, + "requires": { + "glob": "^6.0.1" + } } } }, "nan": { "version": "2.11.0", - "from": "nan@>=2.0.8 <3.0.0", "resolved": "https://registry.npmjs.org/nan/-/nan-2.11.0.tgz", + "integrity": 
"sha1-V042Dk2VSrFpZuwQLAwEn9lhoJk=", "optional": true }, + "natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "dev": true + }, "ncp": { "version": "2.0.0", - "from": "ncp@>=2.0.0 <2.1.0", "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", + "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=", "optional": true }, "negotiator": { "version": "0.6.1", - "from": "negotiator@0.6.1", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz" + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", + "integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=" + }, + "nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true }, "nise": { "version": "1.5.0", - "from": "nise@>=1.4.6 <2.0.0", "resolved": "https://registry.npmjs.org/nise/-/nise-1.5.0.tgz", + "integrity": "sha1-0D6g5sG3XGOAFao1he3cEylJpQ0=", "dev": true, + "requires": { + "@sinonjs/formatio": "^3.1.0", + "@sinonjs/text-encoding": "^0.7.1", + "just-extend": "^4.0.2", + "lolex": "^4.1.0", + "path-to-regexp": "^1.7.0" + }, "dependencies": { "isarray": { "version": "0.0.1", - "from": "isarray@0.0.1", "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=", "dev": true }, "lolex": { "version": "4.1.0", - "from": "lolex@>=4.1.0 <5.0.0", "resolved": "https://registry.npmjs.org/lolex/-/lolex-4.1.0.tgz", + "integrity": "sha1-7N17hlOTkdgjeUejQZqorJdfD+E=", "dev": true }, "path-to-regexp": { "version": "1.7.0", - "from": "path-to-regexp@>=1.7.0 <2.0.0", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", - "dev": true + "integrity": "sha1-Wf3g9DW62suhA6hOnTvGTpa5k30=", + "dev": true, + "requires": { + "isarray": "0.0.1" + } } } }, "node-fetch": { "version": "2.6.0", - "from": "node-fetch@>=2.3.0 <3.0.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz" + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", + "integrity": "sha1-5jNFY4bUqlWGP2dqerDaqP3ssP0=" }, "node-forge": { "version": "0.8.4", - "from": "node-forge@>=0.8.0 <0.9.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.4.tgz" + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.4.tgz", + "integrity": "sha1-1nOGYrZhvhnicR7wGqOxghLxMDA=" }, "node-transloadit": { "version": "0.0.4", - "from": "node-transloadit@0.0.4", "resolved": "https://registry.npmjs.org/node-transloadit/-/node-transloadit-0.0.4.tgz", + "integrity": "sha1-4ZoHheON94NblO2AANHjXmg7zsE=", + "requires": { + "request": "~2.16.6", + "underscore": "1.2.1" + }, "dependencies": { "json-stringify-safe": { "version": "3.0.0", - "from": "json-stringify-safe@>=3.0.0 <3.1.0", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz", + "integrity": "sha1-nbew5TDH8onF6MhDKvGRwv91pbM=" }, "mime": { "version": "1.2.11", - "from": "mime@>=1.2.7 <1.3.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" }, "qs": { "version": "0.5.6", - "from": 
"qs@>=0.5.4 <0.6.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz" + "resolved": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz", + "integrity": "sha1-MbGtBYVnZRxSaSFQa5qHk5EaA4Q=" }, "request": { "version": "2.16.6", - "from": "request@>=2.16.6 <2.17.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.16.6.tgz" + "resolved": "https://registry.npmjs.org/request/-/request-2.16.6.tgz", + "integrity": "sha1-hy/kRa5y3iZrN4edatfclI+gHK0=", + "requires": { + "aws-sign": "~0.2.0", + "cookie-jar": "~0.2.0", + "forever-agent": "~0.2.0", + "form-data": "~0.0.3", + "hawk": "~0.10.2", + "json-stringify-safe": "~3.0.0", + "mime": "~1.2.7", + "node-uuid": "~1.4.0", + "oauth-sign": "~0.2.0", + "qs": "~0.5.4", + "tunnel-agent": "~0.2.0" + } }, "underscore": { "version": "1.2.1", - "from": "underscore@1.2.1", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz" + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz", + "integrity": "sha1-/FxrB2VnPZKi1KyLTcCqiHAuK9Q=" } } }, "node-uuid": { "version": "1.4.8", - "from": "node-uuid@>=1.4.1 <1.5.0", - "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz" + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", + "integrity": "sha1-sEDrCSOWivq/jTL7HxfxFn/auQc=" + }, + "normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "requires": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } }, "oauth-sign": { "version": "0.2.0", - "from": "oauth-sign@>=0.2.0 <0.3.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz" + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz", + "integrity": "sha1-oOahcV2u0GLzIrYit/5a/RA1tuI=" + }, + "object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "object.assign": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", + "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", + "dev": true, + "requires": { + "define-properties": "^1.1.2", + "function-bind": "^1.1.1", + "has-symbols": "^1.0.0", + "object-keys": "^1.0.11" + } + }, + "object.values": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.1.tgz", + "integrity": "sha512-WTa54g2K8iu0kmS/us18jEmdv1a4Wi//BZ/DTVYEcH0XhLM5NYdpDHja3gt57VrZLcNAO2WGA+KpWsDBaHt6eA==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } }, "on-finished": { "version": "2.3.0", - "from": "on-finished@>=2.3.0 <2.4.0", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz" + "resolved": 
"https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", + "requires": { + "ee-first": "1.1.1" + } }, "once": { "version": "1.4.0", - "from": "once@>=1.3.0 <2.0.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "requires": { + "wrappy": "1" + } + }, + "onetime": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", + "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } + }, + "optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dev": true, + "requires": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + } + }, + "os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "dev": true }, "p-limit": { "version": "2.2.0", - "from": "p-limit@>=2.2.0 <3.0.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz" + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", + "integrity": "sha1-QXyZQeYCepq8ulCS3SkE4lW1+8I=", + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, + "requires": { + "p-limit": "^1.1.0" + } }, "p-try": { "version": "2.2.0", - "from": "p-try@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha1-yyhoVA4xPWHeWPr741zpAE1VQOY=" + }, + "parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "requires": { + "callsites": "^3.0.0" + } }, "parse-duration": { "version": "0.1.1", - "from": "parse-duration@>=0.1.1 <0.2.0", - "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.1.tgz" + "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.1.tgz", + "integrity": "sha1-ExFN3JiRwezSgANiRFVN5DZHoiY=" + }, + "parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, + "requires": { + "error-ex": "^1.2.0" + } }, "parse-ms": { "version": "2.1.0", - "from": "parse-ms@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz" + "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", + "integrity": "sha1-NIVlp1PUOR+lJAKZVrFyy3dTCX0=" }, "parseurl": { "version": "1.3.2", - "from": "parseurl@>=1.3.2 <1.4.0", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz" + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", + "integrity": "sha1-/CidTtiZMRlGDBViUyYs3I3mW/M=" + }, + "path-exists": { + "version": 
"3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true }, "path-is-absolute": { "version": "1.0.1", - "from": "path-is-absolute@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + }, + "path-is-inside": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", + "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", + "dev": true + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "dev": true }, "path-parse": { "version": "1.0.6", - "from": "path-parse@>=1.0.5 <2.0.0", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz" + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", + "integrity": "sha1-1i27VnlAXXLEc37FhgDp3c8G0kw=" }, "path-to-regexp": { "version": "0.1.7", - "from": "path-to-regexp@0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" + }, + "path-type": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", + "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "dev": true, + "requires": { + "pify": "^2.0.0" + } }, "pathval": { "version": "1.1.0", - "from": "pathval@>=1.1.0 <2.0.0", "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz", + "integrity": "sha1-uULm1L3mUwBe9rcTYd74cn0GReA=", "dev": true }, "performance-now": { "version": "2.1.0", - "from": "performance-now@>=2.1.0 <3.0.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz" + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, "pify": { "version": "4.0.1", - "from": "pify@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz" + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha1-SyzSXFDVmHNcUCkiJP2MbfQeMjE=" + }, + "pkg-dir": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", + "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "dev": true, + "requires": { + "find-up": "^2.1.0" + } }, "pngcrush": { "version": "0.0.3", - "from": "pngcrush@0.0.3", - "resolved": "https://registry.npmjs.org/pngcrush/-/pngcrush-0.0.3.tgz" + "resolved": "https://registry.npmjs.org/pngcrush/-/pngcrush-0.0.3.tgz", + "integrity": "sha1-v2dW6s2h+rNJwHdo6AXMEA0o+Tc=", + "requires": { + "gettemporaryfilepath": "=0.0.1" + } + }, + "prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "dev": true + }, + "prettier": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", + "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", + "dev": true + }, + "prettier-eslint": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/prettier-eslint/-/prettier-eslint-9.0.1.tgz", + 
"integrity": "sha512-KZT65QTosSAqBBqmrC+RpXbsMRe7Os2YSR9cAfFbDlyPAopzA/S5bioiZ3rpziNQNSJaOxmtXSx07EQ+o2Dlug==", + "dev": true, + "requires": { + "@typescript-eslint/parser": "^1.10.2", + "common-tags": "^1.4.0", + "core-js": "^3.1.4", + "dlv": "^1.1.0", + "eslint": "^5.0.0", + "indent-string": "^4.0.0", + "lodash.merge": "^4.6.0", + "loglevel-colored-level-prefix": "^1.0.0", + "prettier": "^1.7.0", + "pretty-format": "^23.0.1", + "require-relative": "^0.8.7", + "typescript": "^3.2.1", + "vue-eslint-parser": "^2.0.2" + }, + "dependencies": { + "ansi-escapes": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", + "dev": true + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "dev": true, + "requires": { + "restore-cursor": "^2.0.0" + } + }, + "eslint": { + "version": "5.16.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", + "integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.9.1", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^4.0.3", + "eslint-utils": "^1.3.1", + "eslint-visitor-keys": "^1.0.0", + "espree": "^5.0.1", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob": "^7.1.2", + "globals": "^11.7.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^6.2.2", + "js-yaml": "^3.13.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.11", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.2", + "path-is-inside": "^1.0.2", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^5.5.1", + "strip-ansi": "^4.0.0", + "strip-json-comments": "^2.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0" + } + }, + "eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "espree": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", + "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", + "dev": true, + "requires": { + "acorn": "^6.0.7", + "acorn-jsx": "^5.0.0", + "eslint-visitor-keys": "^1.0.0" + } + }, + "figures": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": 
"sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true + }, + "inquirer": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", + "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", + "dev": true, + "requires": { + "ansi-escapes": "^3.2.0", + "chalk": "^2.4.2", + "cli-cursor": "^2.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^2.0.0", + "lodash": "^4.17.12", + "mute-stream": "0.0.7", + "run-async": "^2.2.0", + "rxjs": "^6.4.0", + "string-width": "^2.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "dev": true + }, + "mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "dev": true + }, + "onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "dev": true, + "requires": { + "mimic-fn": "^1.0.0" + } + }, + "restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "dev": true, + "requires": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + } + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + }, + "strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "dev": true + } + } + }, + "prettier-eslint-cli": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/prettier-eslint-cli/-/prettier-eslint-cli-5.0.0.tgz", + "integrity": "sha512-cei9UbN1aTrz3sQs88CWpvY/10PYTevzd76zoG1tdJ164OhmNTFRKPTOZrutVvscoQWzbnLKkviS3gu5JXwvZg==", + "dev": true, + "requires": { + "arrify": "^2.0.1", 
+ "boolify": "^1.0.0", + "camelcase-keys": "^6.0.0", + "chalk": "^2.4.2", + "common-tags": "^1.8.0", + "core-js": "^3.1.4", + "eslint": "^5.0.0", + "find-up": "^4.1.0", + "get-stdin": "^7.0.0", + "glob": "^7.1.4", + "ignore": "^5.1.2", + "lodash.memoize": "^4.1.2", + "loglevel-colored-level-prefix": "^1.0.0", + "messageformat": "^2.2.1", + "prettier-eslint": "^9.0.0", + "rxjs": "^6.5.2", + "yargs": "^13.2.4" + }, + "dependencies": { + "ansi-escapes": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", + "dev": true + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "dev": true, + "requires": { + "restore-cursor": "^2.0.0" + } + }, + "eslint": { + "version": "5.16.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", + "integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.9.1", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^4.0.3", + "eslint-utils": "^1.3.1", + "eslint-visitor-keys": "^1.0.0", + "espree": "^5.0.1", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob": "^7.1.2", + "globals": "^11.7.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^6.2.2", + "js-yaml": "^3.13.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.11", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.2", + "path-is-inside": "^1.0.2", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^5.5.1", + "strip-ansi": "^4.0.0", + "strip-json-comments": "^2.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0" + }, + "dependencies": { + "ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true + } + } + }, + "eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "espree": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", + "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", + "dev": true, + "requires": { + "acorn": "^6.0.7", + "acorn-jsx": "^5.0.0", + "eslint-visitor-keys": "^1.0.0" + } + }, + "figures": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "find-up": { + "version": "4.1.0", + "resolved": 
"https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + } + }, + "get-stdin": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-7.0.0.tgz", + "integrity": "sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==", + "dev": true + }, + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true + }, + "ignore": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.4.tgz", + "integrity": "sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==", + "dev": true + }, + "inquirer": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", + "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", + "dev": true, + "requires": { + "ansi-escapes": "^3.2.0", + "chalk": "^2.4.2", + "cli-cursor": "^2.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^2.0.0", + "lodash": "^4.17.12", + "mute-stream": "0.0.7", + "run-async": "^2.2.0", + "rxjs": "^6.4.0", + "string-width": "^2.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "requires": { + "p-locate": "^4.1.0" + } + }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "dev": true + }, + "mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "dev": true + }, + "onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "dev": true, + "requires": { + "mimic-fn": "^1.0.0" + } + }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": 
"sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "dev": true, + "requires": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + } + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + }, + "strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "dev": true + } + } + }, + "prettier-linter-helpers": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", + "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", + "dev": true, + "requires": { + "fast-diff": "^1.1.2" + } + }, + "pretty-format": { + "version": "23.6.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-23.6.0.tgz", + "integrity": "sha512-zf9NV1NSlDLDjycnwm6hpFATCGl/K1lt0R/GdkAK2O5LN/rwJoB+Mh93gGJjut4YbmecbfgLWVGSTCr0Ewvvbw==", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0", + "ansi-styles": "^3.2.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + } + } }, "pretty-ms": { "version": "4.0.0", - "from": "pretty-ms@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz" + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz", + "integrity": "sha1-Mbr0G5T9AiJwmKqgO9YmCOsNbpI=", + "requires": { + "parse-ms": "^2.0.0" + } }, "process-nextick-args": { "version": "2.0.0", - "from": "process-nextick-args@>=2.0.0 <2.1.0", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz" + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", + "integrity": "sha1-o31zL0JxtKsa0HDTVQjoKQeI/6o=" + }, + "progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "dev": true }, "prom-client": { "version": "11.5.1", - "from": "prom-client@>=11.1.3 <12.0.0", - "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.1.tgz" + "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.1.tgz", + "integrity": 
"sha1-FcZsrN7EUwELz68EEJvMNOa92pw=", + "requires": { + "tdigest": "^0.1.1" + } }, "protobufjs": { "version": "6.8.8", - "from": "protobufjs@>=6.8.6 <6.9.0", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", + "integrity": "sha1-yLTxKC/XqQ5vWxCe0RyEr4KQjnw=", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.0", + "@types/node": "^10.1.0", + "long": "^4.0.0" + }, "dependencies": { "@types/node": { "version": "10.14.9", - "from": "@types/node@>=10.1.0 <11.0.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.14.9.tgz" + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.14.9.tgz", + "integrity": "sha1-Lo1ngDnSeUPOU6GRM4YTMif9kGY=" } } }, "proxy-addr": { "version": "2.0.4", - "from": "proxy-addr@>=2.0.3 <2.1.0", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz" + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz", + "integrity": "sha1-7PxzO/Iv+Mb0B/onUye5q2fki5M=", + "requires": { + "forwarded": "~0.1.2", + "ipaddr.js": "1.8.0" + } }, "psl": { "version": "1.1.32", - "from": "psl@>=1.1.24 <2.0.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.32.tgz" + "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.32.tgz", + "integrity": "sha1-PxMnF88vnBaXJLK2yvNzz2lBmNs=" }, "punycode": { "version": "1.3.2", - "from": "punycode@1.3.2", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz" + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" }, "qs": { "version": "6.5.2", - "from": "qs@6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz" + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha1-yzroBuh0BERYTvFUzo7pjUA/PjY=" }, "querystring": { "version": "0.2.0", - "from": "querystring@0.2.0", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz" + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" + }, + "quick-lru": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", + "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", + "dev": true + }, + "ramda": { + "version": "0.26.1", + "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.26.1.tgz", + "integrity": "sha512-hLWjpy7EnsDBb0p+Z3B7rPi3GDeRG5ZtiI33kJhTt+ORCd38AbAIjB/9zRIUoeTbE/AVX5ZkU7m6bznsvrf8eQ==", + "dev": true }, "range-parser": { "version": "1.2.0", - "from": "range-parser@>=1.0.2 <2.0.0", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", + "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=" }, "raven": { "version": "1.1.3", - "from": "raven@1.1.3", "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", + "integrity": "sha1-QnPBrm005CMPUbLAEEGjK5Iygio=", + "requires": { + "cookie": "0.3.1", + "json-stringify-safe": "5.0.1", + "lsmod": "1.0.0", + "stack-trace": "0.0.9", + "uuid": "3.0.0" + }, "dependencies": { "uuid": { "version": "3.0.0", - "from": 
"uuid@3.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz" + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", + "integrity": "sha1-Zyj8BFnEUNeWqZwxg3VpvfZy1yg=" } } }, "raw-body": { "version": "2.3.3", - "from": "raw-body@2.3.3", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.3.tgz" + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.3.tgz", + "integrity": "sha1-GzJOzmtXBuFThVvBFIxlu39uoMM=", + "requires": { + "bytes": "3.0.0", + "http-errors": "1.6.3", + "iconv-lite": "0.4.23", + "unpipe": "1.0.0" + } + }, + "read-pkg": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", + "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", + "dev": true, + "requires": { + "load-json-file": "^2.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^2.0.0" + } + }, + "read-pkg-up": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", + "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", + "dev": true, + "requires": { + "find-up": "^2.0.0", + "read-pkg": "^2.0.0" + } }, "readable-stream": { "version": "1.0.34", - "from": "readable-stream@>=1.0.2 <1.1.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + }, "dependencies": { "isarray": { "version": "0.0.1", - "from": "isarray@0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" } } }, "recluster": { "version": "0.3.7", - "from": "recluster@>=0.3.7 <0.4.0", - "resolved": "https://registry.npmjs.org/recluster/-/recluster-0.3.7.tgz" + "resolved": "https://registry.npmjs.org/recluster/-/recluster-0.3.7.tgz", + "integrity": "sha1-aKRx3ZC2obl3ZjTPdpZAWutWeJU=" + }, + "regexpp": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", + "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", + "dev": true }, "request": { "version": "2.14.0", - "from": "request@2.14.0", "resolved": "https://registry.npmjs.org/request/-/request-2.14.0.tgz", + "integrity": "sha1-DYrLsLFMGrguAAt9OB+oyA0afYg=", + "requires": { + "form-data": "~0.0.3", + "mime": "~1.2.7" + }, "dependencies": { "form-data": { "version": "0.0.7", - "from": "form-data@~0.0.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.0.7.tgz", + "integrity": "sha1-chEYKiaiZs45cQ3IvEqBtwQIWb4=", + "requires": { + "async": "~0.1.9", + "combined-stream": "~0.0.4", + "mime": "~1.2.2" + }, "dependencies": { "async": { "version": "0.1.22", - "from": "async@~0.1.9" + "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz", + "integrity": "sha1-D8GqoIig4+8Ovi2IMbqw3PiEUGE=" }, "combined-stream": { "version": "0.0.4", - "from": "combined-stream@~0.0.4", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.4.tgz", + "integrity": "sha1-LRpDNH2+lRWkonlnMuW4hHOECyI=", + "requires": { + "delayed-stream": "0.0.5" + }, "dependencies": { "delayed-stream": { "version": "0.0.5", - "from": "delayed-stream@0.0.5" + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", + "integrity": "sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8=" } } } @@ -1583,363 
+4250,1013 @@ }, "mime": { "version": "1.2.9", - "from": "mime@~1.2.7" + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.9.tgz", + "integrity": "sha1-AJzUCGe9Nd5SGzuWbwTi+NTRPQk=" } } }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true + }, "require-in-the-middle": { "version": "4.0.0", - "from": "require-in-the-middle@>=4.0.0 <5.0.0", "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-4.0.0.tgz", + "integrity": "sha1-PHUoik7EgM30S8d950T4q+WFQFs=", + "requires": { + "debug": "^4.1.1", + "module-details-from-path": "^1.0.3", + "resolve": "^1.10.0" + }, "dependencies": { "debug": { "version": "4.1.1", - "from": "debug@>=4.1.1 <5.0.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz" + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha1-O3ImAlUQnGtYnO4FDx1RYTlmR5E=", + "requires": { + "ms": "^2.1.1" + } }, "ms": { "version": "2.1.2", - "from": "ms@^2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha1-0J0fNXtEP0kzgqjrPM0YOHKuYAk=" } } }, "require-like": { "version": "0.1.2", - "from": "require-like@0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", + "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=", + "dev": true + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "require-relative": { + "version": "0.8.7", + "resolved": "https://registry.npmjs.org/require-relative/-/require-relative-0.8.7.tgz", + "integrity": "sha1-eZlTn8ngR6N5KPoZb44VY9q9Nt4=", "dev": true }, "resolve": { "version": "1.11.0", - "from": "resolve@>=1.10.0 <2.0.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.11.0.tgz" + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.11.0.tgz", + "integrity": "sha1-QBSHC6KWF2uGND1Qtg87UGCc4jI=", + "requires": { + "path-parse": "^1.0.6" + } + }, + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true }, "response": { "version": "0.14.0", - "from": "response@0.14.0", "resolved": "https://registry.npmjs.org/response/-/response-0.14.0.tgz", + "integrity": "sha1-BmNS/z5rAm0EdYCUB2Y7Rob9JpY=", + "requires": { + "best-encoding": "^0.1.1", + "bl": "~0.7.0", + "caseless": "^0.3.0", + "mime": "~1.2.11" + }, "dependencies": { "mime": { "version": "1.2.11", - "from": "mime@>=1.2.11 <1.3.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" } } }, + "restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "requires": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + } + }, "retry-axios": { "version": "0.3.2", - "from": 
"retry-axios@0.3.2", - "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz" + "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz", + "integrity": "sha1-V1fID1hbTMTEmGqi/9R6YMbTXhM=" }, "retry-request": { "version": "4.0.0", - "from": "retry-request@>=4.0.0 <5.0.0", - "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.0.0.tgz" + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.0.0.tgz", + "integrity": "sha1-XDZhZiebPhDp16oTJ0RnoFy2kpA=", + "requires": { + "through2": "^2.0.0" + } }, "rimraf": { "version": "2.2.8", - "from": "rimraf@2.2.8", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz", + "integrity": "sha1-5Dm+Kq7jJzIZUnMPmaiSnk/FBYI=" + }, + "run-async": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz", + "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=", + "dev": true, + "requires": { + "is-promise": "^2.1.0" + } + }, + "rxjs": { + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.3.tgz", + "integrity": "sha512-wuYsAYYFdWTAnAaPoKGNhfpWwKZbJW+HgAJ+mImp+Epl7BG8oNWBCTyRM8gba9k4lk8BgWdoYm21Mo/RYhhbgA==", + "dev": true, + "requires": { + "tslib": "^1.9.0" + } }, "safe-buffer": { "version": "5.1.1", - "from": "safe-buffer@5.1.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz" + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", + "integrity": "sha1-iTMSr2myEj3vcfV4iQAWce6yyFM=" }, "safe-json-stringify": { "version": "1.2.0", - "from": "safe-json-stringify@>=1.0.0 <2.0.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", + "integrity": "sha1-NW5EvJjx+TzkXfFLzXwBzahuCv0=", "optional": true }, "safer-buffer": { "version": "2.1.2", - "from": "safer-buffer@>=2.1.2 <3.0.0", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha1-RPoWGwGHuVSd2Eu5GAL5vYOFzWo=" }, "sandboxed-module": { "version": "2.0.3", - "from": "sandboxed-module@latest", "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz", - "dev": true + "integrity": "sha1-x+VFkzm7y6KMUwPusz9ug4e/upY=", + "dev": true, + "requires": { + "require-like": "0.1.2", + "stack-trace": "0.0.9" + } }, "sax": { "version": "1.2.1", - "from": "sax@1.2.1", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz" + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" }, "semver": { "version": "6.1.1", - "from": "semver@>=6.0.0 <7.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.1.tgz" + "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.1.tgz", + "integrity": "sha1-U/U9qbMLIQPNTxXqs6GOy8shDJs=" }, "send": { "version": "0.16.2", - "from": "send@0.16.2", "resolved": "https://registry.npmjs.org/send/-/send-0.16.2.tgz", + "integrity": "sha1-bsyh4PjBVtFBWXVZhI32RzCmu8E=", + "requires": { + "debug": "2.6.9", + "depd": "~1.1.2", + "destroy": "~1.0.4", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "~1.6.2", + "mime": "1.4.1", + "ms": "2.0.0", + "on-finished": "~2.3.0", + "range-parser": "~1.2.0", + "statuses": "~1.4.0" + }, "dependencies": { "statuses": { "version": "1.4.0", - "from": 
"statuses@>=1.4.0 <1.5.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz" + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", + "integrity": "sha1-u3PURtonlhBu/MG2AaJT1sRr0Ic=" } } }, "serve-static": { "version": "1.13.2", - "from": "serve-static@1.13.2", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.2.tgz" + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.2.tgz", + "integrity": "sha1-CV6Ecv1bRiN9tQzkhqQ/S4bGzsE=", + "requires": { + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "parseurl": "~1.3.2", + "send": "0.16.2" + } + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true }, "setprototypeof": { "version": "1.1.0", - "from": "setprototypeof@1.1.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz" + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", + "integrity": "sha1-0L2FU2iHtv58DYGMuWLZ2RxU5lY=" }, "settings-sharelatex": { "version": "1.1.0", - "from": "settings-sharelatex@1.1.0", "resolved": "https://registry.npmjs.org/settings-sharelatex/-/settings-sharelatex-1.1.0.tgz", + "integrity": "sha1-Tv4vUpPbjxwVlnEEx5BfqHD/mS0=", + "requires": { + "coffee-script": "1.6.0" + }, "dependencies": { "coffee-script": { "version": "1.6.0", - "from": "coffee-script@1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz" + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", + "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" } } }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "dev": true, + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "dev": true + }, "shimmer": { "version": "1.2.1", - "from": "shimmer@>=1.2.0 <2.0.0", - "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz" + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", + "integrity": "sha1-YQhZ994ye1h+/r9QH7QxF/mv8zc=" + }, + "signal-exit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", + "dev": true }, "sinon": { "version": "7.1.1", - "from": "sinon@7.1.1", "resolved": "https://registry.npmjs.org/sinon/-/sinon-7.1.1.tgz", - "dev": true + "integrity": "sha1-EgLzF6oU2Ty5tp/1C2vUnA4F/8k=", + "dev": true, + "requires": { + "@sinonjs/commons": "^1.2.0", + "@sinonjs/formatio": "^3.0.0", + "@sinonjs/samsam": "^2.1.2", + "diff": "^3.5.0", + "lodash.get": "^4.4.2", + "lolex": "^3.0.0", + "nise": "^1.4.6", + "supports-color": "^5.5.0", + "type-detect": "^4.0.8" + } + }, + "slice-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", + "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "astral-regex": "^1.0.0", + "is-fullwidth-code-point": "^2.0.0" + }, + "dependencies": { + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + } + } }, "sntp": { "version": "0.1.4", - "from": "sntp@>=0.1.0 <0.2.0", - "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz" + "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz", + "integrity": "sha1-XvSBuVGnspr/30r9fyaDj8ESD4Q=", + "requires": { + "hoek": "0.7.x" + } }, "source-map": { "version": "0.6.1", - "from": "source-map@>=0.6.1 <0.7.0", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha1-dHIq8y6WFOnCh6jQu95IteLxomM=" + }, + "spdx-correct": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", + "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", + "dev": true, + "requires": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-exceptions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", + "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", + "dev": true + }, + "spdx-expression-parse": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", + "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "dev": true, + "requires": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-license-ids": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", + "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", + "dev": true }, "split": { "version": "1.0.1", - "from": "split@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz" + "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", + "integrity": "sha1-YFvZvjA6pZ+zX5Ip++oN3snqB9k=", + "requires": { + "through": "2" + } + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true }, "sshpk": { "version": "1.16.1", - "from": "sshpk@>=1.7.0 <2.0.0", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz" + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", + "integrity": "sha1-+2YcC+8ps520B2nuOfpwCT1vaHc=", + "requires": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + } }, "stack-trace": { "version": "0.0.9", - "from": "stack-trace@0.0.9", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", + "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" }, "statsd-parser": { "version": "0.0.4", - "from": "statsd-parser@>=0.0.4 <0.1.0", - "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz" + "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", + "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" }, 
"statuses": { "version": "1.5.0", - "from": "statuses@>=1.4.0 <2.0.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" }, "stream-browserify": { "version": "2.0.1", - "from": "stream-browserify@>=2.0.1 <3.0.0", "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.1.tgz", + "integrity": "sha1-ZiZu5fm9uZQKTkUUyvtDu3Hlyds=", + "requires": { + "inherits": "~2.0.1", + "readable-stream": "^2.0.2" + }, "dependencies": { "readable-stream": { "version": "2.3.6", - "from": "readable-stream@>=2.0.2 <3.0.0", - "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz" + "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha1-sRwn2IuP8fvgcGQ8+UsMea4bCq8=", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } }, "string_decoder": { "version": "1.1.1", - "from": "string_decoder@>=1.1.1 <1.2.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha1-nPFhG6YmhdcDCunkujQUnDrwP8g=", + "requires": { + "safe-buffer": "~5.1.0" + } } } }, "stream-buffers": { "version": "0.2.6", - "from": "stream-buffers@>=0.2.5 <0.3.0", - "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz" + "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz", + "integrity": "sha1-GBwI1bs2kARfaUAbmuanoM8zE/w=" }, "stream-counter": { "version": "1.0.0", - "from": "stream-counter@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz", + "integrity": "sha1-kc8lac5NxQYf6816yyY5SloRR1E=" }, "stream-shift": { "version": "1.0.0", - "from": "stream-shift@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", + "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=" + }, + "string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } + } + } + }, + "string.prototype.trimleft": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz", + "integrity": "sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1" + } + }, + "string.prototype.trimright": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz", + "integrity": "sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1" + } }, "string_decoder": { "version": "0.10.31", - "from": "string_decoder@>=0.10.0 <0.11.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + } + } + }, + "strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "dev": true + }, + "strip-json-comments": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz", + "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==", + "dev": true }, "supports-color": { "version": "5.5.0", - "from": "supports-color@>=5.5.0 <6.0.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "dev": true + "integrity": "sha1-4uaaRKyHcveKHsCzW2id9lMO/I8=", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + }, + "table": { + "version": "5.4.6", + "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz", + "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==", + "dev": true, + "requires": { + "ajv": "^6.10.2", + "lodash": "^4.17.14", + "slice-ansi": "^2.1.0", + "string-width": "^3.0.0" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } + } }, "tdigest": { "version": "0.1.1", - "from": "tdigest@>=0.1.1 <0.2.0", - "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz" + "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", + "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=", + "requires": { + "bintrees": "1.0.1" + } }, "teeny-request": { "version": "3.11.3", - "from": 
"teeny-request@>=3.6.0 <4.0.0", "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", + "integrity": "sha1-M1xin3ZF5dZZk2LfLzIwxMvCOlU=", + "requires": { + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.2.0", + "uuid": "^3.3.2" + }, "dependencies": { "uuid": { "version": "3.3.2", - "from": "uuid@^3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz" + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha1-G0r0lV6zB3xQHCOHL8ZROBFYcTE=" } } }, + "text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", + "dev": true + }, "through": { "version": "2.3.8", - "from": "through@>=2.0.0 <3.0.0", - "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz" + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" }, "through2": { "version": "2.0.5", - "from": "through2@>=2.0.3 <3.0.0", "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha1-AcHjnrMdB8t9A6lqcIIyYLIxMs0=", + "requires": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + }, "dependencies": { "readable-stream": { "version": "2.3.6", - "from": "readable-stream@^2.1.5", - "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz" + "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha1-sRwn2IuP8fvgcGQ8+UsMea4bCq8=", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } }, "string_decoder": { "version": "1.1.1", - "from": "string_decoder@~1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha1-nPFhG6YmhdcDCunkujQUnDrwP8g=", + "requires": { + "safe-buffer": "~5.1.0" + } } } }, + "tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, + "requires": { + "os-tmpdir": "~1.0.2" + } + }, + "to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", + "dev": true + }, "tough-cookie": { "version": "2.4.3", - "from": "tough-cookie@>=2.4.3 <2.5.0", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", + "integrity": "sha1-U/Nto/R3g7CSWvoG/587FlKA94E=", + "requires": { + "psl": "^1.1.24", + "punycode": "^1.4.1" + }, "dependencies": { "punycode": { "version": "1.4.1", - "from": "punycode@>=1.4.1 <2.0.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" } } }, + "tslib": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.10.0.tgz", + "integrity": "sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==", + "dev": true + }, "tunnel-agent": { "version": "0.2.0", - "from": "tunnel-agent@>=0.2.0 <0.3.0", - "resolved": 
"https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz" + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz", + "integrity": "sha1-aFPCr7GyEJ5FYp5JK9419Fnqaeg=" }, "tweetnacl": { "version": "0.14.5", - "from": "tweetnacl@>=0.14.0 <0.15.0", - "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz" + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + }, + "type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "dev": true, + "requires": { + "prelude-ls": "~1.1.2" + } }, "type-detect": { "version": "4.0.8", - "from": "type-detect@>=4.0.5 <5.0.0", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha1-dkb7XxiHHPu3dJ5pvTmmOI63RQw=", + "dev": true + }, + "type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", "dev": true }, "type-is": { "version": "1.6.16", - "from": "type-is@>=1.6.16 <1.7.0", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz" + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz", + "integrity": "sha1-+JzjQVQcZysl7nrjxz3uOyvlAZQ=", + "requires": { + "media-typer": "0.3.0", + "mime-types": "~2.1.18" + } + }, + "typescript": { + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.3.tgz", + "integrity": "sha512-Mcr/Qk7hXqFBXMN7p7Lusj1ktCBydylfQM/FZCk5glCNQJrCUKPkMHdo9R0MTFWsC/4kPFvDS0fDPvukfCkFsw==", + "dev": true }, "underscore": { "version": "1.5.2", - "from": "underscore@>=1.5.2 <1.6.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz" + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz", + "integrity": "sha1-EzXF5PXm0zu7SwBrqMhqAPVW3gg=" }, "unpipe": { "version": "1.0.0", - "from": "unpipe@1.0.0", - "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" }, "uri-js": { "version": "4.2.2", - "from": "uri-js@>=4.2.2 <5.0.0", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", + "integrity": "sha1-lMVA4f93KVbiKZUHwBCupsiDjrA=", + "requires": { + "punycode": "^2.1.0" + }, "dependencies": { "punycode": { "version": "2.1.1", - "from": "punycode@>=2.1.0 <3.0.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz" + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha1-tYsBCsQMIsVldhbI0sLALHv0eew=" } } }, "url": { "version": "0.10.3", - "from": "url@0.10.3", - "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz" + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", + "requires": { + "punycode": "1.3.2", + "querystring": "0.2.0" + } }, "util-deprecate": { "version": "1.0.2", - "from": "util-deprecate@>=1.0.1 <1.1.0", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" }, "utils-merge": { "version": "1.0.1", - "from": "utils-merge@1.0.1", - "resolved": 
"https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz" + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" + }, + "v8-compile-cache": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", + "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==", + "dev": true + }, + "validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "requires": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } }, "vary": { "version": "1.1.2", - "from": "vary@>=1.1.2 <1.2.0", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz" + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" }, "verror": { "version": "1.10.0", - "from": "verror@1.10.0", - "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz" + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "requires": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, + "vue-eslint-parser": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-2.0.3.tgz", + "integrity": "sha512-ZezcU71Owm84xVF6gfurBQUGg8WQ+WZGxgDEQu1IHFBZNx7BFZg3L1yHxrCBNNwbwFtE1GuvfJKMtb6Xuwc/Bw==", + "dev": true, + "requires": { + "debug": "^3.1.0", + "eslint-scope": "^3.7.1", + "eslint-visitor-keys": "^1.0.0", + "espree": "^3.5.2", + "esquery": "^1.0.0", + "lodash": "^4.17.4" + }, + "dependencies": { + "acorn-jsx": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-3.0.1.tgz", + "integrity": "sha1-r9+UiPsezvyDSPb7IvRk4ypYs2s=", + "dev": true, + "requires": { + "acorn": "^3.0.4" + } + }, + "eslint-scope": { + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-3.7.3.tgz", + "integrity": "sha512-W+B0SvF4gamyCTmUc+uITPY0989iXVfKvhwtmJocTaYoc/3khEHmEmvfY/Gn9HA9VV75jrQECsHizkNw1b68FA==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "espree": { + "version": "3.5.4", + "resolved": "https://registry.npmjs.org/espree/-/espree-3.5.4.tgz", + "integrity": "sha512-yAcIQxtmMiB/jL32dzEp2enBeidsB7xWPLNiw3IIkpVds1P+h7qF9YwJq1yUNzp2OKXgAprs4F61ih66UsoD1A==", + "dev": true, + "requires": { + "acorn": "^5.5.0", + "acorn-jsx": "^3.0.0" + } + } + } + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true + }, + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true + }, + "wrap-ansi": { + "version": "5.1.0", + 
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } + } }, "wrappy": { "version": "1.0.2", - "from": "wrappy@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "write": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz", + "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", + "dev": true, + "requires": { + "mkdirp": "^0.5.1" + } }, "xml2js": { "version": "0.4.19", - "from": "xml2js@0.4.19", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz" + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz", + "integrity": "sha1-aGwg8hMgnpSr8NG88e+qKRx4J6c=", + "requires": { + "sax": ">=0.6.0", + "xmlbuilder": "~9.0.1" + } }, "xmlbuilder": { "version": "9.0.7", - "from": "xmlbuilder@>=9.0.1 <9.1.0", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz" + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", + "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=" }, "xtend": { "version": "4.0.1", - "from": "xtend@>=4.0.1 <4.1.0", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", + "integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=" + }, + "y18n": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", + "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", + "dev": true }, "yallist": { "version": "3.0.3", - "from": "yallist@>=3.0.2 <4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz" + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", + "integrity": "sha1-tLBJ4xS+VF486AIjbWzSLNkcPek=" + }, + "yargs": { + "version": "13.3.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.0.tgz", + "integrity": "sha512-2eehun/8ALW8TLoIl7MVaRUrg+yCnenu8B4kBlRxj3GJGDKU1Og7sMXPNm1BYyM1DOJmTZ4YeN/Nwxv+8XJsUA==", + "dev": true, + "requires": { + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": 
"^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.1.1" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } + } + }, + "yargs-parser": { + "version": "13.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.1.tgz", + "integrity": "sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } } } } diff --git a/services/filestore/package.json b/services/filestore/package.json index bcd8011f10..708656e0d6 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -46,8 +46,22 @@ "underscore": "~1.5.2" }, "devDependencies": { + "babel-eslint": "^10.0.3", "bunyan": "^1.3.5", "chai": "4.2.0", + "eslint": "^6.7.2", + "eslint-config-prettier": "^6.7.0", + "eslint-config-standard": "^14.1.0", + "eslint-plugin-chai-expect": "^2.1.0", + "eslint-plugin-chai-friendly": "^0.5.0", + "eslint-plugin-import": "^2.19.1", + "eslint-plugin-mocha": "^6.2.2", + "eslint-plugin-node": "^10.0.0", + "eslint-plugin-prettier": "^3.1.2", + "eslint-plugin-promise": "^4.2.1", + "eslint-plugin-standard": "^4.0.1", + "prettier-eslint": "^9.0.1", + "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "2.0.3", "sinon": "7.1.1" } From ae3d25c969cba090dd1f319c4f109860fd3625b8 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 10:16:50 +0000 Subject: [PATCH 324/555] Decaffeinate: add eslint and prettier rc files --- services/filestore/.eslintrc | 67 ++++++ services/filestore/.prettierrc | 4 + services/filestore/npm-shrinkwrap.json | 292 +++++++++++++++++++++++++ 3 files changed, 363 insertions(+) create mode 100644 services/filestore/.eslintrc create mode 100644 services/filestore/.prettierrc diff --git a/services/filestore/.eslintrc 
b/services/filestore/.eslintrc new file mode 100644 index 0000000000..f8776c68a0 --- /dev/null +++ b/services/filestore/.eslintrc @@ -0,0 +1,67 @@ +{ + "extends": [ + "standard", + "prettier", + "prettier/standard", + ], + "plugins": [ + "mocha", + "chai-expect", + "chai-friendly" + ], + "parser": "babel-eslint", + "env": { + "mocha": true + }, + "rules": { + // Swap the no-unused-expressions rule with a more chai-friendly one + "no-unused-expressions": 0, + "chai-friendly/no-unused-expressions": "error" + }, + "overrides": [ + { + // Test specific rules + "files": ["**/test/*/src/**/*.js"], + "globals": { + "expect": true + }, + "rules": { + // mocha-specific rules + "mocha/handle-done-callback": "error", + "mocha/no-exclusive-tests": "error", + "mocha/no-global-tests": "error", + "mocha/no-identical-title": "error", + "mocha/no-nested-tests": "error", + "mocha/no-pending-tests": "error", + "mocha/no-skipped-tests": "error", + "mocha/no-mocha-arrows": "error", + + // chai-specific rules + "chai-expect/missing-assertion": "error", + "chai-expect/terminating-properties": "error", + + // prefer-arrow-callback applies to all callbacks, not just ones in mocha tests. + // we don't enforce this at the top-level - just in tests to manage `this` scope + // based on mocha's context mechanism + "mocha/prefer-arrow-callback": "error" + } + }, + { + // Frontend test specific rules + "files": ["**/test/frontend/**/*.js"], + "globals": { + "expect": true, + "define": true, + "$": true + } + }, + { + // Backend specific rules + "files": ["**/app/src/**/*.js"], + "rules": { + // don't allow console.log in backend code + "no-console": "error" + } + } + ] +} diff --git a/services/filestore/.prettierrc b/services/filestore/.prettierrc new file mode 100644 index 0000000000..b2095be81e --- /dev/null +++ b/services/filestore/.prettierrc @@ -0,0 +1,4 @@ +{ + "semi": false, + "singleQuote": true +} diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 413c41dca4..64529d81ca 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -583,6 +583,14 @@ "requires": { "lodash.unescape": "4.0.1", "semver": "5.5.0" + }, + "dependencies": { + "semver": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.5.0.tgz", + "integrity": "sha512-4SJ3dm0WAwWy/NVeioZh5AntkdJoWKxHxcmyP622fOkgHa4z3R0TdBJICINyaSDE6uNwVc8gZr+ZinwZAH4xIA==", + "dev": true + } } }, "abort-controller": { @@ -1184,6 +1192,14 @@ "semver": "^5.5.0", "shebang-command": "^1.2.0", "which": "^1.2.9" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } } }, "cryptiles": { @@ -1485,6 +1501,35 @@ "table": "^5.2.3", "text-table": "^0.2.0", "v8-compile-cache": "^2.0.3" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "ms": { + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } } }, "eslint-config-prettier": { @@ -1581,6 +1626,15 @@ "esutils": "^2.0.2", "isarray": "^1.0.0" } + }, + "resolve": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.13.1.tgz", + "integrity": "sha512-CxqObCX8K8YtAhOBRg+lrcdn+LK+WYOS8tSjqSFbjtrI5PnS63QPhZl4+yKfrU9tdsbMu9Anr/amegT87M9Z6w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } } } }, @@ -1670,6 +1724,14 @@ "acorn": "^7.1.0", "acorn-jsx": "^5.1.0", "eslint-visitor-keys": "^1.1.0" + }, + "dependencies": { + "acorn": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz", + "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==", + "dev": true + } } }, "esprima": { @@ -1842,6 +1904,17 @@ "chardet": "^0.7.0", "iconv-lite": "^0.4.24", "tmp": "^0.0.33" + }, + "dependencies": { + "iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + } } }, "extsprintf": { @@ -1943,6 +2016,31 @@ "flatted": "^2.0.0", "rimraf": "2.6.3", "write": "1.0.3" + }, + "dependencies": { + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "rimraf": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", + "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + } } }, "flatted": { @@ -2083,6 +2181,7 @@ "version": "6.0.4", "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", + "optional": true, "requires": { "inflight": "^1.0.4", "inherits": "2", @@ -2372,6 +2471,14 @@ "string-width": "^4.1.0", "strip-ansi": "^5.1.0", "through": "^2.3.6" + }, + "dependencies": { + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + } } }, "ipaddr.js": { @@ -2628,6 +2735,14 @@ "parse-json": "^2.2.0", "pify": "^2.0.0", "strip-bom": "^3.0.0" + }, + "dependencies": { + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + } } }, "locate-path": { @@ -2839,6 +2954,12 @@ "requires": { "ansi-regex": "^2.0.0" } + }, + "supports-color": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "integrity": 
"sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", + "dev": true } } }, @@ -2882,6 +3003,15 @@ "dev": true, "requires": { "minimist": "^1.2.0" + }, + "dependencies": { + "minimist": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", + "dev": true, + "optional": true + } } }, "map-obj": { @@ -3264,6 +3394,14 @@ "resolve": "^1.10.0", "semver": "2 || 3 || 4 || 5", "validate-npm-package-license": "^3.0.1" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } } }, "oauth-sign": { @@ -3367,6 +3505,23 @@ "dev": true, "requires": { "p-limit": "^1.1.0" + }, + "dependencies": { + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "requires": { + "p-try": "^1.0.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true + } } }, "p-try": { @@ -3447,6 +3602,14 @@ "dev": true, "requires": { "pify": "^2.0.0" + }, + "dependencies": { + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + } } }, "pathval": { @@ -3536,6 +3699,15 @@ "restore-cursor": "^2.0.0" } }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, "eslint": { "version": "5.16.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", @@ -3610,6 +3782,20 @@ "escape-string-regexp": "^1.0.5" } }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, "globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", @@ -3643,6 +3829,12 @@ "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", "dev": true }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, "strip-ansi": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", @@ -3666,6 +3858,12 @@ "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", "dev": true }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, "mute-stream": { "version": "0.0.7", "resolved": 
"https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", @@ -3691,6 +3889,12 @@ "signal-exit": "^3.0.2" } }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + }, "string-width": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", @@ -3764,6 +3968,15 @@ "restore-cursor": "^2.0.0" } }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, "eslint": { "version": "5.16.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", @@ -3862,6 +4075,20 @@ "integrity": "sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==", "dev": true }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, "globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", @@ -3901,6 +4128,12 @@ "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", "dev": true }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, "strip-ansi": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", @@ -3933,6 +4166,12 @@ "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", "dev": true }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, "mute-stream": { "version": "0.0.7", "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", @@ -3973,6 +4212,12 @@ "signal-exit": "^3.0.2" } }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + }, "string-width": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", @@ -4779,6 +5024,18 @@ "string-width": "^3.0.0" }, "dependencies": { + "ajv": { + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz", + "integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==", + "dev": true, + "requires": { + "fast-deep-equal": "^2.0.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, "emoji-regex": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", @@ -4791,6 
+5048,12 @@ "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", "dev": true }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, "string-width": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", @@ -5045,6 +5308,12 @@ "lodash": "^4.17.4" }, "dependencies": { + "acorn": { + "version": "5.7.3", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.3.tgz", + "integrity": "sha512-T/zvzYRfbVojPWahDsE5evJdHb3oJoQfFbsrKM7w5Zcs++Tr257tia3BmMP8XYVjp1S9RZXQMh7gao96BlqZOw==", + "dev": true + }, "acorn-jsx": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-3.0.1.tgz", @@ -5052,6 +5321,23 @@ "dev": true, "requires": { "acorn": "^3.0.4" + }, + "dependencies": { + "acorn": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-3.3.0.tgz", + "integrity": "sha1-ReN/s56No/JbruP/U2niu18iAXo=", + "dev": true + } + } + }, + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" } }, "eslint-scope": { @@ -5073,6 +5359,12 @@ "acorn": "^5.5.0", "acorn-jsx": "^3.0.0" } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true } } }, From 8d2c87420e0c48e477f6b89f397f912bb1d38d00 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Mon, 16 Dec 2019 10:24:32 +0000 Subject: [PATCH 325/555] decaffeinate: Rename AWSSDKPersistorManager.coffee and 13 other files from .coffee to .js --- .../{AWSSDKPersistorManager.coffee => AWSSDKPersistorManager.js} | 0 .../app/coffee/{BucketController.coffee => BucketController.js} | 0 services/filestore/app/coffee/{Errors.coffee => Errors.js} | 0 .../coffee/{FSPersistorManager.coffee => FSPersistorManager.js} | 0 .../app/coffee/{FileController.coffee => FileController.js} | 0 .../app/coffee/{FileConverter.coffee => FileConverter.js} | 0 .../filestore/app/coffee/{FileHandler.coffee => FileHandler.js} | 0 .../{HealthCheckController.coffee => HealthCheckController.js} | 0 .../app/coffee/{ImageOptimiser.coffee => ImageOptimiser.js} | 0 .../filestore/app/coffee/{KeyBuilder.coffee => KeyBuilder.js} | 0 .../app/coffee/{LocalFileWriter.coffee => LocalFileWriter.js} | 0 .../app/coffee/{PersistorManager.coffee => PersistorManager.js} | 0 .../coffee/{S3PersistorManager.coffee => S3PersistorManager.js} | 0 services/filestore/app/coffee/{SafeExec.coffee => SafeExec.js} | 0 14 files changed, 0 insertions(+), 0 deletions(-) rename services/filestore/app/coffee/{AWSSDKPersistorManager.coffee => AWSSDKPersistorManager.js} (100%) rename services/filestore/app/coffee/{BucketController.coffee => BucketController.js} (100%) rename services/filestore/app/coffee/{Errors.coffee => Errors.js} (100%) rename services/filestore/app/coffee/{FSPersistorManager.coffee => FSPersistorManager.js} (100%) rename services/filestore/app/coffee/{FileController.coffee => FileController.js} (100%) rename services/filestore/app/coffee/{FileConverter.coffee => FileConverter.js} (100%) rename services/filestore/app/coffee/{FileHandler.coffee => FileHandler.js} 
(100%) rename services/filestore/app/coffee/{HealthCheckController.coffee => HealthCheckController.js} (100%) rename services/filestore/app/coffee/{ImageOptimiser.coffee => ImageOptimiser.js} (100%) rename services/filestore/app/coffee/{KeyBuilder.coffee => KeyBuilder.js} (100%) rename services/filestore/app/coffee/{LocalFileWriter.coffee => LocalFileWriter.js} (100%) rename services/filestore/app/coffee/{PersistorManager.coffee => PersistorManager.js} (100%) rename services/filestore/app/coffee/{S3PersistorManager.coffee => S3PersistorManager.js} (100%) rename services/filestore/app/coffee/{SafeExec.coffee => SafeExec.js} (100%) diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.coffee b/services/filestore/app/coffee/AWSSDKPersistorManager.js similarity index 100% rename from services/filestore/app/coffee/AWSSDKPersistorManager.coffee rename to services/filestore/app/coffee/AWSSDKPersistorManager.js diff --git a/services/filestore/app/coffee/BucketController.coffee b/services/filestore/app/coffee/BucketController.js similarity index 100% rename from services/filestore/app/coffee/BucketController.coffee rename to services/filestore/app/coffee/BucketController.js diff --git a/services/filestore/app/coffee/Errors.coffee b/services/filestore/app/coffee/Errors.js similarity index 100% rename from services/filestore/app/coffee/Errors.coffee rename to services/filestore/app/coffee/Errors.js diff --git a/services/filestore/app/coffee/FSPersistorManager.coffee b/services/filestore/app/coffee/FSPersistorManager.js similarity index 100% rename from services/filestore/app/coffee/FSPersistorManager.coffee rename to services/filestore/app/coffee/FSPersistorManager.js diff --git a/services/filestore/app/coffee/FileController.coffee b/services/filestore/app/coffee/FileController.js similarity index 100% rename from services/filestore/app/coffee/FileController.coffee rename to services/filestore/app/coffee/FileController.js diff --git a/services/filestore/app/coffee/FileConverter.coffee b/services/filestore/app/coffee/FileConverter.js similarity index 100% rename from services/filestore/app/coffee/FileConverter.coffee rename to services/filestore/app/coffee/FileConverter.js diff --git a/services/filestore/app/coffee/FileHandler.coffee b/services/filestore/app/coffee/FileHandler.js similarity index 100% rename from services/filestore/app/coffee/FileHandler.coffee rename to services/filestore/app/coffee/FileHandler.js diff --git a/services/filestore/app/coffee/HealthCheckController.coffee b/services/filestore/app/coffee/HealthCheckController.js similarity index 100% rename from services/filestore/app/coffee/HealthCheckController.coffee rename to services/filestore/app/coffee/HealthCheckController.js diff --git a/services/filestore/app/coffee/ImageOptimiser.coffee b/services/filestore/app/coffee/ImageOptimiser.js similarity index 100% rename from services/filestore/app/coffee/ImageOptimiser.coffee rename to services/filestore/app/coffee/ImageOptimiser.js diff --git a/services/filestore/app/coffee/KeyBuilder.coffee b/services/filestore/app/coffee/KeyBuilder.js similarity index 100% rename from services/filestore/app/coffee/KeyBuilder.coffee rename to services/filestore/app/coffee/KeyBuilder.js diff --git a/services/filestore/app/coffee/LocalFileWriter.coffee b/services/filestore/app/coffee/LocalFileWriter.js similarity index 100% rename from services/filestore/app/coffee/LocalFileWriter.coffee rename to services/filestore/app/coffee/LocalFileWriter.js diff --git 
a/services/filestore/app/coffee/PersistorManager.coffee b/services/filestore/app/coffee/PersistorManager.js similarity index 100% rename from services/filestore/app/coffee/PersistorManager.coffee rename to services/filestore/app/coffee/PersistorManager.js diff --git a/services/filestore/app/coffee/S3PersistorManager.coffee b/services/filestore/app/coffee/S3PersistorManager.js similarity index 100% rename from services/filestore/app/coffee/S3PersistorManager.coffee rename to services/filestore/app/coffee/S3PersistorManager.js diff --git a/services/filestore/app/coffee/SafeExec.coffee b/services/filestore/app/coffee/SafeExec.js similarity index 100% rename from services/filestore/app/coffee/SafeExec.coffee rename to services/filestore/app/coffee/SafeExec.js From 6bd8452f19b3a8c82e7330e804cac94c25fd9ae1 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Mon, 16 Dec 2019 10:24:35 +0000 Subject: [PATCH 326/555] decaffeinate: Convert AWSSDKPersistorManager.coffee and 13 other files to JS --- .../app/coffee/AWSSDKPersistorManager.js | 225 +++++---- .../filestore/app/coffee/BucketController.js | 66 ++- services/filestore/app/coffee/Errors.js | 18 +- .../app/coffee/FSPersistorManager.js | 270 +++++----- .../filestore/app/coffee/FileController.js | 238 +++++---- .../filestore/app/coffee/FileConverter.js | 126 +++-- services/filestore/app/coffee/FileHandler.js | 268 +++++----- .../app/coffee/HealthCheckController.js | 119 +++-- .../filestore/app/coffee/ImageOptimiser.js | 58 ++- services/filestore/app/coffee/KeyBuilder.js | 98 ++-- .../filestore/app/coffee/LocalFileWriter.js | 122 +++-- .../filestore/app/coffee/PersistorManager.js | 40 +- .../app/coffee/S3PersistorManager.js | 460 ++++++++++-------- services/filestore/app/coffee/SafeExec.js | 88 ++-- 14 files changed, 1260 insertions(+), 936 deletions(-) diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.js b/services/filestore/app/coffee/AWSSDKPersistorManager.js index 168fc68d54..c263936073 100644 --- a/services/filestore/app/coffee/AWSSDKPersistorManager.js +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.js @@ -1,106 +1,145 @@ -# This module is not used in production, which currently uses -# S3PersistorManager. The intention is to migrate S3PersistorManager to use the -# latest aws-sdk and delete this module so that PersistorManager would load the -# same backend for both the 's3' and 'aws-sdk' options. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// This module is not used in production, which currently uses +// S3PersistorManager. The intention is to migrate S3PersistorManager to use the +// latest aws-sdk and delete this module so that PersistorManager would load the +// same backend for both the 's3' and 'aws-sdk' options. 
-logger = require "logger-sharelatex" -aws = require "aws-sdk" -_ = require "underscore" -fs = require "fs" -Errors = require "./Errors" +const logger = require("logger-sharelatex"); +const aws = require("aws-sdk"); +const _ = require("underscore"); +const fs = require("fs"); +const Errors = require("./Errors"); -s3 = new aws.S3() +const s3 = new aws.S3(); -module.exports = - sendFile: (bucketName, key, fsPath, callback)-> - logger.log bucketName:bucketName, key:key, "send file data to s3" - stream = fs.createReadStream fsPath - s3.upload Bucket: bucketName, Key: key, Body: stream, (err, data) -> - if err? - logger.err err: err, Bucket: bucketName, Key: key, "error sending file data to s3" - callback err +module.exports = { + sendFile(bucketName, key, fsPath, callback){ + logger.log({bucketName, key}, "send file data to s3"); + const stream = fs.createReadStream(fsPath); + return s3.upload({Bucket: bucketName, Key: key, Body: stream}, function(err, data) { + if (err != null) { + logger.err({err, Bucket: bucketName, Key: key}, "error sending file data to s3"); + } + return callback(err); + }); + }, - sendStream: (bucketName, key, stream, callback)-> - logger.log bucketName:bucketName, key:key, "send file stream to s3" - s3.upload Bucket: bucketName, Key: key, Body: stream, (err, data) -> - if err? - logger.err err: err, Bucket: bucketName, Key: key, "error sending file stream to s3" - callback err + sendStream(bucketName, key, stream, callback){ + logger.log({bucketName, key}, "send file stream to s3"); + return s3.upload({Bucket: bucketName, Key: key, Body: stream}, function(err, data) { + if (err != null) { + logger.err({err, Bucket: bucketName, Key: key}, "error sending file stream to s3"); + } + return callback(err); + }); + }, - getFileStream: (bucketName, key, opts, callback = (err, res)->)-> - logger.log bucketName:bucketName, key:key, "get file stream from s3" - callback = _.once callback - params = - Bucket:bucketName + getFileStream(bucketName, key, opts, callback){ + if (callback == null) { callback = function(err, res){}; } + logger.log({bucketName, key}, "get file stream from s3"); + callback = _.once(callback); + const params = { + Bucket:bucketName, Key: key - if opts.start? and opts.end? - params['Range'] = "bytes=#{opts.start}-#{opts.end}" - request = s3.getObject params - stream = request.createReadStream() - stream.on 'readable', () -> - callback null, stream - stream.on 'error', (err) -> - logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3" - if err.code == 'NoSuchKey' - return callback new Errors.NotFoundError "File not found in S3: #{bucketName}:#{key}" - callback err + }; + if ((opts.start != null) && (opts.end != null)) { + params['Range'] = `bytes=${opts.start}-${opts.end}`; + } + const request = s3.getObject(params); + const stream = request.createReadStream(); + stream.on('readable', () => callback(null, stream)); + return stream.on('error', function(err) { + logger.err({err, bucketName, key}, "error getting file stream from s3"); + if (err.code === 'NoSuchKey') { + return callback(new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`)); + } + return callback(err); + }); + }, - copyFile: (bucketName, sourceKey, destKey, callback)-> - logger.log bucketName:bucketName, sourceKey:sourceKey, destKey: destKey, "copying file in s3" - source = bucketName + '/' + sourceKey - s3.copyObject {Bucket: bucketName, Key: destKey, CopySource: source}, (err) -> - if err? 
- logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in s3" - callback err + copyFile(bucketName, sourceKey, destKey, callback){ + logger.log({bucketName, sourceKey, destKey}, "copying file in s3"); + const source = bucketName + '/' + sourceKey; + return s3.copyObject({Bucket: bucketName, Key: destKey, CopySource: source}, function(err) { + if (err != null) { + logger.err({err, bucketName, sourceKey, destKey}, "something went wrong copying file in s3"); + } + return callback(err); + }); + }, - deleteFile: (bucketName, key, callback)-> - logger.log bucketName:bucketName, key:key, "delete file in s3" - s3.deleteObject {Bucket: bucketName, Key: key}, (err) -> - if err? - logger.err err:err, bucketName:bucketName, key:key, "something went wrong deleting file in s3" - callback err + deleteFile(bucketName, key, callback){ + logger.log({bucketName, key}, "delete file in s3"); + return s3.deleteObject({Bucket: bucketName, Key: key}, function(err) { + if (err != null) { + logger.err({err, bucketName, key}, "something went wrong deleting file in s3"); + } + return callback(err); + }); + }, - deleteDirectory: (bucketName, key, callback)-> - logger.log bucketName:bucketName, key:key, "delete directory in s3" - s3.listObjects {Bucket: bucketName, Prefix: key}, (err, data) -> - if err? - logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in s3" - return callback err - if data.Contents.length == 0 - logger.log bucketName:bucketName, key:key, "the directory is empty" - return callback() - keys = _.map data.Contents, (entry)-> - Key: entry.Key - s3.deleteObjects - Bucket: bucketName - Delete: - Objects: keys + deleteDirectory(bucketName, key, callback){ + logger.log({bucketName, key}, "delete directory in s3"); + return s3.listObjects({Bucket: bucketName, Prefix: key}, function(err, data) { + if (err != null) { + logger.err({err, bucketName, key}, "something went wrong listing prefix in s3"); + return callback(err); + } + if (data.Contents.length === 0) { + logger.log({bucketName, key}, "the directory is empty"); + return callback(); + } + const keys = _.map(data.Contents, entry => ({ + Key: entry.Key + })); + return s3.deleteObjects({ + Bucket: bucketName, + Delete: { + Objects: keys, Quiet: true - , (err) -> - if err? - logger.err err:err, bucketName:bucketName, key:keys, "something went wrong deleting directory in s3" - callback err + } + } + , function(err) { + if (err != null) { + logger.err({err, bucketName, key:keys}, "something went wrong deleting directory in s3"); + } + return callback(err); + }); + }); + }, - checkIfFileExists:(bucketName, key, callback)-> - logger.log bucketName:bucketName, key:key, "check file existence in s3" - s3.headObject {Bucket: bucketName, Key: key}, (err, data) -> - if err? - return (callback null, false) if err.code == 'NotFound' - logger.err err:err, bucketName:bucketName, key:key, "something went wrong checking head in s3" - return callback err - callback null, data.ETag? 
+ checkIfFileExists(bucketName, key, callback){ + logger.log({bucketName, key}, "check file existence in s3"); + return s3.headObject({Bucket: bucketName, Key: key}, function(err, data) { + if (err != null) { + if (err.code === 'NotFound') { return (callback(null, false)); } + logger.err({err, bucketName, key}, "something went wrong checking head in s3"); + return callback(err); + } + return callback(null, (data.ETag != null)); + }); + }, - directorySize:(bucketName, key, callback)-> - logger.log bucketName:bucketName, key:key, "get project size in s3" - s3.listObjects {Bucket: bucketName, Prefix: key}, (err, data) -> - if err? - logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in s3" - return callback err - if data.Contents.length == 0 - logger.log bucketName:bucketName, key:key, "the directory is empty" - return callback() - totalSize = 0 - _.each data.Contents, (entry)-> - totalSize += entry.Size - callback null, totalSize + directorySize(bucketName, key, callback){ + logger.log({bucketName, key}, "get project size in s3"); + return s3.listObjects({Bucket: bucketName, Prefix: key}, function(err, data) { + if (err != null) { + logger.err({err, bucketName, key}, "something went wrong listing prefix in s3"); + return callback(err); + } + if (data.Contents.length === 0) { + logger.log({bucketName, key}, "the directory is empty"); + return callback(); + } + let totalSize = 0; + _.each(data.Contents, entry => totalSize += entry.Size); + return callback(null, totalSize); + }); + } +}; diff --git a/services/filestore/app/coffee/BucketController.js b/services/filestore/app/coffee/BucketController.js index bb7bd4544b..ecd4f9a54f 100644 --- a/services/filestore/app/coffee/BucketController.js +++ b/services/filestore/app/coffee/BucketController.js @@ -1,29 +1,41 @@ -settings = require("settings-sharelatex") -logger = require("logger-sharelatex") -FileHandler = require("./FileHandler") -metrics = require("metrics-sharelatex") -Errors = require('./Errors') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let BucketController; +const settings = require("settings-sharelatex"); +const logger = require("logger-sharelatex"); +const FileHandler = require("./FileHandler"); +const metrics = require("metrics-sharelatex"); +const Errors = require('./Errors'); -module.exports = BucketController = +module.exports = (BucketController = { - getFile: (req, res)-> - {bucket} = req.params - key = req.params[0] - credentials = settings.filestore.s3BucketCreds?[bucket] - options = { - key: key, - bucket: bucket, - credentials: credentials - } - metrics.inc "#{bucket}.getFile" - logger.log key:key, bucket:bucket, "receiving request to get file from bucket" - FileHandler.getFile bucket, key, options, (err, fileStream)-> - if err? - logger.err err:err, key:key, bucket:bucket, "problem getting file from bucket" - if err instanceof Errors.NotFoundError - return res.send 404 - else - return res.send 500 - else - logger.log key:key, bucket:bucket, "sending bucket file to response" - fileStream.pipe res + getFile(req, res){ + const {bucket} = req.params; + const key = req.params[0]; + const credentials = settings.filestore.s3BucketCreds != null ? 
settings.filestore.s3BucketCreds[bucket] : undefined; + const options = { + key, + bucket, + credentials + }; + metrics.inc(`${bucket}.getFile`); + logger.log({key, bucket}, "receiving request to get file from bucket"); + return FileHandler.getFile(bucket, key, options, function(err, fileStream){ + if (err != null) { + logger.err({err, key, bucket}, "problem getting file from bucket"); + if (err instanceof Errors.NotFoundError) { + return res.send(404); + } else { + return res.send(500); + } + } else { + logger.log({key, bucket}, "sending bucket file to response"); + return fileStream.pipe(res); + } + }); + } +}); diff --git a/services/filestore/app/coffee/Errors.js b/services/filestore/app/coffee/Errors.js index 3bd9479abe..c4f8f7004c 100644 --- a/services/filestore/app/coffee/Errors.js +++ b/services/filestore/app/coffee/Errors.js @@ -1,9 +1,11 @@ -NotFoundError = (message) -> - error = new Error(message) - error.name = "NotFoundError" - error.__proto__ = NotFoundError.prototype - return error -NotFoundError.prototype.__proto__ = Error.prototype +let Errors; +var NotFoundError = function(message) { + const error = new Error(message); + error.name = "NotFoundError"; + error.__proto__ = NotFoundError.prototype; + return error; +}; +NotFoundError.prototype.__proto__ = Error.prototype; -module.exports = Errors = - NotFoundError: NotFoundError +module.exports = (Errors = + {NotFoundError}); diff --git a/services/filestore/app/coffee/FSPersistorManager.js b/services/filestore/app/coffee/FSPersistorManager.js index 38e30f284f..c4792e6215 100644 --- a/services/filestore/app/coffee/FSPersistorManager.js +++ b/services/filestore/app/coffee/FSPersistorManager.js @@ -1,128 +1,164 @@ -logger = require("logger-sharelatex") -fs = require("fs") -path = require("path") -LocalFileWriter = require("./LocalFileWriter") -Errors = require('./Errors') -rimraf = require("rimraf") -_ = require "underscore" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const logger = require("logger-sharelatex"); +const fs = require("fs"); +const path = require("path"); +const LocalFileWriter = require("./LocalFileWriter"); +const Errors = require('./Errors'); +const rimraf = require("rimraf"); +const _ = require("underscore"); -filterName = (key) -> - return key.replace /\//g, "_" +const filterName = key => key.replace(/\//g, "_"); -module.exports = - sendFile: ( location, target, source, callback = (err)->) -> - filteredTarget = filterName target - logger.log location:location, target:filteredTarget, source:source, "sending file" - done = _.once (err) -> - if err? 
- logger.err err:err, location:location, target:filteredTarget, source:source, "Error on put of file" - callback(err) - # actually copy the file (instead of moving it) to maintain consistent behaviour - # between the different implementations - sourceStream = fs.createReadStream source - sourceStream.on 'error', done - targetStream = fs.createWriteStream "#{location}/#{filteredTarget}" - targetStream.on 'error', done - targetStream.on 'finish', () -> - done() - sourceStream.pipe targetStream +module.exports = { + sendFile( location, target, source, callback) { + if (callback == null) { callback = function(err){}; } + const filteredTarget = filterName(target); + logger.log({location, target:filteredTarget, source}, "sending file"); + const done = _.once(function(err) { + if (err != null) { + logger.err({err, location, target:filteredTarget, source}, "Error on put of file"); + } + return callback(err); + }); + // actually copy the file (instead of moving it) to maintain consistent behaviour + // between the different implementations + const sourceStream = fs.createReadStream(source); + sourceStream.on('error', done); + const targetStream = fs.createWriteStream(`${location}/${filteredTarget}`); + targetStream.on('error', done); + targetStream.on('finish', () => done()); + return sourceStream.pipe(targetStream); + }, - sendStream: ( location, target, sourceStream, callback = (err)->) -> - logger.log location:location, target:target, "sending file stream" - sourceStream.on "error", (err)-> - logger.err location:location, target:target, err:err "error on stream to send" - LocalFileWriter.writeStream sourceStream, null, (err, fsPath)=> - if err? - logger.err location:location, target:target, fsPath:fsPath, err:err, "something went wrong writing stream to disk" - return callback err - @sendFile location, target, fsPath, (err) -> - # delete the temporary file created above and return the original error - LocalFileWriter.deleteFile fsPath, () -> - callback(err) + sendStream( location, target, sourceStream, callback) { + if (callback == null) { callback = function(err){}; } + logger.log({location, target}, "sending file stream"); + sourceStream.on("error", err => logger.err({location, target, err:err("error on stream to send")})); + return LocalFileWriter.writeStream(sourceStream, null, (err, fsPath)=> { + if (err != null) { + logger.err({location, target, fsPath, err}, "something went wrong writing stream to disk"); + return callback(err); + } + return this.sendFile(location, target, fsPath, err => // delete the temporary file created above and return the original error + LocalFileWriter.deleteFile(fsPath, () => callback(err))); + }); + }, - # opts may be {start: Number, end: Number} - getFileStream: (location, name, opts, callback = (err, res)->) -> - filteredName = filterName name - logger.log location:location, filteredName:filteredName, "getting file" - fs.open "#{location}/#{filteredName}", 'r', (err, fd) -> - if err? 
- logger.err err:err, location:location, filteredName:name, "Error reading from file" - if err.code == 'ENOENT' - return callback new Errors.NotFoundError(err.message), null - else - return callback err, null - opts.fd = fd - sourceStream = fs.createReadStream null, opts - return callback null, sourceStream + // opts may be {start: Number, end: Number} + getFileStream(location, name, opts, callback) { + if (callback == null) { callback = function(err, res){}; } + const filteredName = filterName(name); + logger.log({location, filteredName}, "getting file"); + return fs.open(`${location}/${filteredName}`, 'r', function(err, fd) { + if (err != null) { + logger.err({err, location, filteredName:name}, "Error reading from file"); + } + if (err.code === 'ENOENT') { + return callback(new Errors.NotFoundError(err.message), null); + } else { + return callback(err, null); + } + opts.fd = fd; + const sourceStream = fs.createReadStream(null, opts); + return callback(null, sourceStream); + }); + }, - getFileSize: (location, filename, callback) -> - fullPath = path.join(location, filterName(filename)) - fs.stat fullPath, (err, stats) -> - if err? - if err.code == 'ENOENT' - logger.log({location:location, filename:filename}, "file not found") - callback(new Errors.NotFoundError(err.message)) - else - logger.err({err:err, location:location, filename:filename}, "failed to stat file") - callback(err) - return - callback(null, stats.size) + getFileSize(location, filename, callback) { + const fullPath = path.join(location, filterName(filename)); + return fs.stat(fullPath, function(err, stats) { + if (err != null) { + if (err.code === 'ENOENT') { + logger.log({location, filename}, "file not found"); + callback(new Errors.NotFoundError(err.message)); + } else { + logger.err({err, location, filename}, "failed to stat file"); + callback(err); + } + return; + } + return callback(null, stats.size); + }); + }, - copyFile: (location, fromName, toName, callback = (err)->)-> - filteredFromName=filterName fromName - filteredToName=filterName toName - logger.log location:location, fromName:filteredFromName, toName:filteredToName, "copying file" - sourceStream = fs.createReadStream "#{location}/#{filteredFromName}" - sourceStream.on 'error', (err) -> - logger.err err:err, location:location, key:filteredFromName, "Error reading from file" - callback err - targetStream = fs.createWriteStream "#{location}/#{filteredToName}" - targetStream.on 'error', (err) -> - logger.err err:err, location:location, key:filteredToName, "Error writing to file" - callback err - targetStream.on 'finish', () -> - callback null - sourceStream.pipe targetStream + copyFile(location, fromName, toName, callback){ + if (callback == null) { callback = function(err){}; } + const filteredFromName=filterName(fromName); + const filteredToName=filterName(toName); + logger.log({location, fromName:filteredFromName, toName:filteredToName}, "copying file"); + const sourceStream = fs.createReadStream(`${location}/${filteredFromName}`); + sourceStream.on('error', function(err) { + logger.err({err, location, key:filteredFromName}, "Error reading from file"); + return callback(err); + }); + const targetStream = fs.createWriteStream(`${location}/${filteredToName}`); + targetStream.on('error', function(err) { + logger.err({err, location, key:filteredToName}, "Error writing to file"); + return callback(err); + }); + targetStream.on('finish', () => callback(null)); + return sourceStream.pipe(targetStream); + }, - deleteFile: (location, name, callback)-> - filteredName = 
filterName name - logger.log location:location, filteredName:filteredName, "delete file" - fs.unlink "#{location}/#{filteredName}", (err) -> - if err? - logger.err err:err, location:location, filteredName:filteredName, "Error on delete." - callback err - else - callback() + deleteFile(location, name, callback){ + const filteredName = filterName(name); + logger.log({location, filteredName}, "delete file"); + return fs.unlink(`${location}/${filteredName}`, function(err) { + if (err != null) { + logger.err({err, location, filteredName}, "Error on delete."); + return callback(err); + } else { + return callback(); + } + }); + }, - deleteDirectory: (location, name, callback = (err)->)-> - filteredName = filterName name.replace(/\/$/,'') - rimraf "#{location}/#{filteredName}", (err) -> - if err? - logger.err err:err, location:location, filteredName:filteredName, "Error on rimraf rmdir." - callback err - else - callback() + deleteDirectory(location, name, callback){ + if (callback == null) { callback = function(err){}; } + const filteredName = filterName(name.replace(/\/$/,'')); + return rimraf(`${location}/${filteredName}`, function(err) { + if (err != null) { + logger.err({err, location, filteredName}, "Error on rimraf rmdir."); + return callback(err); + } else { + return callback(); + } + }); + }, - checkIfFileExists:(location, name, callback = (err,exists)->)-> - filteredName = filterName name - logger.log location:location, filteredName:filteredName, "checking if file exists" - fs.exists "#{location}/#{filteredName}", (exists) -> - logger.log location:location, filteredName:filteredName, exists:exists, "checked if file exists" - callback null, exists + checkIfFileExists(location, name, callback){ + if (callback == null) { callback = function(err,exists){}; } + const filteredName = filterName(name); + logger.log({location, filteredName}, "checking if file exists"); + return fs.exists(`${location}/${filteredName}`, function(exists) { + logger.log({location, filteredName, exists}, "checked if file exists"); + return callback(null, exists); + }); + }, - directorySize:(location, name, callback)-> - filteredName = filterName name.replace(/\/$/,'') - logger.log location:location, filteredName:filteredName, "get project size in file system" - fs.readdir "#{location}/#{filteredName}", (err, files) -> - if err? 
- logger.err err:err, location:location, filteredName:filteredName, "something went wrong listing prefix in aws" - return callback(err) - totalSize = 0 - _.each files, (entry)-> - fd = fs.openSync "#{location}/#{filteredName}/#{entry}", 'r' - fileStats = fs.fstatSync(fd) - totalSize += fileStats.size - fs.closeSync fd - logger.log totalSize:totalSize, "total size", files:files - callback null, totalSize + directorySize(location, name, callback){ + const filteredName = filterName(name.replace(/\/$/,'')); + logger.log({location, filteredName}, "get project size in file system"); + return fs.readdir(`${location}/${filteredName}`, function(err, files) { + if (err != null) { + logger.err({err, location, filteredName}, "something went wrong listing prefix in aws"); + return callback(err); + } + let totalSize = 0; + _.each(files, function(entry){ + const fd = fs.openSync(`${location}/${filteredName}/${entry}`, 'r'); + const fileStats = fs.fstatSync(fd); + totalSize += fileStats.size; + return fs.closeSync(fd); + }); + logger.log({totalSize}, "total size", {files}); + return callback(null, totalSize); + }); + } +}; diff --git a/services/filestore/app/coffee/FileController.js b/services/filestore/app/coffee/FileController.js index f98dbd1e49..f40d4a0317 100644 --- a/services/filestore/app/coffee/FileController.js +++ b/services/filestore/app/coffee/FileController.js @@ -1,113 +1,145 @@ -PersistorManager = require("./PersistorManager") -settings = require("settings-sharelatex") -logger = require("logger-sharelatex") -FileHandler = require("./FileHandler") -metrics = require("metrics-sharelatex") -parseRange = require('range-parser') -Errors = require('./Errors') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let FileController; +const PersistorManager = require("./PersistorManager"); +const settings = require("settings-sharelatex"); +const logger = require("logger-sharelatex"); +const FileHandler = require("./FileHandler"); +const metrics = require("metrics-sharelatex"); +const parseRange = require('range-parser'); +const Errors = require('./Errors'); -oneDayInSeconds = 60 * 60 * 24 -maxSizeInBytes = 1024 * 1024 * 1024 # 1GB +const oneDayInSeconds = 60 * 60 * 24; +const maxSizeInBytes = 1024 * 1024 * 1024; // 1GB -module.exports = FileController = +module.exports = (FileController = { - getFile: (req, res)-> - {key, bucket} = req - {format, style} = req.query - options = { - key: key, - bucket: bucket, - format: format, - style: style, + getFile(req, res){ + const {key, bucket} = req; + const {format, style} = req.query; + const options = { + key, + bucket, + format, + style, + }; + metrics.inc("getFile"); + logger.log({key, bucket, format, style}, "receiving request to get file"); + if (req.headers.range != null) { + const range = FileController._get_range(req.headers.range); + options.start = range.start; + options.end = range.end; + logger.log({start: range.start, end: range.end}, "getting range of bytes from file"); } - metrics.inc "getFile" - logger.log key:key, bucket:bucket, format:format, style: style, "receiving request to get file" - if req.headers.range? 
- range = FileController._get_range(req.headers.range) - options.start = range.start - options.end = range.end - logger.log start: range.start, end: range.end, "getting range of bytes from file" - FileHandler.getFile bucket, key, options, (err, fileStream)-> - if err? - if err instanceof Errors.NotFoundError - return res.send 404 - else - logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file" - return res.send 500 - else if req.query.cacheWarm - logger.log key:key, bucket:bucket, format:format, style:style, "request is only for cache warm so not sending stream" - res.send 200 - else - logger.log key:key, bucket:bucket, format:format, style:style, "sending file to response" - fileStream.pipe res + return FileHandler.getFile(bucket, key, options, function(err, fileStream){ + if (err != null) { + if (err instanceof Errors.NotFoundError) { + return res.send(404); + } else { + logger.err({err, key, bucket, format, style}, "problem getting file"); + return res.send(500); + } + } else if (req.query.cacheWarm) { + logger.log({key, bucket, format, style}, "request is only for cache warm so not sending stream"); + return res.send(200); + } else { + logger.log({key, bucket, format, style}, "sending file to response"); + return fileStream.pipe(res); + } + }); + }, - getFileHead: (req, res) -> - {key, bucket} = req - metrics.inc("getFileSize") - logger.log({ key: key, bucket: bucket }, "receiving request to get file metadata") - FileHandler.getFileSize bucket, key, (err, fileSize) -> - if err? - if err instanceof Errors.NotFoundError - res.status(404).end() - else - res.status(500).end() - return - res.set("Content-Length", fileSize) - res.status(200).end() + getFileHead(req, res) { + const {key, bucket} = req; + metrics.inc("getFileSize"); + logger.log({ key, bucket }, "receiving request to get file metadata"); + return FileHandler.getFileSize(bucket, key, function(err, fileSize) { + if (err != null) { + if (err instanceof Errors.NotFoundError) { + res.status(404).end(); + } else { + res.status(500).end(); + } + return; + } + res.set("Content-Length", fileSize); + return res.status(200).end(); + }); + }, - insertFile: (req, res)-> - metrics.inc "insertFile" - {key, bucket} = req - logger.log key:key, bucket:bucket, "receiving request to insert file" - FileHandler.insertFile bucket, key, req, (err)-> - if err? - logger.log err: err, key: key, bucket: bucket, "error inserting file" - res.send 500 - else - res.send 200 + insertFile(req, res){ + metrics.inc("insertFile"); + const {key, bucket} = req; + logger.log({key, bucket}, "receiving request to insert file"); + return FileHandler.insertFile(bucket, key, req, function(err){ + if (err != null) { + logger.log({err, key, bucket}, "error inserting file"); + return res.send(500); + } else { + return res.send(200); + } + }); + }, - copyFile: (req, res)-> - metrics.inc "copyFile" - {key, bucket} = req - oldProject_id = req.body.source.project_id - oldFile_id = req.body.source.file_id - logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "receiving request to copy file" - PersistorManager.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)-> - if err? 
- if err instanceof Errors.NotFoundError - res.send 404 - else - logger.log err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file" - res.send 500 - else - res.send 200 + copyFile(req, res){ + metrics.inc("copyFile"); + const {key, bucket} = req; + const oldProject_id = req.body.source.project_id; + const oldFile_id = req.body.source.file_id; + logger.log({key, bucket, oldProject_id, oldFile_id}, "receiving request to copy file"); + return PersistorManager.copyFile(bucket, `${oldProject_id}/${oldFile_id}`, key, function(err){ + if (err != null) { + if (err instanceof Errors.NotFoundError) { + return res.send(404); + } else { + logger.log({err, oldProject_id, oldFile_id}, "something went wrong copying file"); + return res.send(500); + } + } else { + return res.send(200); + } + }); + }, - deleteFile: (req, res)-> - metrics.inc "deleteFile" - {key, bucket} = req - logger.log key:key, bucket:bucket, "receiving request to delete file" - FileHandler.deleteFile bucket, key, (err)-> - if err? - logger.log err:err, key:key, bucket:bucket, "something went wrong deleting file" - res.send 500 - else - res.send 204 + deleteFile(req, res){ + metrics.inc("deleteFile"); + const {key, bucket} = req; + logger.log({key, bucket}, "receiving request to delete file"); + return FileHandler.deleteFile(bucket, key, function(err){ + if (err != null) { + logger.log({err, key, bucket}, "something went wrong deleting file"); + return res.send(500); + } else { + return res.send(204); + } + }); + }, - _get_range: (header) -> - parsed = parseRange(maxSizeInBytes, header) - if parsed == -1 or parsed == -2 or parsed.type != 'bytes' - null - else - range = parsed[0] - {start: range.start, end: range.end} + _get_range(header) { + const parsed = parseRange(maxSizeInBytes, header); + if ((parsed === -1) || (parsed === -2) || (parsed.type !== 'bytes')) { + return null; + } else { + const range = parsed[0]; + return {start: range.start, end: range.end}; + } + }, - directorySize: (req, res)-> - metrics.inc "projectSize" - {project_id, bucket} = req - logger.log project_id:project_id, bucket:bucket, "receiving request to project size" - FileHandler.getDirectorySize bucket, project_id, (err, size)-> - if err? 
- logger.log err: err, project_id: project_id, bucket: bucket, "error inserting file" - res.send 500 - else - res.json {'total bytes' : size} + directorySize(req, res){ + metrics.inc("projectSize"); + const {project_id, bucket} = req; + logger.log({project_id, bucket}, "receiving request to project size"); + return FileHandler.getDirectorySize(bucket, project_id, function(err, size){ + if (err != null) { + logger.log({err, project_id, bucket}, "error inserting file"); + return res.send(500); + } else { + return res.json({'total bytes' : size}); + } + }); + } +}); diff --git a/services/filestore/app/coffee/FileConverter.js b/services/filestore/app/coffee/FileConverter.js index 894b0e89fd..b35265c22b 100644 --- a/services/filestore/app/coffee/FileConverter.js +++ b/services/filestore/app/coffee/FileConverter.js @@ -1,62 +1,80 @@ -_ = require("underscore") -metrics = require("metrics-sharelatex") -logger = require("logger-sharelatex") -safe_exec = require("./SafeExec") -approvedFormats = ["png"] -Settings = require "settings-sharelatex" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const _ = require("underscore"); +const metrics = require("metrics-sharelatex"); +const logger = require("logger-sharelatex"); +const safe_exec = require("./SafeExec"); +const approvedFormats = ["png"]; +const Settings = require("settings-sharelatex"); -fourtySeconds = 40 * 1000 +const fourtySeconds = 40 * 1000; -childProcessOpts = - killSignal: "SIGTERM" +const childProcessOpts = { + killSignal: "SIGTERM", timeout: fourtySeconds +}; -module.exports = +module.exports = { - convert: (sourcePath, requestedFormat, callback)-> - logger.log sourcePath:sourcePath, requestedFormat:requestedFormat, "converting file format" - timer = new metrics.Timer("imageConvert") - destPath = "#{sourcePath}.#{requestedFormat}" - sourcePath = "#{sourcePath}[0]" - if !_.include approvedFormats, requestedFormat - err = new Error("invalid format requested") - return callback err - width = "600x" - command = ["convert", "-define", "pdf:fit-page=#{width}", "-flatten", "-density", "300", sourcePath, destPath] - command = Settings.commands.convertCommandPrefix.concat(command) - safe_exec command, childProcessOpts, (err, stdout, stderr)-> - timer.done() - if err? 
- logger.err err:err, stderr:stderr, sourcePath:sourcePath, requestedFormat:requestedFormat, destPath:destPath, "something went wrong converting file" - else - logger.log sourcePath:sourcePath, requestedFormat:requestedFormat, destPath:destPath, "finished converting file" - callback(err, destPath) + convert(sourcePath, requestedFormat, callback){ + logger.log({sourcePath, requestedFormat}, "converting file format"); + const timer = new metrics.Timer("imageConvert"); + const destPath = `${sourcePath}.${requestedFormat}`; + sourcePath = `${sourcePath}[0]`; + if (!_.include(approvedFormats, requestedFormat)) { + const err = new Error("invalid format requested"); + return callback(err); + } + const width = "600x"; + let command = ["convert", "-define", `pdf:fit-page=${width}`, "-flatten", "-density", "300", sourcePath, destPath]; + command = Settings.commands.convertCommandPrefix.concat(command); + return safe_exec(command, childProcessOpts, function(err, stdout, stderr){ + timer.done(); + if (err != null) { + logger.err({err, stderr, sourcePath, requestedFormat, destPath}, "something went wrong converting file"); + } else { + logger.log({sourcePath, requestedFormat, destPath}, "finished converting file"); + } + return callback(err, destPath); + }); + }, - thumbnail: (sourcePath, callback)-> - destPath = "#{sourcePath}.png" - sourcePath = "#{sourcePath}[0]" - width = "260x" - command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", "pdf:fit-page=#{width}", sourcePath, "-resize", width, destPath] - logger.log sourcePath:sourcePath, destPath:destPath, command:command, "thumbnail convert file" - command = Settings.commands.convertCommandPrefix.concat(command) - safe_exec command, childProcessOpts, (err, stdout, stderr)-> - if err? - logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to thumbnail" - else - logger.log sourcePath:sourcePath, destPath:destPath, "finished thumbnailing file" - callback(err, destPath) + thumbnail(sourcePath, callback){ + const destPath = `${sourcePath}.png`; + sourcePath = `${sourcePath}[0]`; + const width = "260x"; + let command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", `pdf:fit-page=${width}`, sourcePath, "-resize", width, destPath]; + logger.log({sourcePath, destPath, command}, "thumbnail convert file"); + command = Settings.commands.convertCommandPrefix.concat(command); + return safe_exec(command, childProcessOpts, function(err, stdout, stderr){ + if (err != null) { + logger.err({err, stderr, sourcePath}, "something went wrong converting file to thumbnail"); + } else { + logger.log({sourcePath, destPath}, "finished thumbnailing file"); + } + return callback(err, destPath); + }); + }, - preview: (sourcePath, callback)-> - logger.log sourcePath:sourcePath, "preview convert file" - destPath = "#{sourcePath}.png" - sourcePath = "#{sourcePath}[0]" - width = "548x" - command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", "pdf:fit-page=#{width}", sourcePath, "-resize", width, destPath] - command = Settings.commands.convertCommandPrefix.concat(command) - safe_exec command, childProcessOpts, (err, stdout, stderr)-> - if err? 
- logger.err err:err, stderr:stderr, sourcePath:sourcePath, destPath:destPath, "something went wrong converting file to preview" - else - logger.log sourcePath:sourcePath, destPath:destPath, "finished converting file to preview" - callback(err, destPath) + preview(sourcePath, callback){ + logger.log({sourcePath}, "preview convert file"); + const destPath = `${sourcePath}.png`; + sourcePath = `${sourcePath}[0]`; + const width = "548x"; + let command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", `pdf:fit-page=${width}`, sourcePath, "-resize", width, destPath]; + command = Settings.commands.convertCommandPrefix.concat(command); + return safe_exec(command, childProcessOpts, function(err, stdout, stderr){ + if (err != null) { + logger.err({err, stderr, sourcePath, destPath}, "something went wrong converting file to preview"); + } else { + logger.log({sourcePath, destPath}, "finished converting file to preview"); + } + return callback(err, destPath); + }); + } +}; diff --git a/services/filestore/app/coffee/FileHandler.js b/services/filestore/app/coffee/FileHandler.js index cb8d78a0fe..548e206a12 100644 --- a/services/filestore/app/coffee/FileHandler.js +++ b/services/filestore/app/coffee/FileHandler.js @@ -1,129 +1,169 @@ -settings = require("settings-sharelatex") -PersistorManager = require("./PersistorManager") -LocalFileWriter = require("./LocalFileWriter") -logger = require("logger-sharelatex") -FileConverter = require("./FileConverter") -KeyBuilder = require("./KeyBuilder") -async = require("async") -ImageOptimiser = require("./ImageOptimiser") -Errors = require('./Errors') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let FileHandler; +const settings = require("settings-sharelatex"); +const PersistorManager = require("./PersistorManager"); +const LocalFileWriter = require("./LocalFileWriter"); +const logger = require("logger-sharelatex"); +const FileConverter = require("./FileConverter"); +const KeyBuilder = require("./KeyBuilder"); +const async = require("async"); +const ImageOptimiser = require("./ImageOptimiser"); +const Errors = require('./Errors'); -module.exports = FileHandler = +module.exports = (FileHandler = { - insertFile: (bucket, key, stream, callback)-> - convertedKey = KeyBuilder.getConvertedFolderKey key - PersistorManager.deleteDirectory bucket, convertedKey, (error) -> - return callback(error) if error? 
- PersistorManager.sendStream bucket, key, stream, callback + insertFile(bucket, key, stream, callback){ + const convertedKey = KeyBuilder.getConvertedFolderKey(key); + return PersistorManager.deleteDirectory(bucket, convertedKey, function(error) { + if (error != null) { return callback(error); } + return PersistorManager.sendStream(bucket, key, stream, callback); + }); + }, - deleteFile: (bucket, key, callback)-> - convertedKey = KeyBuilder.getConvertedFolderKey key - async.parallel [ - (done)-> PersistorManager.deleteFile bucket, key, done - (done)-> PersistorManager.deleteDirectory bucket, convertedKey, done - ], callback + deleteFile(bucket, key, callback){ + const convertedKey = KeyBuilder.getConvertedFolderKey(key); + return async.parallel([ + done => PersistorManager.deleteFile(bucket, key, done), + done => PersistorManager.deleteDirectory(bucket, convertedKey, done) + ], callback); + }, - getFile: (bucket, key, opts = {}, callback)-> - # In this call, opts can contain credentials - logger.log bucket:bucket, key:key, opts:@_scrubSecrets(opts), "getting file" - if !opts.format? and !opts.style? - @_getStandardFile bucket, key, opts, callback - else - @_getConvertedFile bucket, key, opts, callback + getFile(bucket, key, opts, callback){ + // In this call, opts can contain credentials + if (opts == null) { opts = {}; } + logger.log({bucket, key, opts:this._scrubSecrets(opts)}, "getting file"); + if ((opts.format == null) && (opts.style == null)) { + return this._getStandardFile(bucket, key, opts, callback); + } else { + return this._getConvertedFile(bucket, key, opts, callback); + } + }, - getFileSize: (bucket, key, callback) -> - PersistorManager.getFileSize(bucket, key, callback) + getFileSize(bucket, key, callback) { + return PersistorManager.getFileSize(bucket, key, callback); + }, - _getStandardFile: (bucket, key, opts, callback)-> - PersistorManager.getFileStream bucket, key, opts, (err, fileStream)-> - if err? and !(err instanceof Errors.NotFoundError) - logger.err bucket:bucket, key:key, opts:FileHandler._scrubSecrets(opts), "error getting fileStream" - callback err, fileStream + _getStandardFile(bucket, key, opts, callback){ + return PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream){ + if ((err != null) && !(err instanceof Errors.NotFoundError)) { + logger.err({bucket, key, opts:FileHandler._scrubSecrets(opts)}, "error getting fileStream"); + } + return callback(err, fileStream); + }); + }, - _getConvertedFile: (bucket, key, opts, callback)-> - convertedKey = KeyBuilder.addCachingToKey key, opts - PersistorManager.checkIfFileExists bucket, convertedKey, (err, exists)=> - if err? 
- return callback err - if exists - PersistorManager.getFileStream bucket, convertedKey, opts, callback - else - @_getConvertedFileAndCache bucket, key, convertedKey, opts, callback + _getConvertedFile(bucket, key, opts, callback){ + const convertedKey = KeyBuilder.addCachingToKey(key, opts); + return PersistorManager.checkIfFileExists(bucket, convertedKey, (err, exists)=> { + if (err != null) { + return callback(err); + } + if (exists) { + return PersistorManager.getFileStream(bucket, convertedKey, opts, callback); + } else { + return this._getConvertedFileAndCache(bucket, key, convertedKey, opts, callback); + } + }); + }, - _getConvertedFileAndCache: (bucket, key, convertedKey, opts, callback)-> - convertedFsPath = "" - originalFsPath = "" - async.series [ - (cb) => - @_convertFile bucket, key, opts, (err, fileSystemPath, originalFsPath) -> - convertedFsPath = fileSystemPath - originalFsPath = originalFsPath - cb err - (cb)-> - ImageOptimiser.compressPng convertedFsPath, cb - (cb)-> - PersistorManager.sendFile bucket, convertedKey, convertedFsPath, cb - ], (err)-> - if err? - LocalFileWriter.deleteFile convertedFsPath, -> - LocalFileWriter.deleteFile originalFsPath, -> - return callback(err) - # Send back the converted file from the local copy to avoid problems - # with the file not being present in S3 yet. As described in the - # documentation below, we have already made a 'HEAD' request in - # checkIfFileExists so we only have "eventual consistency" if we try - # to stream it from S3 here. This was a cause of many 403 errors. - # - # "Amazon S3 provides read-after-write consistency for PUTS of new - # objects in your S3 bucket in all regions with one caveat. The - # caveat is that if you make a HEAD or GET request to the key name - # (to find if the object exists) before creating the object, Amazon - # S3 provides eventual consistency for read-after-write."" - # https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel - LocalFileWriter.getStream convertedFsPath, (err, readStream) -> - return callback(err) if err? - readStream.on 'end', () -> - logger.log {convertedFsPath: convertedFsPath}, "deleting temporary file" - LocalFileWriter.deleteFile convertedFsPath, -> - callback(null, readStream) + _getConvertedFileAndCache(bucket, key, convertedKey, opts, callback){ + let convertedFsPath = ""; + const originalFsPath = ""; + return async.series([ + cb => { + return this._convertFile(bucket, key, opts, function(err, fileSystemPath, originalFsPath) { + convertedFsPath = fileSystemPath; + originalFsPath = originalFsPath; + return cb(err); + }); + }, + cb => ImageOptimiser.compressPng(convertedFsPath, cb), + cb => PersistorManager.sendFile(bucket, convertedKey, convertedFsPath, cb) + ], function(err){ + if (err != null) { + LocalFileWriter.deleteFile(convertedFsPath, function() {}); + LocalFileWriter.deleteFile(originalFsPath, function() {}); + return callback(err); + } + // Send back the converted file from the local copy to avoid problems + // with the file not being present in S3 yet. As described in the + // documentation below, we have already made a 'HEAD' request in + // checkIfFileExists so we only have "eventual consistency" if we try + // to stream it from S3 here. This was a cause of many 403 errors. + // + // "Amazon S3 provides read-after-write consistency for PUTS of new + // objects in your S3 bucket in all regions with one caveat. 
The + // caveat is that if you make a HEAD or GET request to the key name + // (to find if the object exists) before creating the object, Amazon + // S3 provides eventual consistency for read-after-write."" + // https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel + return LocalFileWriter.getStream(convertedFsPath, function(err, readStream) { + if (err != null) { return callback(err); } + readStream.on('end', function() { + logger.log({convertedFsPath}, "deleting temporary file"); + return LocalFileWriter.deleteFile(convertedFsPath, function() {}); + }); + return callback(null, readStream); + }); + }); + }, - _convertFile: (bucket, originalKey, opts, callback)-> - @_writeS3FileToDisk bucket, originalKey, opts, (err, originalFsPath)-> - if err? - return callback(err) - done = (err, destPath)-> - if err? - logger.err err:err, bucket:bucket, originalKey:originalKey, opts:FileHandler._scrubSecrets(opts), "error converting file" - return callback(err) - LocalFileWriter.deleteFile originalFsPath, -> - callback(err, destPath, originalFsPath) + _convertFile(bucket, originalKey, opts, callback){ + return this._writeS3FileToDisk(bucket, originalKey, opts, function(err, originalFsPath){ + if (err != null) { + return callback(err); + } + const done = function(err, destPath){ + if (err != null) { + logger.err({err, bucket, originalKey, opts:FileHandler._scrubSecrets(opts)}, "error converting file"); + return callback(err); + } + LocalFileWriter.deleteFile(originalFsPath, function() {}); + return callback(err, destPath, originalFsPath); + }; - logger.log opts:opts, "converting file depending on opts" + logger.log({opts}, "converting file depending on opts"); - if opts.format? - FileConverter.convert originalFsPath, opts.format, done - else if opts.style == "thumbnail" - FileConverter.thumbnail originalFsPath, done - else if opts.style == "preview" - FileConverter.preview originalFsPath, done - else - return callback(new Error("should have specified opts to convert file with #{JSON.stringify(opts)}")) + if (opts.format != null) { + return FileConverter.convert(originalFsPath, opts.format, done); + } else if (opts.style === "thumbnail") { + return FileConverter.thumbnail(originalFsPath, done); + } else if (opts.style === "preview") { + return FileConverter.preview(originalFsPath, done); + } else { + return callback(new Error(`should have specified opts to convert file with ${JSON.stringify(opts)}`)); + } + }); + }, - _writeS3FileToDisk: (bucket, key, opts, callback)-> - PersistorManager.getFileStream bucket, key, opts, (err, fileStream)-> - if err? - return callback(err) - LocalFileWriter.writeStream fileStream, key, callback + _writeS3FileToDisk(bucket, key, opts, callback){ + return PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream){ + if (err != null) { + return callback(err); + } + return LocalFileWriter.writeStream(fileStream, key, callback); + }); + }, - getDirectorySize: (bucket, project_id, callback)-> - logger.log bucket:bucket, project_id:project_id, "getting project size" - PersistorManager.directorySize bucket, project_id, (err, size)-> - if err? 
- logger.err bucket:bucket, project_id:project_id, "error getting size" - callback err, size + getDirectorySize(bucket, project_id, callback){ + logger.log({bucket, project_id}, "getting project size"); + return PersistorManager.directorySize(bucket, project_id, function(err, size){ + if (err != null) { + logger.err({bucket, project_id}, "error getting size"); + } + return callback(err, size); + }); + }, - _scrubSecrets: (opts)-> - safe = Object.assign {}, opts - delete safe.credentials - safe + _scrubSecrets(opts){ + const safe = Object.assign({}, opts); + delete safe.credentials; + return safe; + } +}); diff --git a/services/filestore/app/coffee/HealthCheckController.js b/services/filestore/app/coffee/HealthCheckController.js index 643e15adb3..4f49f4a19a 100644 --- a/services/filestore/app/coffee/HealthCheckController.js +++ b/services/filestore/app/coffee/HealthCheckController.js @@ -1,57 +1,74 @@ -fs = require("fs-extra") -path = require("path") -async = require("async") -fileConverter = require("./FileConverter") -keyBuilder = require("./KeyBuilder") -fileController = require("./FileController") -logger = require('logger-sharelatex') -settings = require("settings-sharelatex") -streamBuffers = require("stream-buffers") -_ = require('underscore') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const fs = require("fs-extra"); +const path = require("path"); +const async = require("async"); +const fileConverter = require("./FileConverter"); +const keyBuilder = require("./KeyBuilder"); +const fileController = require("./FileController"); +const logger = require('logger-sharelatex'); +const settings = require("settings-sharelatex"); +const streamBuffers = require("stream-buffers"); +const _ = require('underscore'); -checkCanStoreFiles = (callback)-> - callback = _.once(callback) - req = {params:{}, query:{}, headers:{}} - req.params.project_id = settings.health_check.project_id - req.params.file_id = settings.health_check.file_id - myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer(initialSize: 100) - res = { - send: (code) -> - if code != 200 - callback(new Error("non-200 code from getFile: #{code}")) +const checkCanStoreFiles = function(callback){ + callback = _.once(callback); + const req = {params:{}, query:{}, headers:{}}; + req.params.project_id = settings.health_check.project_id; + req.params.file_id = settings.health_check.file_id; + const myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer({initialSize: 100}); + const res = { + send(code) { + if (code !== 200) { + return callback(new Error(`non-200 code from getFile: ${code}`)); + } + } + }; + myWritableStreamBuffer.send = res.send; + return keyBuilder.userFileKey(req, res, function() { + fileController.getFile(req, myWritableStreamBuffer); + return myWritableStreamBuffer.on("close", function() { + if (myWritableStreamBuffer.size() > 0) { + return callback(); + } else { + const err = "no data in write stream buffer for health check"; + logger.err({err,}, "error performing health check"); + return callback(err); + } + }); + }); +}; + +const checkFileConvert = function(callback){ + if (!settings.enableConversions) { + return callback(); } - myWritableStreamBuffer.send = res.send - keyBuilder.userFileKey req, res, -> - fileController.getFile req, myWritableStreamBuffer - myWritableStreamBuffer.on "close", 
-> - if myWritableStreamBuffer.size() > 0 - callback() - else - err = "no data in write stream buffer for health check" - logger.err {err,}, "error performing health check" - callback(err) - -checkFileConvert = (callback)-> - if !settings.enableConversions - return callback() - imgPath = path.join(settings.path.uploadFolder, "/tiny.pdf") - async.waterfall [ - (cb)-> - fs.copy("./tiny.pdf", imgPath, cb) - (cb)-> fileConverter.thumbnail imgPath, cb - (resultPath, cb)-> fs.unlink resultPath, cb - (cb)-> fs.unlink imgPath, cb - ], callback + const imgPath = path.join(settings.path.uploadFolder, "/tiny.pdf"); + return async.waterfall([ + cb => fs.copy("./tiny.pdf", imgPath, cb), + cb => fileConverter.thumbnail(imgPath, cb), + (resultPath, cb) => fs.unlink(resultPath, cb), + cb => fs.unlink(imgPath, cb) + ], callback); +}; -module.exports = +module.exports = { - check: (req, res) -> - logger.log {}, "performing health check" - async.parallel [checkFileConvert, checkCanStoreFiles], (err)-> - if err? - logger.err err:err, "Health check: error running" - res.send 500 - else - res.send 200 + check(req, res) { + logger.log({}, "performing health check"); + return async.parallel([checkFileConvert, checkCanStoreFiles], function(err){ + if (err != null) { + logger.err({err}, "Health check: error running"); + return res.send(500); + } else { + return res.send(200); + } + }); + } +}; diff --git a/services/filestore/app/coffee/ImageOptimiser.js b/services/filestore/app/coffee/ImageOptimiser.js index 4c4a353f21..570730618d 100644 --- a/services/filestore/app/coffee/ImageOptimiser.js +++ b/services/filestore/app/coffee/ImageOptimiser.js @@ -1,25 +1,39 @@ -exec = require('child_process').exec -logger = require("logger-sharelatex") -Settings = require "settings-sharelatex" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { + exec +} = require('child_process'); +const logger = require("logger-sharelatex"); +const Settings = require("settings-sharelatex"); -module.exports = +module.exports = { - compressPng: (localPath, callback)-> - startTime = new Date() - logger.log localPath:localPath, "optimising png path" - args = "optipng #{localPath}" - opts = - timeout: 30 * 1000 + compressPng(localPath, callback){ + const startTime = new Date(); + logger.log({localPath}, "optimising png path"); + const args = `optipng ${localPath}`; + const opts = { + timeout: 30 * 1000, killSignal: "SIGKILL" - if !Settings.enableConversions - error = new Error("Image conversions are disabled") - return callback(error) - exec args, opts,(err, stdout, stderr)-> - if err? and err.signal == 'SIGKILL' - logger.warn {err: err, stderr: stderr, localPath: localPath}, "optimiser timeout reached" - err = null - else if err? 
- logger.err err:err, stderr:stderr, localPath:localPath, "something went wrong converting compressPng" - else - logger.log localPath:localPath, "finished compressPng file" - callback(err) + }; + if (!Settings.enableConversions) { + const error = new Error("Image conversions are disabled"); + return callback(error); + } + return exec(args, opts,function(err, stdout, stderr){ + if ((err != null) && (err.signal === 'SIGKILL')) { + logger.warn({err, stderr, localPath}, "optimiser timeout reached"); + err = null; + } else if (err != null) { + logger.err({err, stderr, localPath}, "something went wrong converting compressPng"); + } else { + logger.log({localPath}, "finished compressPng file"); + } + return callback(err); + }); + } +}; diff --git a/services/filestore/app/coffee/KeyBuilder.js b/services/filestore/app/coffee/KeyBuilder.js index 18f1d96952..f5c8b7d5da 100644 --- a/services/filestore/app/coffee/KeyBuilder.js +++ b/services/filestore/app/coffee/KeyBuilder.js @@ -1,50 +1,68 @@ -settings = require("settings-sharelatex") +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const settings = require("settings-sharelatex"); -module.exports = +module.exports = { - getConvertedFolderKey: (key)-> - key = "#{key}-converted-cache/" + getConvertedFolderKey(key){ + return key = `${key}-converted-cache/`; + }, - addCachingToKey: (key, opts)-> - key = @getConvertedFolderKey(key) - if opts.format? and !opts.style? - key = "#{key}format-#{opts.format}" - if opts.style? and !opts.format? - key = "#{key}style-#{opts.style}" - if opts.style? and opts.format? - key = "#{key}format-#{opts.format}-style-#{opts.style}" - return key + addCachingToKey(key, opts){ + key = this.getConvertedFolderKey(key); + if ((opts.format != null) && (opts.style == null)) { + key = `${key}format-${opts.format}`; + } + if ((opts.style != null) && (opts.format == null)) { + key = `${key}style-${opts.style}`; + } + if ((opts.style != null) && (opts.format != null)) { + key = `${key}format-${opts.format}-style-${opts.style}`; + } + return key; + }, - userFileKey: (req, res, next)-> - {project_id, file_id} = req.params - req.key = "#{project_id}/#{file_id}" - req.bucket = settings.filestore.stores.user_files - next() + userFileKey(req, res, next){ + const {project_id, file_id} = req.params; + req.key = `${project_id}/${file_id}`; + req.bucket = settings.filestore.stores.user_files; + return next(); + }, - publicFileKey: (req, res, next)-> - {project_id, public_file_id} = req.params - if not settings.filestore.stores.public_files? - res.status(501).send("public files not available") - else - req.key = "#{project_id}/#{public_file_id}" - req.bucket = settings.filestore.stores.public_files - next() + publicFileKey(req, res, next){ + const {project_id, public_file_id} = req.params; + if ((settings.filestore.stores.public_files == null)) { + return res.status(501).send("public files not available"); + } else { + req.key = `${project_id}/${public_file_id}`; + req.bucket = settings.filestore.stores.public_files; + return next(); + } + }, - templateFileKey: (req, res, next)-> - {template_id, format, version, sub_type} = req.params - req.key = "#{template_id}/v/#{version}/#{format}" - if sub_type? 
- req.key = "#{req.key}/#{sub_type}" - req.bucket = settings.filestore.stores.template_files - req.version = version - opts = req.query - next() + templateFileKey(req, res, next){ + const {template_id, format, version, sub_type} = req.params; + req.key = `${template_id}/v/${version}/${format}`; + if (sub_type != null) { + req.key = `${req.key}/${sub_type}`; + } + req.bucket = settings.filestore.stores.template_files; + req.version = version; + const opts = req.query; + return next(); + }, - publicProjectKey: (req, res, next)-> - {project_id} = req.params - req.project_id = project_id - req.bucket = settings.filestore.stores.user_files - next() + publicProjectKey(req, res, next){ + const {project_id} = req.params; + req.project_id = project_id; + req.bucket = settings.filestore.stores.user_files; + return next(); + } +}; diff --git a/services/filestore/app/coffee/LocalFileWriter.js b/services/filestore/app/coffee/LocalFileWriter.js index 7046da15e7..a6457de526 100644 --- a/services/filestore/app/coffee/LocalFileWriter.js +++ b/services/filestore/app/coffee/LocalFileWriter.js @@ -1,56 +1,76 @@ -fs = require("fs") -uuid = require('node-uuid') -path = require("path") -_ = require("underscore") -logger = require("logger-sharelatex") -metrics = require("metrics-sharelatex") -Settings = require("settings-sharelatex") -Errors = require "./Errors" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const fs = require("fs"); +const uuid = require('node-uuid'); +const path = require("path"); +const _ = require("underscore"); +const logger = require("logger-sharelatex"); +const metrics = require("metrics-sharelatex"); +const Settings = require("settings-sharelatex"); +const Errors = require("./Errors"); -module.exports = +module.exports = { - writeStream: (stream, key, callback)-> - timer = new metrics.Timer("writingFile") - callback = _.once callback - fsPath = @_getPath(key) - logger.log fsPath:fsPath, "writing file locally" - writeStream = fs.createWriteStream(fsPath) - writeStream.on "finish", -> - timer.done() - logger.log fsPath:fsPath, "finished writing file locally" - callback(null, fsPath) - writeStream.on "error", (err)-> - logger.err err:err, fsPath:fsPath, "problem writing file locally, with write stream" - callback err - stream.on "error", (err)-> - logger.log err:err, fsPath:fsPath, "problem writing file locally, with read stream" - callback err - stream.pipe writeStream + writeStream(stream, key, callback){ + const timer = new metrics.Timer("writingFile"); + callback = _.once(callback); + const fsPath = this._getPath(key); + logger.log({fsPath}, "writing file locally"); + const writeStream = fs.createWriteStream(fsPath); + writeStream.on("finish", function() { + timer.done(); + logger.log({fsPath}, "finished writing file locally"); + return callback(null, fsPath); + }); + writeStream.on("error", function(err){ + logger.err({err, fsPath}, "problem writing file locally, with write stream"); + return callback(err); + }); + stream.on("error", function(err){ + logger.log({err, fsPath}, "problem writing file locally, with read stream"); + return callback(err); + }); + return stream.pipe(writeStream); + }, - getStream: (fsPath, _callback = (err, res)->) -> - callback = _.once _callback - timer = new metrics.Timer("readingFile") - logger.log fsPath:fsPath, "reading file locally" - readStream = 
fs.createReadStream(fsPath) - readStream.on "end", -> - timer.done() - logger.log fsPath:fsPath, "finished reading file locally" - readStream.on "error", (err)-> - logger.err err:err, fsPath:fsPath, "problem reading file locally, with read stream" - if err.code == 'ENOENT' - callback new Errors.NotFoundError(err.message), null - else - callback err - callback null, readStream + getStream(fsPath, _callback) { + if (_callback == null) { _callback = function(err, res){}; } + const callback = _.once(_callback); + const timer = new metrics.Timer("readingFile"); + logger.log({fsPath}, "reading file locally"); + const readStream = fs.createReadStream(fsPath); + readStream.on("end", function() { + timer.done(); + return logger.log({fsPath}, "finished reading file locally"); + }); + readStream.on("error", function(err){ + logger.err({err, fsPath}, "problem reading file locally, with read stream"); + if (err.code === 'ENOENT') { + return callback(new Errors.NotFoundError(err.message), null); + } else { + return callback(err); + } + }); + return callback(null, readStream); + }, - deleteFile: (fsPath, callback)-> - if !fsPath? or fsPath == "" - return callback() - logger.log fsPath:fsPath, "removing local temp file" - fs.unlink fsPath, callback + deleteFile(fsPath, callback){ + if ((fsPath == null) || (fsPath === "")) { + return callback(); + } + logger.log({fsPath}, "removing local temp file"); + return fs.unlink(fsPath, callback); + }, - _getPath : (key)-> - if !key? - key = uuid.v1() - key = key.replace(/\//g,"-") - path.join(Settings.path.uploadFolder, key) + _getPath(key){ + if ((key == null)) { + key = uuid.v1(); + } + key = key.replace(/\//g,"-"); + return path.join(Settings.path.uploadFolder, key); + } +}; diff --git a/services/filestore/app/coffee/PersistorManager.js b/services/filestore/app/coffee/PersistorManager.js index 85c9dd83cd..993a874d6a 100644 --- a/services/filestore/app/coffee/PersistorManager.js +++ b/services/filestore/app/coffee/PersistorManager.js @@ -1,16 +1,28 @@ -settings = require("settings-sharelatex") -logger = require("logger-sharelatex") +/* + * decaffeinate suggestions: + * DS103: Rewrite code to no longer use __guard__ + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const settings = require("settings-sharelatex"); +const logger = require("logger-sharelatex"); -# assume s3 if none specified -settings?.filestore?.backend ||= "s3" +// assume s3 if none specified +__guard__(settings != null ? settings.filestore : undefined, x => x.backend || (settings.filestore.backend = "s3")); -logger.log backend:settings?.filestore?.backend, "Loading backend" -module.exports = switch settings?.filestore?.backend - when "aws-sdk" - require "./AWSSDKPersistorManager" - when "s3" - require("./S3PersistorManager") - when "fs" - require("./FSPersistorManager") - else - throw new Error( "Unknown filestore backend: #{settings.filestore.backend}" ) +logger.log({backend:__guard__(settings != null ? settings.filestore : undefined, x1 => x1.backend)}, "Loading backend"); +module.exports = (() => { switch (__guard__(settings != null ? 
settings.filestore : undefined, x2 => x2.backend)) { + case "aws-sdk": + return require("./AWSSDKPersistorManager"); + case "s3": + return require("./S3PersistorManager"); + case "fs": + return require("./FSPersistorManager"); + default: + throw new Error( `Unknown filestore backend: ${settings.filestore.backend}` ); +} })(); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; +} \ No newline at end of file diff --git a/services/filestore/app/coffee/S3PersistorManager.js b/services/filestore/app/coffee/S3PersistorManager.js index 89522a4643..a8edbc6844 100644 --- a/services/filestore/app/coffee/S3PersistorManager.js +++ b/services/filestore/app/coffee/S3PersistorManager.js @@ -1,238 +1,290 @@ -# This module is the one which is used in production. It needs to be migrated -# to use aws-sdk throughout, see the comments in AWSSDKPersistorManager for -# details. The knox library is unmaintained and has bugs. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// This module is the one which is used in production. It needs to be migrated +// to use aws-sdk throughout, see the comments in AWSSDKPersistorManager for +// details. The knox library is unmaintained and has bugs. -http = require('http') -http.globalAgent.maxSockets = 300 -https = require('https') -https.globalAgent.maxSockets = 300 -settings = require("settings-sharelatex") -request = require("request") -logger = require("logger-sharelatex") -metrics = require("metrics-sharelatex") -fs = require("fs") -knox = require("knox") -path = require("path") -LocalFileWriter = require("./LocalFileWriter") -Errors = require("./Errors") -_ = require("underscore") -awsS3 = require "aws-sdk/clients/s3" -URL = require('url') +const http = require('http'); +http.globalAgent.maxSockets = 300; +const https = require('https'); +https.globalAgent.maxSockets = 300; +const settings = require("settings-sharelatex"); +const request = require("request"); +const logger = require("logger-sharelatex"); +const metrics = require("metrics-sharelatex"); +const fs = require("fs"); +const knox = require("knox"); +const path = require("path"); +const LocalFileWriter = require("./LocalFileWriter"); +const Errors = require("./Errors"); +const _ = require("underscore"); +const awsS3 = require("aws-sdk/clients/s3"); +const URL = require('url'); -thirtySeconds = 30 * 1000 +const thirtySeconds = 30 * 1000; -buildDefaultOptions = (bucketName, method, key)-> - if settings.filestore.s3.endpoint - endpoint = "#{settings.filestore.s3.endpoint}/#{bucketName}" - else - endpoint = "https://#{bucketName}.s3.amazonaws.com" +const buildDefaultOptions = function(bucketName, method, key){ + let endpoint; + if (settings.filestore.s3.endpoint) { + endpoint = `${settings.filestore.s3.endpoint}/${bucketName}`; + } else { + endpoint = `https://${bucketName}.s3.amazonaws.com`; + } return { - aws: - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret + aws: { + key: settings.filestore.s3.key, + secret: settings.filestore.s3.secret, bucket: bucketName - method: method - timeout: thirtySeconds - uri:"#{endpoint}/#{key}" + }, + method, + timeout: thirtySeconds, + uri:`${endpoint}/${key}` + }; +}; + +const getS3Options = function(credentials) { + const 
options = { + credentials: { + accessKeyId: credentials.auth_key, + secretAccessKey: credentials.auth_secret + } + }; + + if (settings.filestore.s3.endpoint) { + const endpoint = URL.parse(settings.filestore.s3.endpoint); + options.endpoint = settings.filestore.s3.endpoint; + options.sslEnabled = endpoint.protocol === 'https'; } -getS3Options = (credentials) -> - options = - credentials: - accessKeyId: credentials.auth_key - secretAccessKey: credentials.auth_secret + return options; +}; - if settings.filestore.s3.endpoint - endpoint = URL.parse(settings.filestore.s3.endpoint) - options.endpoint = settings.filestore.s3.endpoint - options.sslEnabled = endpoint.protocol == 'https' - - return options - -defaultS3Client = new awsS3(getS3Options({ +const defaultS3Client = new awsS3(getS3Options({ auth_key: settings.filestore.s3.key, auth_secret: settings.filestore.s3.secret -})) +})); -getS3Client = (credentials) -> - if credentials? - return new awsS3(getS3Options(credentials)) - else - return defaultS3Client +const getS3Client = function(credentials) { + if (credentials != null) { + return new awsS3(getS3Options(credentials)); + } else { + return defaultS3Client; + } +}; -getKnoxClient = (bucketName) => - options = - key: settings.filestore.s3.key - secret: settings.filestore.s3.secret +const getKnoxClient = bucketName => { + const options = { + key: settings.filestore.s3.key, + secret: settings.filestore.s3.secret, bucket: bucketName - if settings.filestore.s3.endpoint - endpoint = URL.parse(settings.filestore.s3.endpoint) - options.endpoint = endpoint.hostname - options.port = endpoint.port - return knox.createClient(options) + }; + if (settings.filestore.s3.endpoint) { + const endpoint = URL.parse(settings.filestore.s3.endpoint); + options.endpoint = endpoint.hostname; + options.port = endpoint.port; + } + return knox.createClient(options); +}; -module.exports = +module.exports = { - sendFile: (bucketName, key, fsPath, callback)-> - s3Client = getKnoxClient(bucketName) - uploaded = 0 - putEventEmiter = s3Client.putFile fsPath, key, (err, res)-> - metrics.count 's3.egress', uploaded - if err? - logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath,"something went wrong uploading file to s3" - return callback(err) - if !res? 
- logger.err err:err, res:res, bucketName:bucketName, key:key, fsPath:fsPath, "no response from s3 put file" - return callback("no response from put file") - if res.statusCode != 200 - logger.err bucketName:bucketName, key:key, fsPath:fsPath, "non 200 response from s3 putting file" - return callback("non 200 response from s3 on put file") - logger.log res:res, bucketName:bucketName, key:key, fsPath:fsPath,"file uploaded to s3" - callback(err) - putEventEmiter.on "error", (err)-> - logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error emmited on put of file" - callback err - putEventEmiter.on "progress", (progress)-> - uploaded = progress.written + sendFile(bucketName, key, fsPath, callback){ + const s3Client = getKnoxClient(bucketName); + let uploaded = 0; + const putEventEmiter = s3Client.putFile(fsPath, key, function(err, res){ + metrics.count('s3.egress', uploaded); + if (err != null) { + logger.err({err, bucketName, key, fsPath},"something went wrong uploading file to s3"); + return callback(err); + } + if ((res == null)) { + logger.err({err, res, bucketName, key, fsPath}, "no response from s3 put file"); + return callback("no response from put file"); + } + if (res.statusCode !== 200) { + logger.err({bucketName, key, fsPath}, "non 200 response from s3 putting file"); + return callback("non 200 response from s3 on put file"); + } + logger.log({res, bucketName, key, fsPath},"file uploaded to s3"); + return callback(err); + }); + putEventEmiter.on("error", function(err){ + logger.err({err, bucketName, key, fsPath}, "error emmited on put of file"); + return callback(err); + }); + return putEventEmiter.on("progress", progress => uploaded = progress.written); + }, - sendStream: (bucketName, key, readStream, callback)-> - logger.log bucketName:bucketName, key:key, "sending file to s3" - readStream.on "error", (err)-> - logger.err bucketName:bucketName, key:key, "error on stream to send to s3" - LocalFileWriter.writeStream readStream, null, (err, fsPath)=> - if err? 
- logger.err bucketName:bucketName, key:key, fsPath:fsPath, err:err, "something went wrong writing stream to disk" - return callback(err) - @sendFile bucketName, key, fsPath, (err) -> - # delete the temporary file created above and return the original error - LocalFileWriter.deleteFile fsPath, () -> - callback(err) + sendStream(bucketName, key, readStream, callback){ + logger.log({bucketName, key}, "sending file to s3"); + readStream.on("error", err => logger.err({bucketName, key}, "error on stream to send to s3")); + return LocalFileWriter.writeStream(readStream, null, (err, fsPath)=> { + if (err != null) { + logger.err({bucketName, key, fsPath, err}, "something went wrong writing stream to disk"); + return callback(err); + } + return this.sendFile(bucketName, key, fsPath, err => // delete the temporary file created above and return the original error + LocalFileWriter.deleteFile(fsPath, () => callback(err))); + }); + }, - # opts may be {start: Number, end: Number} - getFileStream: (bucketName, key, opts, callback = (err, res)->)-> - opts = opts || {} - callback = _.once(callback) - logger.log bucketName:bucketName, key:key, "getting file from s3" + // opts may be {start: Number, end: Number} + getFileStream(bucketName, key, opts, callback){ + if (callback == null) { callback = function(err, res){}; } + opts = opts || {}; + callback = _.once(callback); + logger.log({bucketName, key}, "getting file from s3"); - s3 = getS3Client(opts.credentials) - s3Params = { - Bucket: bucketName + const s3 = getS3Client(opts.credentials); + const s3Params = { + Bucket: bucketName, Key: key + }; + if ((opts.start != null) && (opts.end != null)) { + s3Params['Range'] = `bytes=${opts.start}-${opts.end}`; } - if opts.start? and opts.end? - s3Params['Range'] = "bytes=#{opts.start}-#{opts.end}" - s3Request = s3.getObject(s3Params) + const s3Request = s3.getObject(s3Params); - s3Request.on 'httpHeaders', (statusCode, headers, response, statusMessage) => - if statusCode in [403, 404] - # S3 returns a 403 instead of a 404 when the user doesn't have - # permission to list the bucket contents. - logger.log({ bucketName: bucketName, key: key }, "file not found in s3") - return callback(new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}"), null) - if statusCode not in [200, 206] - logger.log({bucketName: bucketName, key: key }, "error getting file from s3: #{statusCode}") - return callback(new Error("Got non-200 response from S3: #{statusCode} #{statusMessage}"), null) - stream = response.httpResponse.createUnbufferedStream() - stream.on 'data', (data) -> - metrics.count 's3.ingress', data.byteLength + s3Request.on('httpHeaders', (statusCode, headers, response, statusMessage) => { + if ([403, 404].includes(statusCode)) { + // S3 returns a 403 instead of a 404 when the user doesn't have + // permission to list the bucket contents. 
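// Both this handler and getFileSize below fold the 403/404 cases into the
// shared Errors.NotFoundError type, so callers only need an instanceof check.
// A minimal caller-side sketch (variable names are illustrative only):
//
//   S3PersistorManager.getFileStream(bucket, key, {}, (err, stream) => {
//     if (err instanceof Errors.NotFoundError) { return res.send(404) }
//     if (err) { return res.send(500) }
//     stream.pipe(res)
//   })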
+ logger.log({ bucketName, key }, "file not found in s3"); + return callback(new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`), null); + } + if (![200, 206].includes(statusCode)) { + logger.log({bucketName, key }, `error getting file from s3: ${statusCode}`); + return callback(new Error(`Got non-200 response from S3: ${statusCode} ${statusMessage}`), null); + } + const stream = response.httpResponse.createUnbufferedStream(); + stream.on('data', data => metrics.count('s3.ingress', data.byteLength)); - callback(null, stream) + return callback(null, stream); + }); - s3Request.on 'error', (err) => - logger.err({ err: err, bucketName: bucketName, key: key }, "error getting file stream from s3") - callback(err) + s3Request.on('error', err => { + logger.err({ err, bucketName, key }, "error getting file stream from s3"); + return callback(err); + }); - s3Request.send() + return s3Request.send(); + }, - getFileSize: (bucketName, key, callback) -> - logger.log({ bucketName: bucketName, key: key }, "getting file size from S3") - s3 = getS3Client() - s3.headObject { Bucket: bucketName, Key: key }, (err, data) -> - if err? - if err.statusCode in [403, 404] - # S3 returns a 403 instead of a 404 when the user doesn't have - # permission to list the bucket contents. + getFileSize(bucketName, key, callback) { + logger.log({ bucketName, key }, "getting file size from S3"); + const s3 = getS3Client(); + return s3.headObject({ Bucket: bucketName, Key: key }, function(err, data) { + if (err != null) { + if ([403, 404].includes(err.statusCode)) { + // S3 returns a 403 instead of a 404 when the user doesn't have + // permission to list the bucket contents. logger.log({ - bucketName: bucketName, - key: key - }, "file not found in s3") + bucketName, + key + }, "file not found in s3"); callback( - new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}") - ) - else + new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`) + ); + } else { logger.err({ - bucketName: bucketName, - key: key, - err: err - }, "error performing S3 HeadObject") - callback(err) - return - callback(null, data.ContentLength) + bucketName, + key, + err + }, "error performing S3 HeadObject"); + callback(err); + } + return; + } + return callback(null, data.ContentLength); + }); + }, - copyFile: (bucketName, sourceKey, destKey, callback)-> - logger.log bucketName:bucketName, sourceKey:sourceKey, destKey: destKey, "copying file in s3" - source = bucketName + '/' + sourceKey - # use the AWS SDK instead of knox due to problems with error handling (https://github.com/Automattic/knox/issues/114) - s3 = getS3Client() - s3.copyObject {Bucket: bucketName, Key: destKey, CopySource: source}, (err) -> - if err? 
- if err.code is 'NoSuchKey' - logger.err bucketName:bucketName, sourceKey:sourceKey, "original file not found in s3 when copying" - callback(new Errors.NotFoundError("original file not found in S3 when copying")) - else - logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in aws" - callback(err) - else - callback() + copyFile(bucketName, sourceKey, destKey, callback){ + logger.log({bucketName, sourceKey, destKey}, "copying file in s3"); + const source = bucketName + '/' + sourceKey; + // use the AWS SDK instead of knox due to problems with error handling (https://github.com/Automattic/knox/issues/114) + const s3 = getS3Client(); + return s3.copyObject({Bucket: bucketName, Key: destKey, CopySource: source}, function(err) { + if (err != null) { + if (err.code === 'NoSuchKey') { + logger.err({bucketName, sourceKey}, "original file not found in s3 when copying"); + return callback(new Errors.NotFoundError("original file not found in S3 when copying")); + } else { + logger.err({err, bucketName, sourceKey, destKey}, "something went wrong copying file in aws"); + return callback(err); + } + } else { + return callback(); + } + }); + }, - deleteFile: (bucketName, key, callback)-> - logger.log bucketName:bucketName, key:key, "delete file in s3" - options = buildDefaultOptions(bucketName, "delete", key) - request options, (err, res)-> - if err? - logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong deleting file in aws" - callback(err) + deleteFile(bucketName, key, callback){ + logger.log({bucketName, key}, "delete file in s3"); + const options = buildDefaultOptions(bucketName, "delete", key); + return request(options, function(err, res){ + if (err != null) { + logger.err({err, res, bucketName, key}, "something went wrong deleting file in aws"); + } + return callback(err); + }); + }, - deleteDirectory: (bucketName, key, _callback)-> - # deleteMultiple can call the callback multiple times so protect against this. - callback = (args...) -> - _callback(args...) - _callback = () -> + deleteDirectory(bucketName, key, _callback){ + // deleteMultiple can call the callback multiple times so protect against this. + const callback = function(...args) { + _callback(...Array.from(args || [])); + return _callback = function() {}; + }; - logger.log key: key, bucketName: bucketName, "deleting directory" - s3Client = getKnoxClient(bucketName) - s3Client.list prefix:key, (err, data)-> - if err? - logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in aws" - return callback(err) - keys = _.map data.Contents, (entry)-> - return entry.Key - s3Client.deleteMultiple keys, callback + logger.log({key, bucketName}, "deleting directory"); + const s3Client = getKnoxClient(bucketName); + return s3Client.list({prefix:key}, function(err, data){ + if (err != null) { + logger.err({err, bucketName, key}, "something went wrong listing prefix in aws"); + return callback(err); + } + const keys = _.map(data.Contents, entry => entry.Key); + return s3Client.deleteMultiple(keys, callback); + }); + }, - checkIfFileExists:(bucketName, key, callback)-> - logger.log bucketName:bucketName, key:key, "checking if file exists in s3" - options = buildDefaultOptions(bucketName, "head", key) - request options, (err, res)-> - if err? - logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong checking file in aws" - return callback(err) - if !res? 
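// deleteDirectory above defends against knox's deleteMultiple invoking its
// callback more than once by swapping the real callback for a no-op after the
// first call. The same idea as a standalone helper — a minimal sketch, not
// code from this patch (underscore's _.once, already required above, gives
// equivalent behaviour):

function callOnce(fn) {
  let called = false
  return function(...args) {
    if (called) { return }   // later invocations are silently dropped
    called = true
    return fn(...args)
  }
}
// e.g. s3Client.deleteMultiple(keys, callOnce(callback))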
- logger.err err:err, res:res, bucketName:bucketName, key:key, "no response object returned when checking if file exists" - err = new Error("no response from s3 #{bucketName} #{key}") - return callback(err) - exists = res.statusCode == 200 - logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exsists in s3" - callback(err, exists) + checkIfFileExists(bucketName, key, callback){ + logger.log({bucketName, key}, "checking if file exists in s3"); + const options = buildDefaultOptions(bucketName, "head", key); + return request(options, function(err, res){ + if (err != null) { + logger.err({err, res, bucketName, key}, "something went wrong checking file in aws"); + return callback(err); + } + if ((res == null)) { + logger.err({err, res, bucketName, key}, "no response object returned when checking if file exists"); + err = new Error(`no response from s3 ${bucketName} ${key}`); + return callback(err); + } + const exists = res.statusCode === 200; + logger.log({bucketName, key, exists}, "checked if file exsists in s3"); + return callback(err, exists); + }); + }, - directorySize:(bucketName, key, callback)-> - logger.log bucketName:bucketName, key:key, "get project size in s3" - s3Client = getKnoxClient(bucketName) - s3Client.list prefix:key, (err, data)-> - if err? - logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in aws" - return callback(err) - totalSize = 0 - _.each data.Contents, (entry)-> - totalSize += entry.Size - logger.log totalSize:totalSize, "total size" - callback null, totalSize + directorySize(bucketName, key, callback){ + logger.log({bucketName, key}, "get project size in s3"); + const s3Client = getKnoxClient(bucketName); + return s3Client.list({prefix:key}, function(err, data){ + if (err != null) { + logger.err({err, bucketName, key}, "something went wrong listing prefix in aws"); + return callback(err); + } + let totalSize = 0; + _.each(data.Contents, entry => totalSize += entry.Size); + logger.log({totalSize}, "total size"); + return callback(null, totalSize); + }); + } +}; diff --git a/services/filestore/app/coffee/SafeExec.js b/services/filestore/app/coffee/SafeExec.js index 3559d35c95..b64d5376d5 100644 --- a/services/filestore/app/coffee/SafeExec.js +++ b/services/filestore/app/coffee/SafeExec.js @@ -1,48 +1,60 @@ -_ = require("underscore") -logger = require("logger-sharelatex") -child_process = require('child_process') -Settings = require "settings-sharelatex" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const _ = require("underscore"); +const logger = require("logger-sharelatex"); +const child_process = require('child_process'); +const Settings = require("settings-sharelatex"); -# execute a command in the same way as 'exec' but with a timeout that -# kills all child processes -# -# we spawn the command with 'detached:true' to make a new process -# group, then we can kill everything in that process group. +// execute a command in the same way as 'exec' but with a timeout that +// kills all child processes +// +// we spawn the command with 'detached:true' to make a new process +// group, then we can kill everything in that process group. 
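// The module below is used like child_process.exec, except the command is an
// argv array and, on timeout, the whole process group is killed: spawning
// with {detached: true} puts the child in its own group, and
// process.kill(-child.pid, signal) signals every process in that group.
// A hypothetical call (the command shown is illustrative only):
//
//   const safeExec = require('./SafeExec')
//   safeExec(['convert', 'input.png', 'output.jpg'],
//     { timeout: 30 * 1000, killSignal: 'SIGTERM' },
//     (err, stdout, stderr) => { /* handle result */ })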
-module.exports = (command, options, callback = (err, stdout, stderr) ->) -> - if !Settings.enableConversions - error = new Error("Image conversions are disabled") - return callback(error) +module.exports = function(command, options, callback) { + if (callback == null) { callback = function(err, stdout, stderr) {}; } + if (!Settings.enableConversions) { + const error = new Error("Image conversions are disabled"); + return callback(error); + } - # options are {timeout: number-of-milliseconds, killSignal: signal-name} - [cmd, args...] = command + // options are {timeout: number-of-milliseconds, killSignal: signal-name} + const [cmd, ...args] = Array.from(command); - child = child_process.spawn cmd, args, {detached:true} - stdout = "" - stderr = "" + const child = child_process.spawn(cmd, args, {detached:true}); + let stdout = ""; + let stderr = ""; - cleanup = _.once (err) -> - clearTimeout killTimer if killTimer? - callback err, stdout, stderr + const cleanup = _.once(function(err) { + if (killTimer != null) { clearTimeout(killTimer); } + return callback(err, stdout, stderr); + }); - if options.timeout? - killTimer = setTimeout () -> - try - # use negative process id to kill process group - process.kill -child.pid, options.killSignal || "SIGTERM" - catch error - logger.log process: child.pid, kill_error: error, "error killing process" - , options.timeout + if (options.timeout != null) { + var killTimer = setTimeout(function() { + try { + // use negative process id to kill process group + return process.kill(-child.pid, options.killSignal || "SIGTERM"); + } catch (error) { + return logger.log({process: child.pid, kill_error: error}, "error killing process"); + } + } + , options.timeout); + } - child.on 'close', (code, signal) -> - err = if code then new Error("exit status #{code}") else signal - cleanup err + child.on('close', function(code, signal) { + const err = code ? 
new Error(`exit status ${code}`) : signal; + return cleanup(err); + }); - child.on 'error', (err) -> - cleanup err + child.on('error', err => cleanup(err)); - child.stdout.on 'data', (chunk) -> - stdout += chunk + child.stdout.on('data', chunk => stdout += chunk); - child.stderr.on 'data', (chunk) -> - stderr += chunk + return child.stderr.on('data', chunk => stderr += chunk); +}; From fcbb13b5bfeed7b5953c896414d5b3293275db86 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Mon, 16 Dec 2019 10:24:37 +0000 Subject: [PATCH 327/555] decaffeinate: Run post-processing cleanups on AWSSDKPersistorManager.coffee and 13 other files --- .../filestore/app/coffee/AWSSDKPersistorManager.js | 8 +++++++- services/filestore/app/coffee/BucketController.js | 5 +++++ services/filestore/app/coffee/Errors.js | 6 ++++++ services/filestore/app/coffee/FSPersistorManager.js | 7 +++++++ services/filestore/app/coffee/FileController.js | 6 ++++++ services/filestore/app/coffee/FileConverter.js | 5 +++++ services/filestore/app/coffee/FileHandler.js | 7 +++++++ .../filestore/app/coffee/HealthCheckController.js | 2 ++ services/filestore/app/coffee/ImageOptimiser.js | 5 +++++ services/filestore/app/coffee/KeyBuilder.js | 7 +++++++ services/filestore/app/coffee/LocalFileWriter.js | 5 +++++ services/filestore/app/coffee/PersistorManager.js | 2 ++ services/filestore/app/coffee/S3PersistorManager.js | 12 +++++++++++- services/filestore/app/coffee/SafeExec.js | 7 +++++++ 14 files changed, 82 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.js b/services/filestore/app/coffee/AWSSDKPersistorManager.js index c263936073..8220af4871 100644 --- a/services/filestore/app/coffee/AWSSDKPersistorManager.js +++ b/services/filestore/app/coffee/AWSSDKPersistorManager.js @@ -1,3 +1,9 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -48,7 +54,7 @@ module.exports = { Key: key }; if ((opts.start != null) && (opts.end != null)) { - params['Range'] = `bytes=${opts.start}-${opts.end}`; + params.Range = `bytes=${opts.start}-${opts.end}`; } const request = s3.getObject(params); const stream = request.createReadStream(); diff --git a/services/filestore/app/coffee/BucketController.js b/services/filestore/app/coffee/BucketController.js index ecd4f9a54f..d2c4c06fae 100644 --- a/services/filestore/app/coffee/BucketController.js +++ b/services/filestore/app/coffee/BucketController.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/app/coffee/Errors.js b/services/filestore/app/coffee/Errors.js index c4f8f7004c..4a9f65bb4b 100644 --- a/services/filestore/app/coffee/Errors.js +++ b/services/filestore/app/coffee/Errors.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-proto, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
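// The NotFoundError below is a plain function that rewires __proto__ so its
// instances pass `instanceof` checks against both NotFoundError and Error.
// That is the contract the controllers rely on when mapping persistor errors
// to HTTP status codes, e.g. (caller-side sketch, names illustrative):
//
//   if (err instanceof Errors.NotFoundError) { res.send(404) } else { res.send(500) }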
let Errors; var NotFoundError = function(message) { const error = new Error(message); diff --git a/services/filestore/app/coffee/FSPersistorManager.js b/services/filestore/app/coffee/FSPersistorManager.js index c4792e6215..c90d4b6dfe 100644 --- a/services/filestore/app/coffee/FSPersistorManager.js +++ b/services/filestore/app/coffee/FSPersistorManager.js @@ -1,3 +1,10 @@ +/* eslint-disable + handle-callback-err, + no-unreachable, + node/no-deprecated-api, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/app/coffee/FileController.js b/services/filestore/app/coffee/FileController.js index f40d4a0317..51251ba010 100644 --- a/services/filestore/app/coffee/FileController.js +++ b/services/filestore/app/coffee/FileController.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/app/coffee/FileConverter.js b/services/filestore/app/coffee/FileConverter.js index b35265c22b..e241cd8275 100644 --- a/services/filestore/app/coffee/FileConverter.js +++ b/services/filestore/app/coffee/FileConverter.js @@ -1,3 +1,8 @@ +/* eslint-disable + camelcase, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/app/coffee/FileHandler.js b/services/filestore/app/coffee/FileHandler.js index 548e206a12..c22a276b65 100644 --- a/services/filestore/app/coffee/FileHandler.js +++ b/services/filestore/app/coffee/FileHandler.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + no-self-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/app/coffee/HealthCheckController.js b/services/filestore/app/coffee/HealthCheckController.js index 4f49f4a19a..e360891609 100644 --- a/services/filestore/app/coffee/HealthCheckController.js +++ b/services/filestore/app/coffee/HealthCheckController.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/app/coffee/ImageOptimiser.js b/services/filestore/app/coffee/ImageOptimiser.js index 570730618d..0126786167 100644 --- a/services/filestore/app/coffee/ImageOptimiser.js +++ b/services/filestore/app/coffee/ImageOptimiser.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/app/coffee/KeyBuilder.js b/services/filestore/app/coffee/KeyBuilder.js index f5c8b7d5da..5d3a36e6b1 100644 --- a/services/filestore/app/coffee/KeyBuilder.js +++ b/services/filestore/app/coffee/KeyBuilder.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/app/coffee/LocalFileWriter.js b/services/filestore/app/coffee/LocalFileWriter.js index a6457de526..b0bfedcc04 100644 --- a/services/filestore/app/coffee/LocalFileWriter.js +++ b/services/filestore/app/coffee/LocalFileWriter.js @@ -1,3 +1,8 @@ +/* eslint-disable + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/app/coffee/PersistorManager.js b/services/filestore/app/coffee/PersistorManager.js index 993a874d6a..95a4122591 100644 --- a/services/filestore/app/coffee/PersistorManager.js +++ b/services/filestore/app/coffee/PersistorManager.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS103: Rewrite code to no longer use __guard__ diff --git a/services/filestore/app/coffee/S3PersistorManager.js b/services/filestore/app/coffee/S3PersistorManager.js index a8edbc6844..0face2de53 100644 --- a/services/filestore/app/coffee/S3PersistorManager.js +++ b/services/filestore/app/coffee/S3PersistorManager.js @@ -1,3 +1,13 @@ +/* eslint-disable + handle-callback-err, + new-cap, + no-return-assign, + no-unused-vars, + node/no-deprecated-api, + standard/no-callback-literal, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -146,7 +156,7 @@ module.exports = { Key: key }; if ((opts.start != null) && (opts.end != null)) { - s3Params['Range'] = `bytes=${opts.start}-${opts.end}`; + s3Params.Range = `bytes=${opts.start}-${opts.end}`; } const s3Request = s3.getObject(s3Params); diff --git a/services/filestore/app/coffee/SafeExec.js b/services/filestore/app/coffee/SafeExec.js index b64d5376d5..4f13785460 100644 --- a/services/filestore/app/coffee/SafeExec.js +++ b/services/filestore/app/coffee/SafeExec.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from From 3718c90c7a5b8fa676fd00b047c5edbed930d911 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 10:24:40 +0000 Subject: [PATCH 328/555] Rename app/coffee dir to app/js --- services/filestore/.gitignore | 17 ----------------- .../{coffee => js}/AWSSDKPersistorManager.js | 0 .../app/{coffee => js}/BucketController.js | 0 services/filestore/app/{coffee => js}/Errors.js | 0 .../app/{coffee => js}/FSPersistorManager.js | 0 .../app/{coffee => js}/FileController.js | 0 .../app/{coffee => js}/FileConverter.js | 0 .../filestore/app/{coffee => js}/FileHandler.js | 0 .../app/{coffee => js}/HealthCheckController.js | 0 .../app/{coffee => js}/ImageOptimiser.js | 0 .../filestore/app/{coffee => js}/KeyBuilder.js | 0 .../app/{coffee => js}/LocalFileWriter.js | 0 .../app/{coffee => js}/PersistorManager.js | 0 .../app/{coffee => js}/S3PersistorManager.js | 0 .../filestore/app/{coffee => js}/SafeExec.js | 0 15 files changed, 17 deletions(-) rename services/filestore/app/{coffee => js}/AWSSDKPersistorManager.js (100%) rename services/filestore/app/{coffee => js}/BucketController.js (100%) rename services/filestore/app/{coffee => js}/Errors.js (100%) rename services/filestore/app/{coffee => js}/FSPersistorManager.js (100%) rename services/filestore/app/{coffee => js}/FileController.js (100%) rename services/filestore/app/{coffee => js}/FileConverter.js (100%) rename services/filestore/app/{coffee => js}/FileHandler.js (100%) rename services/filestore/app/{coffee => js}/HealthCheckController.js (100%) rename services/filestore/app/{coffee => js}/ImageOptimiser.js (100%) rename services/filestore/app/{coffee => js}/KeyBuilder.js (100%) rename services/filestore/app/{coffee => js}/LocalFileWriter.js (100%) rename services/filestore/app/{coffee => js}/PersistorManager.js (100%) rename services/filestore/app/{coffee => js}/S3PersistorManager.js (100%) rename services/filestore/app/{coffee => js}/SafeExec.js (100%) diff --git a/services/filestore/.gitignore b/services/filestore/.gitignore index 5d1086933c..3c0b90c20d 100644 --- a/services/filestore/.gitignore +++ b/services/filestore/.gitignore @@ -36,28 +36,11 @@ Icon? 
Thumbs.db /node_modules/* -test/IntergrationTests/js/* data/*/* -app.js **/*.map -cluster.js -app/js/* -test/IntergrationTests/js/* -test/UnitTests/js/* cookies.txt uploads/* -public/js/editor.js -public/js/home.js -public/js/forms.js -public/js/gui.js -public/js/admin.js -public/stylesheets/mainStyle.css -public/minjs/ -test/unit/js/ -test/acceptence/js -cluster.js -test/acceptance/js/ user_files/* template_files/* diff --git a/services/filestore/app/coffee/AWSSDKPersistorManager.js b/services/filestore/app/js/AWSSDKPersistorManager.js similarity index 100% rename from services/filestore/app/coffee/AWSSDKPersistorManager.js rename to services/filestore/app/js/AWSSDKPersistorManager.js diff --git a/services/filestore/app/coffee/BucketController.js b/services/filestore/app/js/BucketController.js similarity index 100% rename from services/filestore/app/coffee/BucketController.js rename to services/filestore/app/js/BucketController.js diff --git a/services/filestore/app/coffee/Errors.js b/services/filestore/app/js/Errors.js similarity index 100% rename from services/filestore/app/coffee/Errors.js rename to services/filestore/app/js/Errors.js diff --git a/services/filestore/app/coffee/FSPersistorManager.js b/services/filestore/app/js/FSPersistorManager.js similarity index 100% rename from services/filestore/app/coffee/FSPersistorManager.js rename to services/filestore/app/js/FSPersistorManager.js diff --git a/services/filestore/app/coffee/FileController.js b/services/filestore/app/js/FileController.js similarity index 100% rename from services/filestore/app/coffee/FileController.js rename to services/filestore/app/js/FileController.js diff --git a/services/filestore/app/coffee/FileConverter.js b/services/filestore/app/js/FileConverter.js similarity index 100% rename from services/filestore/app/coffee/FileConverter.js rename to services/filestore/app/js/FileConverter.js diff --git a/services/filestore/app/coffee/FileHandler.js b/services/filestore/app/js/FileHandler.js similarity index 100% rename from services/filestore/app/coffee/FileHandler.js rename to services/filestore/app/js/FileHandler.js diff --git a/services/filestore/app/coffee/HealthCheckController.js b/services/filestore/app/js/HealthCheckController.js similarity index 100% rename from services/filestore/app/coffee/HealthCheckController.js rename to services/filestore/app/js/HealthCheckController.js diff --git a/services/filestore/app/coffee/ImageOptimiser.js b/services/filestore/app/js/ImageOptimiser.js similarity index 100% rename from services/filestore/app/coffee/ImageOptimiser.js rename to services/filestore/app/js/ImageOptimiser.js diff --git a/services/filestore/app/coffee/KeyBuilder.js b/services/filestore/app/js/KeyBuilder.js similarity index 100% rename from services/filestore/app/coffee/KeyBuilder.js rename to services/filestore/app/js/KeyBuilder.js diff --git a/services/filestore/app/coffee/LocalFileWriter.js b/services/filestore/app/js/LocalFileWriter.js similarity index 100% rename from services/filestore/app/coffee/LocalFileWriter.js rename to services/filestore/app/js/LocalFileWriter.js diff --git a/services/filestore/app/coffee/PersistorManager.js b/services/filestore/app/js/PersistorManager.js similarity index 100% rename from services/filestore/app/coffee/PersistorManager.js rename to services/filestore/app/js/PersistorManager.js diff --git a/services/filestore/app/coffee/S3PersistorManager.js b/services/filestore/app/js/S3PersistorManager.js similarity index 100% rename from 
services/filestore/app/coffee/S3PersistorManager.js rename to services/filestore/app/js/S3PersistorManager.js diff --git a/services/filestore/app/coffee/SafeExec.js b/services/filestore/app/js/SafeExec.js similarity index 100% rename from services/filestore/app/coffee/SafeExec.js rename to services/filestore/app/js/SafeExec.js From 601861ee58951611c1885a2af1946f93ab22948f Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 10:34:51 +0000 Subject: [PATCH 329/555] Remove js from dockerignore --- services/filestore/.dockerignore | 2 -- 1 file changed, 2 deletions(-) diff --git a/services/filestore/.dockerignore b/services/filestore/.dockerignore index 386f26df30..ba1c3442de 100644 --- a/services/filestore/.dockerignore +++ b/services/filestore/.dockerignore @@ -5,5 +5,3 @@ gitrev .npm .nvmrc nodemon.json -app.js -**/js/* From 6225f2f236db327a56440f97de5e70f511217190 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 10:42:31 +0000 Subject: [PATCH 330/555] Prettier: convert app/js decaffeinated files to Prettier format --- .../app/js/AWSSDKPersistorManager.js | 294 +++++---- services/filestore/app/js/BucketController.js | 70 ++- services/filestore/app/js/Errors.js | 17 +- .../filestore/app/js/FSPersistorManager.js | 239 +++++--- services/filestore/app/js/FileController.js | 284 +++++---- services/filestore/app/js/FileConverter.js | 182 ++++-- services/filestore/app/js/FileHandler.js | 359 ++++++----- .../filestore/app/js/HealthCheckController.js | 132 ++-- services/filestore/app/js/ImageOptimiser.js | 64 +- services/filestore/app/js/KeyBuilder.js | 106 ++-- services/filestore/app/js/LocalFileWriter.js | 142 +++-- services/filestore/app/js/PersistorManager.js | 54 +- .../filestore/app/js/S3PersistorManager.js | 580 ++++++++++-------- services/filestore/app/js/SafeExec.js | 80 +-- 14 files changed, 1464 insertions(+), 1139 deletions(-) diff --git a/services/filestore/app/js/AWSSDKPersistorManager.js b/services/filestore/app/js/AWSSDKPersistorManager.js index 8220af4871..4dbc836280 100644 --- a/services/filestore/app/js/AWSSDKPersistorManager.js +++ b/services/filestore/app/js/AWSSDKPersistorManager.js @@ -15,137 +15,183 @@ // latest aws-sdk and delete this module so that PersistorManager would load the // same backend for both the 's3' and 'aws-sdk' options. 
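// PersistorManager chooses between these modules via settings.filestore.backend:
// 'aws-sdk' loads this file, 's3' loads the knox-based S3PersistorManager and
// 'fs' loads FSPersistorManager. A minimal config sketch (values illustrative):
//
//   settings.filestore = {
//     backend: 's3',
//     s3: { key: 'ACCESS_KEY', secret: 'SECRET' }
//   }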
-const logger = require("logger-sharelatex"); -const aws = require("aws-sdk"); -const _ = require("underscore"); -const fs = require("fs"); -const Errors = require("./Errors"); +const logger = require('logger-sharelatex') +const aws = require('aws-sdk') +const _ = require('underscore') +const fs = require('fs') +const Errors = require('./Errors') -const s3 = new aws.S3(); +const s3 = new aws.S3() module.exports = { - sendFile(bucketName, key, fsPath, callback){ - logger.log({bucketName, key}, "send file data to s3"); - const stream = fs.createReadStream(fsPath); - return s3.upload({Bucket: bucketName, Key: key, Body: stream}, function(err, data) { - if (err != null) { - logger.err({err, Bucket: bucketName, Key: key}, "error sending file data to s3"); - } - return callback(err); - }); - }, + sendFile(bucketName, key, fsPath, callback) { + logger.log({ bucketName, key }, 'send file data to s3') + const stream = fs.createReadStream(fsPath) + return s3.upload({ Bucket: bucketName, Key: key, Body: stream }, function( + err, + data + ) { + if (err != null) { + logger.err( + { err, Bucket: bucketName, Key: key }, + 'error sending file data to s3' + ) + } + return callback(err) + }) + }, - sendStream(bucketName, key, stream, callback){ - logger.log({bucketName, key}, "send file stream to s3"); - return s3.upload({Bucket: bucketName, Key: key, Body: stream}, function(err, data) { - if (err != null) { - logger.err({err, Bucket: bucketName, Key: key}, "error sending file stream to s3"); - } - return callback(err); - }); - }, + sendStream(bucketName, key, stream, callback) { + logger.log({ bucketName, key }, 'send file stream to s3') + return s3.upload({ Bucket: bucketName, Key: key, Body: stream }, function( + err, + data + ) { + if (err != null) { + logger.err( + { err, Bucket: bucketName, Key: key }, + 'error sending file stream to s3' + ) + } + return callback(err) + }) + }, - getFileStream(bucketName, key, opts, callback){ - if (callback == null) { callback = function(err, res){}; } - logger.log({bucketName, key}, "get file stream from s3"); - callback = _.once(callback); - const params = { - Bucket:bucketName, - Key: key - }; - if ((opts.start != null) && (opts.end != null)) { - params.Range = `bytes=${opts.start}-${opts.end}`; - } - const request = s3.getObject(params); - const stream = request.createReadStream(); - stream.on('readable', () => callback(null, stream)); - return stream.on('error', function(err) { - logger.err({err, bucketName, key}, "error getting file stream from s3"); - if (err.code === 'NoSuchKey') { - return callback(new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`)); - } - return callback(err); - }); - }, + getFileStream(bucketName, key, opts, callback) { + if (callback == null) { + callback = function(err, res) {} + } + logger.log({ bucketName, key }, 'get file stream from s3') + callback = _.once(callback) + const params = { + Bucket: bucketName, + Key: key + } + if (opts.start != null && opts.end != null) { + params.Range = `bytes=${opts.start}-${opts.end}` + } + const request = s3.getObject(params) + const stream = request.createReadStream() + stream.on('readable', () => callback(null, stream)) + return stream.on('error', function(err) { + logger.err({ err, bucketName, key }, 'error getting file stream from s3') + if (err.code === 'NoSuchKey') { + return callback( + new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`) + ) + } + return callback(err) + }) + }, - copyFile(bucketName, sourceKey, destKey, callback){ - 
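// Note the contrast with the knox-based S3PersistorManager, whose sendStream
// first spools the incoming stream to disk via LocalFileWriter and then
// re-uses sendFile: with aws-sdk the stream is handed straight to s3.upload,
//
//   s3.upload({ Bucket: bucketName, Key: key, Body: readStream }, callback)
//
// so no temporary file (and no cleanup step) is needed.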
logger.log({bucketName, sourceKey, destKey}, "copying file in s3"); - const source = bucketName + '/' + sourceKey; - return s3.copyObject({Bucket: bucketName, Key: destKey, CopySource: source}, function(err) { - if (err != null) { - logger.err({err, bucketName, sourceKey, destKey}, "something went wrong copying file in s3"); - } - return callback(err); - }); - }, + copyFile(bucketName, sourceKey, destKey, callback) { + logger.log({ bucketName, sourceKey, destKey }, 'copying file in s3') + const source = bucketName + '/' + sourceKey + return s3.copyObject( + { Bucket: bucketName, Key: destKey, CopySource: source }, + function(err) { + if (err != null) { + logger.err( + { err, bucketName, sourceKey, destKey }, + 'something went wrong copying file in s3' + ) + } + return callback(err) + } + ) + }, - deleteFile(bucketName, key, callback){ - logger.log({bucketName, key}, "delete file in s3"); - return s3.deleteObject({Bucket: bucketName, Key: key}, function(err) { - if (err != null) { - logger.err({err, bucketName, key}, "something went wrong deleting file in s3"); - } - return callback(err); - }); - }, + deleteFile(bucketName, key, callback) { + logger.log({ bucketName, key }, 'delete file in s3') + return s3.deleteObject({ Bucket: bucketName, Key: key }, function(err) { + if (err != null) { + logger.err( + { err, bucketName, key }, + 'something went wrong deleting file in s3' + ) + } + return callback(err) + }) + }, - deleteDirectory(bucketName, key, callback){ - logger.log({bucketName, key}, "delete directory in s3"); - return s3.listObjects({Bucket: bucketName, Prefix: key}, function(err, data) { - if (err != null) { - logger.err({err, bucketName, key}, "something went wrong listing prefix in s3"); - return callback(err); - } - if (data.Contents.length === 0) { - logger.log({bucketName, key}, "the directory is empty"); - return callback(); - } - const keys = _.map(data.Contents, entry => ({ - Key: entry.Key - })); - return s3.deleteObjects({ - Bucket: bucketName, - Delete: { - Objects: keys, - Quiet: true - } - } - , function(err) { - if (err != null) { - logger.err({err, bucketName, key:keys}, "something went wrong deleting directory in s3"); - } - return callback(err); - }); - }); - }, + deleteDirectory(bucketName, key, callback) { + logger.log({ bucketName, key }, 'delete directory in s3') + return s3.listObjects({ Bucket: bucketName, Prefix: key }, function( + err, + data + ) { + if (err != null) { + logger.err( + { err, bucketName, key }, + 'something went wrong listing prefix in s3' + ) + return callback(err) + } + if (data.Contents.length === 0) { + logger.log({ bucketName, key }, 'the directory is empty') + return callback() + } + const keys = _.map(data.Contents, entry => ({ + Key: entry.Key + })) + return s3.deleteObjects( + { + Bucket: bucketName, + Delete: { + Objects: keys, + Quiet: true + } + }, + function(err) { + if (err != null) { + logger.err( + { err, bucketName, key: keys }, + 'something went wrong deleting directory in s3' + ) + } + return callback(err) + } + ) + }) + }, - checkIfFileExists(bucketName, key, callback){ - logger.log({bucketName, key}, "check file existence in s3"); - return s3.headObject({Bucket: bucketName, Key: key}, function(err, data) { - if (err != null) { - if (err.code === 'NotFound') { return (callback(null, false)); } - logger.err({err, bucketName, key}, "something went wrong checking head in s3"); - return callback(err); - } - return callback(null, (data.ETag != null)); - }); - }, - - directorySize(bucketName, key, callback){ - 
logger.log({bucketName, key}, "get project size in s3"); - return s3.listObjects({Bucket: bucketName, Prefix: key}, function(err, data) { - if (err != null) { - logger.err({err, bucketName, key}, "something went wrong listing prefix in s3"); - return callback(err); - } - if (data.Contents.length === 0) { - logger.log({bucketName, key}, "the directory is empty"); - return callback(); - } - let totalSize = 0; - _.each(data.Contents, entry => totalSize += entry.Size); - return callback(null, totalSize); - }); - } -}; + checkIfFileExists(bucketName, key, callback) { + logger.log({ bucketName, key }, 'check file existence in s3') + return s3.headObject({ Bucket: bucketName, Key: key }, function(err, data) { + if (err != null) { + if (err.code === 'NotFound') { + return callback(null, false) + } + logger.err( + { err, bucketName, key }, + 'something went wrong checking head in s3' + ) + return callback(err) + } + return callback(null, data.ETag != null) + }) + }, + directorySize(bucketName, key, callback) { + logger.log({ bucketName, key }, 'get project size in s3') + return s3.listObjects({ Bucket: bucketName, Prefix: key }, function( + err, + data + ) { + if (err != null) { + logger.err( + { err, bucketName, key }, + 'something went wrong listing prefix in s3' + ) + return callback(err) + } + if (data.Contents.length === 0) { + logger.log({ bucketName, key }, 'the directory is empty') + return callback() + } + let totalSize = 0 + _.each(data.Contents, entry => (totalSize += entry.Size)) + return callback(null, totalSize) + }) + } +} diff --git a/services/filestore/app/js/BucketController.js b/services/filestore/app/js/BucketController.js index d2c4c06fae..46f69679aa 100644 --- a/services/filestore/app/js/BucketController.js +++ b/services/filestore/app/js/BucketController.js @@ -9,38 +9,40 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let BucketController; -const settings = require("settings-sharelatex"); -const logger = require("logger-sharelatex"); -const FileHandler = require("./FileHandler"); -const metrics = require("metrics-sharelatex"); -const Errors = require('./Errors'); +let BucketController +const settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') +const FileHandler = require('./FileHandler') +const metrics = require('metrics-sharelatex') +const Errors = require('./Errors') -module.exports = (BucketController = { - - getFile(req, res){ - const {bucket} = req.params; - const key = req.params[0]; - const credentials = settings.filestore.s3BucketCreds != null ? settings.filestore.s3BucketCreds[bucket] : undefined; - const options = { - key, - bucket, - credentials - }; - metrics.inc(`${bucket}.getFile`); - logger.log({key, bucket}, "receiving request to get file from bucket"); - return FileHandler.getFile(bucket, key, options, function(err, fileStream){ - if (err != null) { - logger.err({err, key, bucket}, "problem getting file from bucket"); - if (err instanceof Errors.NotFoundError) { - return res.send(404); - } else { - return res.send(500); - } - } else { - logger.log({key, bucket}, "sending bucket file to response"); - return fileStream.pipe(res); - } - }); - } -}); +module.exports = BucketController = { + getFile(req, res) { + const { bucket } = req.params + const key = req.params[0] + const credentials = + settings.filestore.s3BucketCreds != null + ? 
settings.filestore.s3BucketCreds[bucket] + : undefined + const options = { + key, + bucket, + credentials + } + metrics.inc(`${bucket}.getFile`) + logger.log({ key, bucket }, 'receiving request to get file from bucket') + return FileHandler.getFile(bucket, key, options, function(err, fileStream) { + if (err != null) { + logger.err({ err, key, bucket }, 'problem getting file from bucket') + if (err instanceof Errors.NotFoundError) { + return res.send(404) + } else { + return res.send(500) + } + } else { + logger.log({ key, bucket }, 'sending bucket file to response') + return fileStream.pipe(res) + } + }) + } +} diff --git a/services/filestore/app/js/Errors.js b/services/filestore/app/js/Errors.js index 4a9f65bb4b..f3bc8e37eb 100644 --- a/services/filestore/app/js/Errors.js +++ b/services/filestore/app/js/Errors.js @@ -4,14 +4,13 @@ */ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. -let Errors; +let Errors var NotFoundError = function(message) { - const error = new Error(message); - error.name = "NotFoundError"; - error.__proto__ = NotFoundError.prototype; - return error; -}; -NotFoundError.prototype.__proto__ = Error.prototype; + const error = new Error(message) + error.name = 'NotFoundError' + error.__proto__ = NotFoundError.prototype + return error +} +NotFoundError.prototype.__proto__ = Error.prototype -module.exports = (Errors = - {NotFoundError}); +module.exports = Errors = { NotFoundError } diff --git a/services/filestore/app/js/FSPersistorManager.js b/services/filestore/app/js/FSPersistorManager.js index c90d4b6dfe..2e0de65632 100644 --- a/services/filestore/app/js/FSPersistorManager.js +++ b/services/filestore/app/js/FSPersistorManager.js @@ -11,161 +11,196 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const logger = require("logger-sharelatex"); -const fs = require("fs"); -const path = require("path"); -const LocalFileWriter = require("./LocalFileWriter"); -const Errors = require('./Errors'); -const rimraf = require("rimraf"); -const _ = require("underscore"); - -const filterName = key => key.replace(/\//g, "_"); +const logger = require('logger-sharelatex') +const fs = require('fs') +const path = require('path') +const LocalFileWriter = require('./LocalFileWriter') +const Errors = require('./Errors') +const rimraf = require('rimraf') +const _ = require('underscore') +const filterName = key => key.replace(/\//g, '_') module.exports = { - sendFile( location, target, source, callback) { - if (callback == null) { callback = function(err){}; } - const filteredTarget = filterName(target); - logger.log({location, target:filteredTarget, source}, "sending file"); + sendFile(location, target, source, callback) { + if (callback == null) { + callback = function(err) {} + } + const filteredTarget = filterName(target) + logger.log({ location, target: filteredTarget, source }, 'sending file') const done = _.once(function(err) { if (err != null) { - logger.err({err, location, target:filteredTarget, source}, "Error on put of file"); + logger.err( + { err, location, target: filteredTarget, source }, + 'Error on put of file' + ) } - return callback(err); - }); + return callback(err) + }) // actually copy the file (instead of moving it) to maintain consistent behaviour // between the different implementations - const sourceStream = fs.createReadStream(source); - sourceStream.on('error', done); - const targetStream = 
fs.createWriteStream(`${location}/${filteredTarget}`); - targetStream.on('error', done); - targetStream.on('finish', () => done()); - return sourceStream.pipe(targetStream); + const sourceStream = fs.createReadStream(source) + sourceStream.on('error', done) + const targetStream = fs.createWriteStream(`${location}/${filteredTarget}`) + targetStream.on('error', done) + targetStream.on('finish', () => done()) + return sourceStream.pipe(targetStream) }, - sendStream( location, target, sourceStream, callback) { - if (callback == null) { callback = function(err){}; } - logger.log({location, target}, "sending file stream"); - sourceStream.on("error", err => logger.err({location, target, err:err("error on stream to send")})); - return LocalFileWriter.writeStream(sourceStream, null, (err, fsPath)=> { + sendStream(location, target, sourceStream, callback) { + if (callback == null) { + callback = function(err) {} + } + logger.log({ location, target }, 'sending file stream') + sourceStream.on('error', err => + logger.err({ location, target, err: err('error on stream to send') }) + ) + return LocalFileWriter.writeStream(sourceStream, null, (err, fsPath) => { if (err != null) { - logger.err({location, target, fsPath, err}, "something went wrong writing stream to disk"); - return callback(err); + logger.err( + { location, target, fsPath, err }, + 'something went wrong writing stream to disk' + ) + return callback(err) } - return this.sendFile(location, target, fsPath, err => // delete the temporary file created above and return the original error - LocalFileWriter.deleteFile(fsPath, () => callback(err))); - }); + return this.sendFile(location, target, fsPath, ( + err // delete the temporary file created above and return the original error + ) => LocalFileWriter.deleteFile(fsPath, () => callback(err))) + }) }, // opts may be {start: Number, end: Number} getFileStream(location, name, opts, callback) { - if (callback == null) { callback = function(err, res){}; } - const filteredName = filterName(name); - logger.log({location, filteredName}, "getting file"); + if (callback == null) { + callback = function(err, res) {} + } + const filteredName = filterName(name) + logger.log({ location, filteredName }, 'getting file') return fs.open(`${location}/${filteredName}`, 'r', function(err, fd) { if (err != null) { - logger.err({err, location, filteredName:name}, "Error reading from file"); + logger.err( + { err, location, filteredName: name }, + 'Error reading from file' + ) } if (err.code === 'ENOENT') { - return callback(new Errors.NotFoundError(err.message), null); + return callback(new Errors.NotFoundError(err.message), null) } else { - return callback(err, null); + return callback(err, null) } - opts.fd = fd; - const sourceStream = fs.createReadStream(null, opts); - return callback(null, sourceStream); - }); + opts.fd = fd + const sourceStream = fs.createReadStream(null, opts) + return callback(null, sourceStream) + }) }, getFileSize(location, filename, callback) { - const fullPath = path.join(location, filterName(filename)); + const fullPath = path.join(location, filterName(filename)) return fs.stat(fullPath, function(err, stats) { if (err != null) { if (err.code === 'ENOENT') { - logger.log({location, filename}, "file not found"); - callback(new Errors.NotFoundError(err.message)); + logger.log({ location, filename }, 'file not found') + callback(new Errors.NotFoundError(err.message)) } else { - logger.err({err, location, filename}, "failed to stat file"); - callback(err); + logger.err({ err, location, filename 
}, 'failed to stat file') + callback(err) } - return; + return } - return callback(null, stats.size); - }); + return callback(null, stats.size) + }) }, - copyFile(location, fromName, toName, callback){ - if (callback == null) { callback = function(err){}; } - const filteredFromName=filterName(fromName); - const filteredToName=filterName(toName); - logger.log({location, fromName:filteredFromName, toName:filteredToName}, "copying file"); - const sourceStream = fs.createReadStream(`${location}/${filteredFromName}`); + copyFile(location, fromName, toName, callback) { + if (callback == null) { + callback = function(err) {} + } + const filteredFromName = filterName(fromName) + const filteredToName = filterName(toName) + logger.log( + { location, fromName: filteredFromName, toName: filteredToName }, + 'copying file' + ) + const sourceStream = fs.createReadStream(`${location}/${filteredFromName}`) sourceStream.on('error', function(err) { - logger.err({err, location, key:filteredFromName}, "Error reading from file"); - return callback(err); - }); - const targetStream = fs.createWriteStream(`${location}/${filteredToName}`); + logger.err( + { err, location, key: filteredFromName }, + 'Error reading from file' + ) + return callback(err) + }) + const targetStream = fs.createWriteStream(`${location}/${filteredToName}`) targetStream.on('error', function(err) { - logger.err({err, location, key:filteredToName}, "Error writing to file"); - return callback(err); - }); - targetStream.on('finish', () => callback(null)); - return sourceStream.pipe(targetStream); + logger.err( + { err, location, key: filteredToName }, + 'Error writing to file' + ) + return callback(err) + }) + targetStream.on('finish', () => callback(null)) + return sourceStream.pipe(targetStream) }, - deleteFile(location, name, callback){ - const filteredName = filterName(name); - logger.log({location, filteredName}, "delete file"); + deleteFile(location, name, callback) { + const filteredName = filterName(name) + logger.log({ location, filteredName }, 'delete file') return fs.unlink(`${location}/${filteredName}`, function(err) { if (err != null) { - logger.err({err, location, filteredName}, "Error on delete."); - return callback(err); + logger.err({ err, location, filteredName }, 'Error on delete.') + return callback(err) } else { - return callback(); + return callback() } - }); + }) }, - deleteDirectory(location, name, callback){ - if (callback == null) { callback = function(err){}; } - const filteredName = filterName(name.replace(/\/$/,'')); + deleteDirectory(location, name, callback) { + if (callback == null) { + callback = function(err) {} + } + const filteredName = filterName(name.replace(/\/$/, '')) return rimraf(`${location}/${filteredName}`, function(err) { if (err != null) { - logger.err({err, location, filteredName}, "Error on rimraf rmdir."); - return callback(err); + logger.err({ err, location, filteredName }, 'Error on rimraf rmdir.') + return callback(err) } else { - return callback(); + return callback() } - }); + }) }, - checkIfFileExists(location, name, callback){ - if (callback == null) { callback = function(err,exists){}; } - const filteredName = filterName(name); - logger.log({location, filteredName}, "checking if file exists"); + checkIfFileExists(location, name, callback) { + if (callback == null) { + callback = function(err, exists) {} + } + const filteredName = filterName(name) + logger.log({ location, filteredName }, 'checking if file exists') return fs.exists(`${location}/${filteredName}`, function(exists) { - 
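// FSPersistorManager stores every key as a flat file under `location`:
// filterName() replaces the '/' separators with '_', so a key shaped like
// '<project_id>/<file_id>' (a hypothetical example) is read from and written
// to `${location}/<project_id>_<file_id>` by the functions above and below.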
logger.log({location, filteredName, exists}, "checked if file exists"); - return callback(null, exists); - }); + logger.log({ location, filteredName, exists }, 'checked if file exists') + return callback(null, exists) + }) }, - directorySize(location, name, callback){ - const filteredName = filterName(name.replace(/\/$/,'')); - logger.log({location, filteredName}, "get project size in file system"); + directorySize(location, name, callback) { + const filteredName = filterName(name.replace(/\/$/, '')) + logger.log({ location, filteredName }, 'get project size in file system') return fs.readdir(`${location}/${filteredName}`, function(err, files) { if (err != null) { - logger.err({err, location, filteredName}, "something went wrong listing prefix in aws"); - return callback(err); + logger.err( + { err, location, filteredName }, + 'something went wrong listing prefix in aws' + ) + return callback(err) } - let totalSize = 0; - _.each(files, function(entry){ - const fd = fs.openSync(`${location}/${filteredName}/${entry}`, 'r'); - const fileStats = fs.fstatSync(fd); - totalSize += fileStats.size; - return fs.closeSync(fd); - }); - logger.log({totalSize}, "total size", {files}); - return callback(null, totalSize); - }); + let totalSize = 0 + _.each(files, function(entry) { + const fd = fs.openSync(`${location}/${filteredName}/${entry}`, 'r') + const fileStats = fs.fstatSync(fd) + totalSize += fileStats.size + return fs.closeSync(fd) + }) + logger.log({ totalSize }, 'total size', { files }) + return callback(null, totalSize) + }) } -}; +} diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index 51251ba010..ea41f3d813 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -10,142 +10,164 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let FileController; -const PersistorManager = require("./PersistorManager"); -const settings = require("settings-sharelatex"); -const logger = require("logger-sharelatex"); -const FileHandler = require("./FileHandler"); -const metrics = require("metrics-sharelatex"); -const parseRange = require('range-parser'); -const Errors = require('./Errors'); +let FileController +const PersistorManager = require('./PersistorManager') +const settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') +const FileHandler = require('./FileHandler') +const metrics = require('metrics-sharelatex') +const parseRange = require('range-parser') +const Errors = require('./Errors') -const oneDayInSeconds = 60 * 60 * 24; -const maxSizeInBytes = 1024 * 1024 * 1024; // 1GB +const oneDayInSeconds = 60 * 60 * 24 +const maxSizeInBytes = 1024 * 1024 * 1024 // 1GB -module.exports = (FileController = { +module.exports = FileController = { + getFile(req, res) { + const { key, bucket } = req + const { format, style } = req.query + const options = { + key, + bucket, + format, + style + } + metrics.inc('getFile') + logger.log({ key, bucket, format, style }, 'receiving request to get file') + if (req.headers.range != null) { + const range = FileController._get_range(req.headers.range) + options.start = range.start + options.end = range.end + logger.log( + { start: range.start, end: range.end }, + 'getting range of bytes from file' + ) + } + return FileHandler.getFile(bucket, key, options, function(err, fileStream) { + if (err != null) { + if (err instanceof 
Errors.NotFoundError) { + return res.send(404) + } else { + logger.err( + { err, key, bucket, format, style }, + 'problem getting file' + ) + return res.send(500) + } + } else if (req.query.cacheWarm) { + logger.log( + { key, bucket, format, style }, + 'request is only for cache warm so not sending stream' + ) + return res.send(200) + } else { + logger.log({ key, bucket, format, style }, 'sending file to response') + return fileStream.pipe(res) + } + }) + }, - getFile(req, res){ - const {key, bucket} = req; - const {format, style} = req.query; - const options = { - key, - bucket, - format, - style, - }; - metrics.inc("getFile"); - logger.log({key, bucket, format, style}, "receiving request to get file"); - if (req.headers.range != null) { - const range = FileController._get_range(req.headers.range); - options.start = range.start; - options.end = range.end; - logger.log({start: range.start, end: range.end}, "getting range of bytes from file"); - } - return FileHandler.getFile(bucket, key, options, function(err, fileStream){ - if (err != null) { - if (err instanceof Errors.NotFoundError) { - return res.send(404); - } else { - logger.err({err, key, bucket, format, style}, "problem getting file"); - return res.send(500); - } - } else if (req.query.cacheWarm) { - logger.log({key, bucket, format, style}, "request is only for cache warm so not sending stream"); - return res.send(200); - } else { - logger.log({key, bucket, format, style}, "sending file to response"); - return fileStream.pipe(res); - } - }); - }, + getFileHead(req, res) { + const { key, bucket } = req + metrics.inc('getFileSize') + logger.log({ key, bucket }, 'receiving request to get file metadata') + return FileHandler.getFileSize(bucket, key, function(err, fileSize) { + if (err != null) { + if (err instanceof Errors.NotFoundError) { + res.status(404).end() + } else { + res.status(500).end() + } + return + } + res.set('Content-Length', fileSize) + return res.status(200).end() + }) + }, - getFileHead(req, res) { - const {key, bucket} = req; - metrics.inc("getFileSize"); - logger.log({ key, bucket }, "receiving request to get file metadata"); - return FileHandler.getFileSize(bucket, key, function(err, fileSize) { - if (err != null) { - if (err instanceof Errors.NotFoundError) { - res.status(404).end(); - } else { - res.status(500).end(); - } - return; - } - res.set("Content-Length", fileSize); - return res.status(200).end(); - }); - }, + insertFile(req, res) { + metrics.inc('insertFile') + const { key, bucket } = req + logger.log({ key, bucket }, 'receiving request to insert file') + return FileHandler.insertFile(bucket, key, req, function(err) { + if (err != null) { + logger.log({ err, key, bucket }, 'error inserting file') + return res.send(500) + } else { + return res.send(200) + } + }) + }, - insertFile(req, res){ - metrics.inc("insertFile"); - const {key, bucket} = req; - logger.log({key, bucket}, "receiving request to insert file"); - return FileHandler.insertFile(bucket, key, req, function(err){ - if (err != null) { - logger.log({err, key, bucket}, "error inserting file"); - return res.send(500); - } else { - return res.send(200); - } - }); - }, + copyFile(req, res) { + metrics.inc('copyFile') + const { key, bucket } = req + const oldProject_id = req.body.source.project_id + const oldFile_id = req.body.source.file_id + logger.log( + { key, bucket, oldProject_id, oldFile_id }, + 'receiving request to copy file' + ) + return PersistorManager.copyFile( + bucket, + `${oldProject_id}/${oldFile_id}`, + key, + function(err) { + if 
(err != null) { + if (err instanceof Errors.NotFoundError) { + return res.send(404) + } else { + logger.log( + { err, oldProject_id, oldFile_id }, + 'something went wrong copying file' + ) + return res.send(500) + } + } else { + return res.send(200) + } + } + ) + }, - copyFile(req, res){ - metrics.inc("copyFile"); - const {key, bucket} = req; - const oldProject_id = req.body.source.project_id; - const oldFile_id = req.body.source.file_id; - logger.log({key, bucket, oldProject_id, oldFile_id}, "receiving request to copy file"); - return PersistorManager.copyFile(bucket, `${oldProject_id}/${oldFile_id}`, key, function(err){ - if (err != null) { - if (err instanceof Errors.NotFoundError) { - return res.send(404); - } else { - logger.log({err, oldProject_id, oldFile_id}, "something went wrong copying file"); - return res.send(500); - } - } else { - return res.send(200); - } - }); - }, + deleteFile(req, res) { + metrics.inc('deleteFile') + const { key, bucket } = req + logger.log({ key, bucket }, 'receiving request to delete file') + return FileHandler.deleteFile(bucket, key, function(err) { + if (err != null) { + logger.log({ err, key, bucket }, 'something went wrong deleting file') + return res.send(500) + } else { + return res.send(204) + } + }) + }, - deleteFile(req, res){ - metrics.inc("deleteFile"); - const {key, bucket} = req; - logger.log({key, bucket}, "receiving request to delete file"); - return FileHandler.deleteFile(bucket, key, function(err){ - if (err != null) { - logger.log({err, key, bucket}, "something went wrong deleting file"); - return res.send(500); - } else { - return res.send(204); - } - }); - }, + _get_range(header) { + const parsed = parseRange(maxSizeInBytes, header) + if (parsed === -1 || parsed === -2 || parsed.type !== 'bytes') { + return null + } else { + const range = parsed[0] + return { start: range.start, end: range.end } + } + }, - _get_range(header) { - const parsed = parseRange(maxSizeInBytes, header); - if ((parsed === -1) || (parsed === -2) || (parsed.type !== 'bytes')) { - return null; - } else { - const range = parsed[0]; - return {start: range.start, end: range.end}; - } - }, - - directorySize(req, res){ - metrics.inc("projectSize"); - const {project_id, bucket} = req; - logger.log({project_id, bucket}, "receiving request to project size"); - return FileHandler.getDirectorySize(bucket, project_id, function(err, size){ - if (err != null) { - logger.log({err, project_id, bucket}, "error inserting file"); - return res.send(500); - } else { - return res.json({'total bytes' : size}); - } - }); - } -}); + directorySize(req, res) { + metrics.inc('projectSize') + const { project_id, bucket } = req + logger.log({ project_id, bucket }, 'receiving request to project size') + return FileHandler.getDirectorySize(bucket, project_id, function( + err, + size + ) { + if (err != null) { + logger.log({ err, project_id, bucket }, 'error inserting file') + return res.send(500) + } else { + return res.json({ 'total bytes': size }) + } + }) + } +} diff --git a/services/filestore/app/js/FileConverter.js b/services/filestore/app/js/FileConverter.js index e241cd8275..04b599728a 100644 --- a/services/filestore/app/js/FileConverter.js +++ b/services/filestore/app/js/FileConverter.js @@ -9,77 +9,125 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const _ = require("underscore"); -const metrics = require("metrics-sharelatex"); -const logger = require("logger-sharelatex"); -const 
safe_exec = require("./SafeExec"); -const approvedFormats = ["png"]; -const Settings = require("settings-sharelatex"); +const _ = require('underscore') +const metrics = require('metrics-sharelatex') +const logger = require('logger-sharelatex') +const safe_exec = require('./SafeExec') +const approvedFormats = ['png'] +const Settings = require('settings-sharelatex') -const fourtySeconds = 40 * 1000; +const fourtySeconds = 40 * 1000 const childProcessOpts = { - killSignal: "SIGTERM", - timeout: fourtySeconds -}; - + killSignal: 'SIGTERM', + timeout: fourtySeconds +} module.exports = { + convert(sourcePath, requestedFormat, callback) { + logger.log({ sourcePath, requestedFormat }, 'converting file format') + const timer = new metrics.Timer('imageConvert') + const destPath = `${sourcePath}.${requestedFormat}` + sourcePath = `${sourcePath}[0]` + if (!_.include(approvedFormats, requestedFormat)) { + const err = new Error('invalid format requested') + return callback(err) + } + const width = '600x' + let command = [ + 'convert', + '-define', + `pdf:fit-page=${width}`, + '-flatten', + '-density', + '300', + sourcePath, + destPath + ] + command = Settings.commands.convertCommandPrefix.concat(command) + return safe_exec(command, childProcessOpts, function(err, stdout, stderr) { + timer.done() + if (err != null) { + logger.err( + { err, stderr, sourcePath, requestedFormat, destPath }, + 'something went wrong converting file' + ) + } else { + logger.log( + { sourcePath, requestedFormat, destPath }, + 'finished converting file' + ) + } + return callback(err, destPath) + }) + }, - convert(sourcePath, requestedFormat, callback){ - logger.log({sourcePath, requestedFormat}, "converting file format"); - const timer = new metrics.Timer("imageConvert"); - const destPath = `${sourcePath}.${requestedFormat}`; - sourcePath = `${sourcePath}[0]`; - if (!_.include(approvedFormats, requestedFormat)) { - const err = new Error("invalid format requested"); - return callback(err); - } - const width = "600x"; - let command = ["convert", "-define", `pdf:fit-page=${width}`, "-flatten", "-density", "300", sourcePath, destPath]; - command = Settings.commands.convertCommandPrefix.concat(command); - return safe_exec(command, childProcessOpts, function(err, stdout, stderr){ - timer.done(); - if (err != null) { - logger.err({err, stderr, sourcePath, requestedFormat, destPath}, "something went wrong converting file"); - } else { - logger.log({sourcePath, requestedFormat, destPath}, "finished converting file"); - } - return callback(err, destPath); - }); - }, + thumbnail(sourcePath, callback) { + const destPath = `${sourcePath}.png` + sourcePath = `${sourcePath}[0]` + const width = '260x' + let command = [ + 'convert', + '-flatten', + '-background', + 'white', + '-density', + '300', + '-define', + `pdf:fit-page=${width}`, + sourcePath, + '-resize', + width, + destPath + ] + logger.log({ sourcePath, destPath, command }, 'thumbnail convert file') + command = Settings.commands.convertCommandPrefix.concat(command) + return safe_exec(command, childProcessOpts, function(err, stdout, stderr) { + if (err != null) { + logger.err( + { err, stderr, sourcePath }, + 'something went wrong converting file to thumbnail' + ) + } else { + logger.log({ sourcePath, destPath }, 'finished thumbnailing file') + } + return callback(err, destPath) + }) + }, - thumbnail(sourcePath, callback){ - const destPath = `${sourcePath}.png`; - sourcePath = `${sourcePath}[0]`; - const width = "260x"; - let command = ["convert", "-flatten", "-background", "white", 
"-density", "300", "-define", `pdf:fit-page=${width}`, sourcePath, "-resize", width, destPath]; - logger.log({sourcePath, destPath, command}, "thumbnail convert file"); - command = Settings.commands.convertCommandPrefix.concat(command); - return safe_exec(command, childProcessOpts, function(err, stdout, stderr){ - if (err != null) { - logger.err({err, stderr, sourcePath}, "something went wrong converting file to thumbnail"); - } else { - logger.log({sourcePath, destPath}, "finished thumbnailing file"); - } - return callback(err, destPath); - }); - }, - - preview(sourcePath, callback){ - logger.log({sourcePath}, "preview convert file"); - const destPath = `${sourcePath}.png`; - sourcePath = `${sourcePath}[0]`; - const width = "548x"; - let command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", `pdf:fit-page=${width}`, sourcePath, "-resize", width, destPath]; - command = Settings.commands.convertCommandPrefix.concat(command); - return safe_exec(command, childProcessOpts, function(err, stdout, stderr){ - if (err != null) { - logger.err({err, stderr, sourcePath, destPath}, "something went wrong converting file to preview"); - } else { - logger.log({sourcePath, destPath}, "finished converting file to preview"); - } - return callback(err, destPath); - }); - } -}; + preview(sourcePath, callback) { + logger.log({ sourcePath }, 'preview convert file') + const destPath = `${sourcePath}.png` + sourcePath = `${sourcePath}[0]` + const width = '548x' + let command = [ + 'convert', + '-flatten', + '-background', + 'white', + '-density', + '300', + '-define', + `pdf:fit-page=${width}`, + sourcePath, + '-resize', + width, + destPath + ] + command = Settings.commands.convertCommandPrefix.concat(command) + return safe_exec(command, childProcessOpts, function(err, stdout, stderr) { + if (err != null) { + logger.err( + { err, stderr, sourcePath, destPath }, + 'something went wrong converting file to preview' + ) + } else { + logger.log( + { sourcePath, destPath }, + 'finished converting file to preview' + ) + } + return callback(err, destPath) + }) + } +} diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index c22a276b65..e63c813167 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -11,166 +11,225 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let FileHandler; -const settings = require("settings-sharelatex"); -const PersistorManager = require("./PersistorManager"); -const LocalFileWriter = require("./LocalFileWriter"); -const logger = require("logger-sharelatex"); -const FileConverter = require("./FileConverter"); -const KeyBuilder = require("./KeyBuilder"); -const async = require("async"); -const ImageOptimiser = require("./ImageOptimiser"); -const Errors = require('./Errors'); +let FileHandler +const settings = require('settings-sharelatex') +const PersistorManager = require('./PersistorManager') +const LocalFileWriter = require('./LocalFileWriter') +const logger = require('logger-sharelatex') +const FileConverter = require('./FileConverter') +const KeyBuilder = require('./KeyBuilder') +const async = require('async') +const ImageOptimiser = require('./ImageOptimiser') +const Errors = require('./Errors') -module.exports = (FileHandler = { +module.exports = FileHandler = { + insertFile(bucket, key, stream, callback) { + const convertedKey = KeyBuilder.getConvertedFolderKey(key) 
+ return PersistorManager.deleteDirectory(bucket, convertedKey, function( + error + ) { + if (error != null) { + return callback(error) + } + return PersistorManager.sendStream(bucket, key, stream, callback) + }) + }, - insertFile(bucket, key, stream, callback){ - const convertedKey = KeyBuilder.getConvertedFolderKey(key); - return PersistorManager.deleteDirectory(bucket, convertedKey, function(error) { - if (error != null) { return callback(error); } - return PersistorManager.sendStream(bucket, key, stream, callback); - }); - }, + deleteFile(bucket, key, callback) { + const convertedKey = KeyBuilder.getConvertedFolderKey(key) + return async.parallel( + [ + done => PersistorManager.deleteFile(bucket, key, done), + done => PersistorManager.deleteDirectory(bucket, convertedKey, done) + ], + callback + ) + }, - deleteFile(bucket, key, callback){ - const convertedKey = KeyBuilder.getConvertedFolderKey(key); - return async.parallel([ - done => PersistorManager.deleteFile(bucket, key, done), - done => PersistorManager.deleteDirectory(bucket, convertedKey, done) - ], callback); - }, + getFile(bucket, key, opts, callback) { + // In this call, opts can contain credentials + if (opts == null) { + opts = {} + } + logger.log({ bucket, key, opts: this._scrubSecrets(opts) }, 'getting file') + if (opts.format == null && opts.style == null) { + return this._getStandardFile(bucket, key, opts, callback) + } else { + return this._getConvertedFile(bucket, key, opts, callback) + } + }, - getFile(bucket, key, opts, callback){ - // In this call, opts can contain credentials - if (opts == null) { opts = {}; } - logger.log({bucket, key, opts:this._scrubSecrets(opts)}, "getting file"); - if ((opts.format == null) && (opts.style == null)) { - return this._getStandardFile(bucket, key, opts, callback); - } else { - return this._getConvertedFile(bucket, key, opts, callback); - } - }, + getFileSize(bucket, key, callback) { + return PersistorManager.getFileSize(bucket, key, callback) + }, - getFileSize(bucket, key, callback) { - return PersistorManager.getFileSize(bucket, key, callback); - }, + _getStandardFile(bucket, key, opts, callback) { + return PersistorManager.getFileStream(bucket, key, opts, function( + err, + fileStream + ) { + if (err != null && !(err instanceof Errors.NotFoundError)) { + logger.err( + { bucket, key, opts: FileHandler._scrubSecrets(opts) }, + 'error getting fileStream' + ) + } + return callback(err, fileStream) + }) + }, - _getStandardFile(bucket, key, opts, callback){ - return PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream){ - if ((err != null) && !(err instanceof Errors.NotFoundError)) { - logger.err({bucket, key, opts:FileHandler._scrubSecrets(opts)}, "error getting fileStream"); - } - return callback(err, fileStream); - }); - }, + _getConvertedFile(bucket, key, opts, callback) { + const convertedKey = KeyBuilder.addCachingToKey(key, opts) + return PersistorManager.checkIfFileExists( + bucket, + convertedKey, + (err, exists) => { + if (err != null) { + return callback(err) + } + if (exists) { + return PersistorManager.getFileStream( + bucket, + convertedKey, + opts, + callback + ) + } else { + return this._getConvertedFileAndCache( + bucket, + key, + convertedKey, + opts, + callback + ) + } + } + ) + }, - _getConvertedFile(bucket, key, opts, callback){ - const convertedKey = KeyBuilder.addCachingToKey(key, opts); - return PersistorManager.checkIfFileExists(bucket, convertedKey, (err, exists)=> { - if (err != null) { - return callback(err); - } - if (exists) { - 
return PersistorManager.getFileStream(bucket, convertedKey, opts, callback); - } else { - return this._getConvertedFileAndCache(bucket, key, convertedKey, opts, callback); - } - }); - }, + _getConvertedFileAndCache(bucket, key, convertedKey, opts, callback) { + let convertedFsPath = '' + const originalFsPath = '' + return async.series( + [ + cb => { + return this._convertFile(bucket, key, opts, function( + err, + fileSystemPath, + originalFsPath + ) { + convertedFsPath = fileSystemPath + originalFsPath = originalFsPath + return cb(err) + }) + }, + cb => ImageOptimiser.compressPng(convertedFsPath, cb), + cb => + PersistorManager.sendFile(bucket, convertedKey, convertedFsPath, cb) + ], + function(err) { + if (err != null) { + LocalFileWriter.deleteFile(convertedFsPath, function() {}) + LocalFileWriter.deleteFile(originalFsPath, function() {}) + return callback(err) + } + // Send back the converted file from the local copy to avoid problems + // with the file not being present in S3 yet. As described in the + // documentation below, we have already made a 'HEAD' request in + // checkIfFileExists so we only have "eventual consistency" if we try + // to stream it from S3 here. This was a cause of many 403 errors. + // + // "Amazon S3 provides read-after-write consistency for PUTS of new + // objects in your S3 bucket in all regions with one caveat. The + // caveat is that if you make a HEAD or GET request to the key name + // (to find if the object exists) before creating the object, Amazon + // S3 provides eventual consistency for read-after-write."" + // https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel + return LocalFileWriter.getStream(convertedFsPath, function( + err, + readStream + ) { + if (err != null) { + return callback(err) + } + readStream.on('end', function() { + logger.log({ convertedFsPath }, 'deleting temporary file') + return LocalFileWriter.deleteFile(convertedFsPath, function() {}) + }) + return callback(null, readStream) + }) + } + ) + }, - _getConvertedFileAndCache(bucket, key, convertedKey, opts, callback){ - let convertedFsPath = ""; - const originalFsPath = ""; - return async.series([ - cb => { - return this._convertFile(bucket, key, opts, function(err, fileSystemPath, originalFsPath) { - convertedFsPath = fileSystemPath; - originalFsPath = originalFsPath; - return cb(err); - }); - }, - cb => ImageOptimiser.compressPng(convertedFsPath, cb), - cb => PersistorManager.sendFile(bucket, convertedKey, convertedFsPath, cb) - ], function(err){ - if (err != null) { - LocalFileWriter.deleteFile(convertedFsPath, function() {}); - LocalFileWriter.deleteFile(originalFsPath, function() {}); - return callback(err); - } - // Send back the converted file from the local copy to avoid problems - // with the file not being present in S3 yet. As described in the - // documentation below, we have already made a 'HEAD' request in - // checkIfFileExists so we only have "eventual consistency" if we try - // to stream it from S3 here. This was a cause of many 403 errors. - // - // "Amazon S3 provides read-after-write consistency for PUTS of new - // objects in your S3 bucket in all regions with one caveat. 
The - // caveat is that if you make a HEAD or GET request to the key name - // (to find if the object exists) before creating the object, Amazon - // S3 provides eventual consistency for read-after-write."" - // https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel - return LocalFileWriter.getStream(convertedFsPath, function(err, readStream) { - if (err != null) { return callback(err); } - readStream.on('end', function() { - logger.log({convertedFsPath}, "deleting temporary file"); - return LocalFileWriter.deleteFile(convertedFsPath, function() {}); - }); - return callback(null, readStream); - }); - }); - }, + _convertFile(bucket, originalKey, opts, callback) { + return this._writeS3FileToDisk(bucket, originalKey, opts, function( + err, + originalFsPath + ) { + if (err != null) { + return callback(err) + } + const done = function(err, destPath) { + if (err != null) { + logger.err( + { err, bucket, originalKey, opts: FileHandler._scrubSecrets(opts) }, + 'error converting file' + ) + return callback(err) + } + LocalFileWriter.deleteFile(originalFsPath, function() {}) + return callback(err, destPath, originalFsPath) + } - _convertFile(bucket, originalKey, opts, callback){ - return this._writeS3FileToDisk(bucket, originalKey, opts, function(err, originalFsPath){ - if (err != null) { - return callback(err); - } - const done = function(err, destPath){ - if (err != null) { - logger.err({err, bucket, originalKey, opts:FileHandler._scrubSecrets(opts)}, "error converting file"); - return callback(err); - } - LocalFileWriter.deleteFile(originalFsPath, function() {}); - return callback(err, destPath, originalFsPath); - }; + logger.log({ opts }, 'converting file depending on opts') - logger.log({opts}, "converting file depending on opts"); - - if (opts.format != null) { - return FileConverter.convert(originalFsPath, opts.format, done); - } else if (opts.style === "thumbnail") { - return FileConverter.thumbnail(originalFsPath, done); - } else if (opts.style === "preview") { - return FileConverter.preview(originalFsPath, done); - } else { - return callback(new Error(`should have specified opts to convert file with ${JSON.stringify(opts)}`)); - } - }); - }, + if (opts.format != null) { + return FileConverter.convert(originalFsPath, opts.format, done) + } else if (opts.style === 'thumbnail') { + return FileConverter.thumbnail(originalFsPath, done) + } else if (opts.style === 'preview') { + return FileConverter.preview(originalFsPath, done) + } else { + return callback( + new Error( + `should have specified opts to convert file with ${JSON.stringify( + opts + )}` + ) + ) + } + }) + }, + _writeS3FileToDisk(bucket, key, opts, callback) { + return PersistorManager.getFileStream(bucket, key, opts, function( + err, + fileStream + ) { + if (err != null) { + return callback(err) + } + return LocalFileWriter.writeStream(fileStream, key, callback) + }) + }, - _writeS3FileToDisk(bucket, key, opts, callback){ - return PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream){ - if (err != null) { - return callback(err); - } - return LocalFileWriter.writeStream(fileStream, key, callback); - }); - }, + getDirectorySize(bucket, project_id, callback) { + logger.log({ bucket, project_id }, 'getting project size') + return PersistorManager.directorySize(bucket, project_id, function( + err, + size + ) { + if (err != null) { + logger.err({ bucket, project_id }, 'error getting size') + } + return callback(err, size) + }) + }, - getDirectorySize(bucket, project_id, callback){ - 
logger.log({bucket, project_id}, "getting project size"); - return PersistorManager.directorySize(bucket, project_id, function(err, size){ - if (err != null) { - logger.err({bucket, project_id}, "error getting size"); - } - return callback(err, size); - }); - }, - - _scrubSecrets(opts){ - const safe = Object.assign({}, opts); - delete safe.credentials; - return safe; - } -}); + _scrubSecrets(opts) { + const safe = Object.assign({}, opts) + delete safe.credentials + return safe + } +} diff --git a/services/filestore/app/js/HealthCheckController.js b/services/filestore/app/js/HealthCheckController.js index e360891609..eecadb00d9 100644 --- a/services/filestore/app/js/HealthCheckController.js +++ b/services/filestore/app/js/HealthCheckController.js @@ -6,71 +6,75 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const fs = require("fs-extra"); -const path = require("path"); -const async = require("async"); -const fileConverter = require("./FileConverter"); -const keyBuilder = require("./KeyBuilder"); -const fileController = require("./FileController"); -const logger = require('logger-sharelatex'); -const settings = require("settings-sharelatex"); -const streamBuffers = require("stream-buffers"); -const _ = require('underscore'); +const fs = require('fs-extra') +const path = require('path') +const async = require('async') +const fileConverter = require('./FileConverter') +const keyBuilder = require('./KeyBuilder') +const fileController = require('./FileController') +const logger = require('logger-sharelatex') +const settings = require('settings-sharelatex') +const streamBuffers = require('stream-buffers') +const _ = require('underscore') +const checkCanStoreFiles = function(callback) { + callback = _.once(callback) + const req = { params: {}, query: {}, headers: {} } + req.params.project_id = settings.health_check.project_id + req.params.file_id = settings.health_check.file_id + const myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer({ + initialSize: 100 + }) + const res = { + send(code) { + if (code !== 200) { + return callback(new Error(`non-200 code from getFile: ${code}`)) + } + } + } + myWritableStreamBuffer.send = res.send + return keyBuilder.userFileKey(req, res, function() { + fileController.getFile(req, myWritableStreamBuffer) + return myWritableStreamBuffer.on('close', function() { + if (myWritableStreamBuffer.size() > 0) { + return callback() + } else { + const err = 'no data in write stream buffer for health check' + logger.err({ err }, 'error performing health check') + return callback(err) + } + }) + }) +} -const checkCanStoreFiles = function(callback){ - callback = _.once(callback); - const req = {params:{}, query:{}, headers:{}}; - req.params.project_id = settings.health_check.project_id; - req.params.file_id = settings.health_check.file_id; - const myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer({initialSize: 100}); - const res = { - send(code) { - if (code !== 200) { - return callback(new Error(`non-200 code from getFile: ${code}`)); - } - } - }; - myWritableStreamBuffer.send = res.send; - return keyBuilder.userFileKey(req, res, function() { - fileController.getFile(req, myWritableStreamBuffer); - return myWritableStreamBuffer.on("close", function() { - if (myWritableStreamBuffer.size() > 0) { - return callback(); - } else { - const err = "no data in write stream buffer for health check"; - logger.err({err,}, "error performing health check"); - 
return callback(err); - } - }); - }); -}; - -const checkFileConvert = function(callback){ - if (!settings.enableConversions) { - return callback(); - } - const imgPath = path.join(settings.path.uploadFolder, "/tiny.pdf"); - return async.waterfall([ - cb => fs.copy("./tiny.pdf", imgPath, cb), - cb => fileConverter.thumbnail(imgPath, cb), - (resultPath, cb) => fs.unlink(resultPath, cb), - cb => fs.unlink(imgPath, cb) - ], callback); -}; - +const checkFileConvert = function(callback) { + if (!settings.enableConversions) { + return callback() + } + const imgPath = path.join(settings.path.uploadFolder, '/tiny.pdf') + return async.waterfall( + [ + cb => fs.copy('./tiny.pdf', imgPath, cb), + cb => fileConverter.thumbnail(imgPath, cb), + (resultPath, cb) => fs.unlink(resultPath, cb), + cb => fs.unlink(imgPath, cb) + ], + callback + ) +} module.exports = { - - check(req, res) { - logger.log({}, "performing health check"); - return async.parallel([checkFileConvert, checkCanStoreFiles], function(err){ - if (err != null) { - logger.err({err}, "Health check: error running"); - return res.send(500); - } else { - return res.send(200); - } - }); - } -}; + check(req, res) { + logger.log({}, 'performing health check') + return async.parallel([checkFileConvert, checkCanStoreFiles], function( + err + ) { + if (err != null) { + logger.err({ err }, 'Health check: error running') + return res.send(500) + } else { + return res.send(200) + } + }) + } +} diff --git a/services/filestore/app/js/ImageOptimiser.js b/services/filestore/app/js/ImageOptimiser.js index 0126786167..99f1eb21f6 100644 --- a/services/filestore/app/js/ImageOptimiser.js +++ b/services/filestore/app/js/ImageOptimiser.js @@ -9,36 +9,36 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const { - exec -} = require('child_process'); -const logger = require("logger-sharelatex"); -const Settings = require("settings-sharelatex"); +const { exec } = require('child_process') +const logger = require('logger-sharelatex') +const Settings = require('settings-sharelatex') -module.exports = { - - compressPng(localPath, callback){ - const startTime = new Date(); - logger.log({localPath}, "optimising png path"); - const args = `optipng ${localPath}`; - const opts = { - timeout: 30 * 1000, - killSignal: "SIGKILL" - }; - if (!Settings.enableConversions) { - const error = new Error("Image conversions are disabled"); - return callback(error); - } - return exec(args, opts,function(err, stdout, stderr){ - if ((err != null) && (err.signal === 'SIGKILL')) { - logger.warn({err, stderr, localPath}, "optimiser timeout reached"); - err = null; - } else if (err != null) { - logger.err({err, stderr, localPath}, "something went wrong converting compressPng"); - } else { - logger.log({localPath}, "finished compressPng file"); - } - return callback(err); - }); - } -}; +module.exports = { + compressPng(localPath, callback) { + const startTime = new Date() + logger.log({ localPath }, 'optimising png path') + const args = `optipng ${localPath}` + const opts = { + timeout: 30 * 1000, + killSignal: 'SIGKILL' + } + if (!Settings.enableConversions) { + const error = new Error('Image conversions are disabled') + return callback(error) + } + return exec(args, opts, function(err, stdout, stderr) { + if (err != null && err.signal === 'SIGKILL') { + logger.warn({ err, stderr, localPath }, 'optimiser timeout reached') + err = null + } else if (err != null) { + logger.err( + { err, stderr, 
localPath }, + 'something went wrong converting compressPng' + ) + } else { + logger.log({ localPath }, 'finished compressPng file') + } + return callback(err) + }) + } +} diff --git a/services/filestore/app/js/KeyBuilder.js b/services/filestore/app/js/KeyBuilder.js index 5d3a36e6b1..110900c991 100644 --- a/services/filestore/app/js/KeyBuilder.js +++ b/services/filestore/app/js/KeyBuilder.js @@ -11,65 +11,61 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const settings = require("settings-sharelatex"); +const settings = require('settings-sharelatex') module.exports = { + getConvertedFolderKey(key) { + return (key = `${key}-converted-cache/`) + }, + addCachingToKey(key, opts) { + key = this.getConvertedFolderKey(key) + if (opts.format != null && opts.style == null) { + key = `${key}format-${opts.format}` + } + if (opts.style != null && opts.format == null) { + key = `${key}style-${opts.style}` + } + if (opts.style != null && opts.format != null) { + key = `${key}format-${opts.format}-style-${opts.style}` + } + return key + }, - getConvertedFolderKey(key){ - return key = `${key}-converted-cache/`; - }, + userFileKey(req, res, next) { + const { project_id, file_id } = req.params + req.key = `${project_id}/${file_id}` + req.bucket = settings.filestore.stores.user_files + return next() + }, - addCachingToKey(key, opts){ - key = this.getConvertedFolderKey(key); - if ((opts.format != null) && (opts.style == null)) { - key = `${key}format-${opts.format}`; - } - if ((opts.style != null) && (opts.format == null)) { - key = `${key}style-${opts.style}`; - } - if ((opts.style != null) && (opts.format != null)) { - key = `${key}format-${opts.format}-style-${opts.style}`; - } - return key; - }, + publicFileKey(req, res, next) { + const { project_id, public_file_id } = req.params + if (settings.filestore.stores.public_files == null) { + return res.status(501).send('public files not available') + } else { + req.key = `${project_id}/${public_file_id}` + req.bucket = settings.filestore.stores.public_files + return next() + } + }, + templateFileKey(req, res, next) { + const { template_id, format, version, sub_type } = req.params + req.key = `${template_id}/v/${version}/${format}` + if (sub_type != null) { + req.key = `${req.key}/${sub_type}` + } + req.bucket = settings.filestore.stores.template_files + req.version = version + const opts = req.query + return next() + }, - userFileKey(req, res, next){ - const {project_id, file_id} = req.params; - req.key = `${project_id}/${file_id}`; - req.bucket = settings.filestore.stores.user_files; - return next(); - }, - - publicFileKey(req, res, next){ - const {project_id, public_file_id} = req.params; - if ((settings.filestore.stores.public_files == null)) { - return res.status(501).send("public files not available"); - } else { - req.key = `${project_id}/${public_file_id}`; - req.bucket = settings.filestore.stores.public_files; - return next(); - } - }, - - templateFileKey(req, res, next){ - const {template_id, format, version, sub_type} = req.params; - req.key = `${template_id}/v/${version}/${format}`; - if (sub_type != null) { - req.key = `${req.key}/${sub_type}`; - } - req.bucket = settings.filestore.stores.template_files; - req.version = version; - const opts = req.query; - return next(); - }, - - publicProjectKey(req, res, next){ - const {project_id} = req.params; - req.project_id = project_id; - req.bucket = settings.filestore.stores.user_files; - return next(); - 
} -}; - + publicProjectKey(req, res, next) { + const { project_id } = req.params + req.project_id = project_id + req.bucket = settings.filestore.stores.user_files + return next() + } +} diff --git a/services/filestore/app/js/LocalFileWriter.js b/services/filestore/app/js/LocalFileWriter.js index b0bfedcc04..8a541a35e9 100644 --- a/services/filestore/app/js/LocalFileWriter.js +++ b/services/filestore/app/js/LocalFileWriter.js @@ -9,73 +9,83 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const fs = require("fs"); -const uuid = require('node-uuid'); -const path = require("path"); -const _ = require("underscore"); -const logger = require("logger-sharelatex"); -const metrics = require("metrics-sharelatex"); -const Settings = require("settings-sharelatex"); -const Errors = require("./Errors"); +const fs = require('fs') +const uuid = require('node-uuid') +const path = require('path') +const _ = require('underscore') +const logger = require('logger-sharelatex') +const metrics = require('metrics-sharelatex') +const Settings = require('settings-sharelatex') +const Errors = require('./Errors') -module.exports = { +module.exports = { + writeStream(stream, key, callback) { + const timer = new metrics.Timer('writingFile') + callback = _.once(callback) + const fsPath = this._getPath(key) + logger.log({ fsPath }, 'writing file locally') + const writeStream = fs.createWriteStream(fsPath) + writeStream.on('finish', function() { + timer.done() + logger.log({ fsPath }, 'finished writing file locally') + return callback(null, fsPath) + }) + writeStream.on('error', function(err) { + logger.err( + { err, fsPath }, + 'problem writing file locally, with write stream' + ) + return callback(err) + }) + stream.on('error', function(err) { + logger.log( + { err, fsPath }, + 'problem writing file locally, with read stream' + ) + return callback(err) + }) + return stream.pipe(writeStream) + }, - writeStream(stream, key, callback){ - const timer = new metrics.Timer("writingFile"); - callback = _.once(callback); - const fsPath = this._getPath(key); - logger.log({fsPath}, "writing file locally"); - const writeStream = fs.createWriteStream(fsPath); - writeStream.on("finish", function() { - timer.done(); - logger.log({fsPath}, "finished writing file locally"); - return callback(null, fsPath); - }); - writeStream.on("error", function(err){ - logger.err({err, fsPath}, "problem writing file locally, with write stream"); - return callback(err); - }); - stream.on("error", function(err){ - logger.log({err, fsPath}, "problem writing file locally, with read stream"); - return callback(err); - }); - return stream.pipe(writeStream); - }, + getStream(fsPath, _callback) { + if (_callback == null) { + _callback = function(err, res) {} + } + const callback = _.once(_callback) + const timer = new metrics.Timer('readingFile') + logger.log({ fsPath }, 'reading file locally') + const readStream = fs.createReadStream(fsPath) + readStream.on('end', function() { + timer.done() + return logger.log({ fsPath }, 'finished reading file locally') + }) + readStream.on('error', function(err) { + logger.err( + { err, fsPath }, + 'problem reading file locally, with read stream' + ) + if (err.code === 'ENOENT') { + return callback(new Errors.NotFoundError(err.message), null) + } else { + return callback(err) + } + }) + return callback(null, readStream) + }, - getStream(fsPath, _callback) { - if (_callback == null) { _callback = function(err, res){}; } - 
const callback = _.once(_callback); - const timer = new metrics.Timer("readingFile"); - logger.log({fsPath}, "reading file locally"); - const readStream = fs.createReadStream(fsPath); - readStream.on("end", function() { - timer.done(); - return logger.log({fsPath}, "finished reading file locally"); - }); - readStream.on("error", function(err){ - logger.err({err, fsPath}, "problem reading file locally, with read stream"); - if (err.code === 'ENOENT') { - return callback(new Errors.NotFoundError(err.message), null); - } else { - return callback(err); - } - }); - return callback(null, readStream); - }, + deleteFile(fsPath, callback) { + if (fsPath == null || fsPath === '') { + return callback() + } + logger.log({ fsPath }, 'removing local temp file') + return fs.unlink(fsPath, callback) + }, - deleteFile(fsPath, callback){ - if ((fsPath == null) || (fsPath === "")) { - return callback(); - } - logger.log({fsPath}, "removing local temp file"); - return fs.unlink(fsPath, callback); - }, - - _getPath(key){ - if ((key == null)) { - key = uuid.v1(); - } - key = key.replace(/\//g,"-"); - return path.join(Settings.path.uploadFolder, key); - } -}; + _getPath(key) { + if (key == null) { + key = uuid.v1() + } + key = key.replace(/\//g, '-') + return path.join(Settings.path.uploadFolder, key) + } +} diff --git a/services/filestore/app/js/PersistorManager.js b/services/filestore/app/js/PersistorManager.js index 95a4122591..182e39b085 100644 --- a/services/filestore/app/js/PersistorManager.js +++ b/services/filestore/app/js/PersistorManager.js @@ -7,24 +7,46 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const settings = require("settings-sharelatex"); -const logger = require("logger-sharelatex"); +const settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') // assume s3 if none specified -__guard__(settings != null ? settings.filestore : undefined, x => x.backend || (settings.filestore.backend = "s3")); +__guard__( + settings != null ? settings.filestore : undefined, + x => x.backend || (settings.filestore.backend = 's3') +) -logger.log({backend:__guard__(settings != null ? settings.filestore : undefined, x1 => x1.backend)}, "Loading backend"); -module.exports = (() => { switch (__guard__(settings != null ? settings.filestore : undefined, x2 => x2.backend)) { - case "aws-sdk": - return require("./AWSSDKPersistorManager"); - case "s3": - return require("./S3PersistorManager"); - case "fs": - return require("./FSPersistorManager"); - default: - throw new Error( `Unknown filestore backend: ${settings.filestore.backend}` ); -} })(); +logger.log( + { + backend: __guard__( + settings != null ? settings.filestore : undefined, + x1 => x1.backend + ) + }, + 'Loading backend' +) +module.exports = (() => { + switch ( + __guard__( + settings != null ? settings.filestore : undefined, + x2 => x2.backend + ) + ) { + case 'aws-sdk': + return require('./AWSSDKPersistorManager') + case 's3': + return require('./S3PersistorManager') + case 'fs': + return require('./FSPersistorManager') + default: + throw new Error( + `Unknown filestore backend: ${settings.filestore.backend}` + ) + } +})() function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? 
transform(value) + : undefined +} diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3PersistorManager.js index 0face2de53..cadf38172a 100644 --- a/services/filestore/app/js/S3PersistorManager.js +++ b/services/filestore/app/js/S3PersistorManager.js @@ -19,282 +19,358 @@ // to use aws-sdk throughout, see the comments in AWSSDKPersistorManager for // details. The knox library is unmaintained and has bugs. -const http = require('http'); -http.globalAgent.maxSockets = 300; -const https = require('https'); -https.globalAgent.maxSockets = 300; -const settings = require("settings-sharelatex"); -const request = require("request"); -const logger = require("logger-sharelatex"); -const metrics = require("metrics-sharelatex"); -const fs = require("fs"); -const knox = require("knox"); -const path = require("path"); -const LocalFileWriter = require("./LocalFileWriter"); -const Errors = require("./Errors"); -const _ = require("underscore"); -const awsS3 = require("aws-sdk/clients/s3"); -const URL = require('url'); +const http = require('http') +http.globalAgent.maxSockets = 300 +const https = require('https') +https.globalAgent.maxSockets = 300 +const settings = require('settings-sharelatex') +const request = require('request') +const logger = require('logger-sharelatex') +const metrics = require('metrics-sharelatex') +const fs = require('fs') +const knox = require('knox') +const path = require('path') +const LocalFileWriter = require('./LocalFileWriter') +const Errors = require('./Errors') +const _ = require('underscore') +const awsS3 = require('aws-sdk/clients/s3') +const URL = require('url') -const thirtySeconds = 30 * 1000; +const thirtySeconds = 30 * 1000 -const buildDefaultOptions = function(bucketName, method, key){ - let endpoint; - if (settings.filestore.s3.endpoint) { - endpoint = `${settings.filestore.s3.endpoint}/${bucketName}`; - } else { - endpoint = `https://${bucketName}.s3.amazonaws.com`; - } - return { - aws: { - key: settings.filestore.s3.key, - secret: settings.filestore.s3.secret, - bucket: bucketName - }, - method, - timeout: thirtySeconds, - uri:`${endpoint}/${key}` - }; -}; +const buildDefaultOptions = function(bucketName, method, key) { + let endpoint + if (settings.filestore.s3.endpoint) { + endpoint = `${settings.filestore.s3.endpoint}/${bucketName}` + } else { + endpoint = `https://${bucketName}.s3.amazonaws.com` + } + return { + aws: { + key: settings.filestore.s3.key, + secret: settings.filestore.s3.secret, + bucket: bucketName + }, + method, + timeout: thirtySeconds, + uri: `${endpoint}/${key}` + } +} const getS3Options = function(credentials) { - const options = { - credentials: { - accessKeyId: credentials.auth_key, - secretAccessKey: credentials.auth_secret - } - }; + const options = { + credentials: { + accessKeyId: credentials.auth_key, + secretAccessKey: credentials.auth_secret + } + } - if (settings.filestore.s3.endpoint) { - const endpoint = URL.parse(settings.filestore.s3.endpoint); - options.endpoint = settings.filestore.s3.endpoint; - options.sslEnabled = endpoint.protocol === 'https'; - } + if (settings.filestore.s3.endpoint) { + const endpoint = URL.parse(settings.filestore.s3.endpoint) + options.endpoint = settings.filestore.s3.endpoint + options.sslEnabled = endpoint.protocol === 'https' + } - return options; -}; + return options +} -const defaultS3Client = new awsS3(getS3Options({ - auth_key: settings.filestore.s3.key, - auth_secret: settings.filestore.s3.secret -})); +const defaultS3Client = new awsS3( + 
getS3Options({ + auth_key: settings.filestore.s3.key, + auth_secret: settings.filestore.s3.secret + }) +) const getS3Client = function(credentials) { - if (credentials != null) { - return new awsS3(getS3Options(credentials)); - } else { - return defaultS3Client; - } -}; + if (credentials != null) { + return new awsS3(getS3Options(credentials)) + } else { + return defaultS3Client + } +} const getKnoxClient = bucketName => { - const options = { - key: settings.filestore.s3.key, - secret: settings.filestore.s3.secret, - bucket: bucketName - }; - if (settings.filestore.s3.endpoint) { - const endpoint = URL.parse(settings.filestore.s3.endpoint); - options.endpoint = endpoint.hostname; - options.port = endpoint.port; - } - return knox.createClient(options); -}; + const options = { + key: settings.filestore.s3.key, + secret: settings.filestore.s3.secret, + bucket: bucketName + } + if (settings.filestore.s3.endpoint) { + const endpoint = URL.parse(settings.filestore.s3.endpoint) + options.endpoint = endpoint.hostname + options.port = endpoint.port + } + return knox.createClient(options) +} module.exports = { + sendFile(bucketName, key, fsPath, callback) { + const s3Client = getKnoxClient(bucketName) + let uploaded = 0 + const putEventEmiter = s3Client.putFile(fsPath, key, function(err, res) { + metrics.count('s3.egress', uploaded) + if (err != null) { + logger.err( + { err, bucketName, key, fsPath }, + 'something went wrong uploading file to s3' + ) + return callback(err) + } + if (res == null) { + logger.err( + { err, res, bucketName, key, fsPath }, + 'no response from s3 put file' + ) + return callback('no response from put file') + } + if (res.statusCode !== 200) { + logger.err( + { bucketName, key, fsPath }, + 'non 200 response from s3 putting file' + ) + return callback('non 200 response from s3 on put file') + } + logger.log({ res, bucketName, key, fsPath }, 'file uploaded to s3') + return callback(err) + }) + putEventEmiter.on('error', function(err) { + logger.err( + { err, bucketName, key, fsPath }, + 'error emmited on put of file' + ) + return callback(err) + }) + return putEventEmiter.on( + 'progress', + progress => (uploaded = progress.written) + ) + }, - sendFile(bucketName, key, fsPath, callback){ - const s3Client = getKnoxClient(bucketName); - let uploaded = 0; - const putEventEmiter = s3Client.putFile(fsPath, key, function(err, res){ - metrics.count('s3.egress', uploaded); - if (err != null) { - logger.err({err, bucketName, key, fsPath},"something went wrong uploading file to s3"); - return callback(err); - } - if ((res == null)) { - logger.err({err, res, bucketName, key, fsPath}, "no response from s3 put file"); - return callback("no response from put file"); - } - if (res.statusCode !== 200) { - logger.err({bucketName, key, fsPath}, "non 200 response from s3 putting file"); - return callback("non 200 response from s3 on put file"); - } - logger.log({res, bucketName, key, fsPath},"file uploaded to s3"); - return callback(err); - }); - putEventEmiter.on("error", function(err){ - logger.err({err, bucketName, key, fsPath}, "error emmited on put of file"); - return callback(err); - }); - return putEventEmiter.on("progress", progress => uploaded = progress.written); - }, + sendStream(bucketName, key, readStream, callback) { + logger.log({ bucketName, key }, 'sending file to s3') + readStream.on('error', err => + logger.err({ bucketName, key }, 'error on stream to send to s3') + ) + return LocalFileWriter.writeStream(readStream, null, (err, fsPath) => { + if (err != null) { + logger.err( 
+ { bucketName, key, fsPath, err }, + 'something went wrong writing stream to disk' + ) + return callback(err) + } + return this.sendFile(bucketName, key, fsPath, ( + err // delete the temporary file created above and return the original error + ) => LocalFileWriter.deleteFile(fsPath, () => callback(err))) + }) + }, - sendStream(bucketName, key, readStream, callback){ - logger.log({bucketName, key}, "sending file to s3"); - readStream.on("error", err => logger.err({bucketName, key}, "error on stream to send to s3")); - return LocalFileWriter.writeStream(readStream, null, (err, fsPath)=> { - if (err != null) { - logger.err({bucketName, key, fsPath, err}, "something went wrong writing stream to disk"); - return callback(err); - } - return this.sendFile(bucketName, key, fsPath, err => // delete the temporary file created above and return the original error - LocalFileWriter.deleteFile(fsPath, () => callback(err))); - }); - }, + // opts may be {start: Number, end: Number} + getFileStream(bucketName, key, opts, callback) { + if (callback == null) { + callback = function(err, res) {} + } + opts = opts || {} + callback = _.once(callback) + logger.log({ bucketName, key }, 'getting file from s3') - // opts may be {start: Number, end: Number} - getFileStream(bucketName, key, opts, callback){ - if (callback == null) { callback = function(err, res){}; } - opts = opts || {}; - callback = _.once(callback); - logger.log({bucketName, key}, "getting file from s3"); + const s3 = getS3Client(opts.credentials) + const s3Params = { + Bucket: bucketName, + Key: key + } + if (opts.start != null && opts.end != null) { + s3Params.Range = `bytes=${opts.start}-${opts.end}` + } + const s3Request = s3.getObject(s3Params) - const s3 = getS3Client(opts.credentials); - const s3Params = { - Bucket: bucketName, - Key: key - }; - if ((opts.start != null) && (opts.end != null)) { - s3Params.Range = `bytes=${opts.start}-${opts.end}`; - } - const s3Request = s3.getObject(s3Params); + s3Request.on( + 'httpHeaders', + (statusCode, headers, response, statusMessage) => { + if ([403, 404].includes(statusCode)) { + // S3 returns a 403 instead of a 404 when the user doesn't have + // permission to list the bucket contents. + logger.log({ bucketName, key }, 'file not found in s3') + return callback( + new Errors.NotFoundError( + `File not found in S3: ${bucketName}:${key}` + ), + null + ) + } + if (![200, 206].includes(statusCode)) { + logger.log( + { bucketName, key }, + `error getting file from s3: ${statusCode}` + ) + return callback( + new Error( + `Got non-200 response from S3: ${statusCode} ${statusMessage}` + ), + null + ) + } + const stream = response.httpResponse.createUnbufferedStream() + stream.on('data', data => metrics.count('s3.ingress', data.byteLength)) - s3Request.on('httpHeaders', (statusCode, headers, response, statusMessage) => { - if ([403, 404].includes(statusCode)) { - // S3 returns a 403 instead of a 404 when the user doesn't have - // permission to list the bucket contents. 
- logger.log({ bucketName, key }, "file not found in s3"); - return callback(new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`), null); - } - if (![200, 206].includes(statusCode)) { - logger.log({bucketName, key }, `error getting file from s3: ${statusCode}`); - return callback(new Error(`Got non-200 response from S3: ${statusCode} ${statusMessage}`), null); - } - const stream = response.httpResponse.createUnbufferedStream(); - stream.on('data', data => metrics.count('s3.ingress', data.byteLength)); + return callback(null, stream) + } + ) - return callback(null, stream); - }); + s3Request.on('error', err => { + logger.err({ err, bucketName, key }, 'error getting file stream from s3') + return callback(err) + }) - s3Request.on('error', err => { - logger.err({ err, bucketName, key }, "error getting file stream from s3"); - return callback(err); - }); + return s3Request.send() + }, - return s3Request.send(); - }, + getFileSize(bucketName, key, callback) { + logger.log({ bucketName, key }, 'getting file size from S3') + const s3 = getS3Client() + return s3.headObject({ Bucket: bucketName, Key: key }, function(err, data) { + if (err != null) { + if ([403, 404].includes(err.statusCode)) { + // S3 returns a 403 instead of a 404 when the user doesn't have + // permission to list the bucket contents. + logger.log( + { + bucketName, + key + }, + 'file not found in s3' + ) + callback( + new Errors.NotFoundError( + `File not found in S3: ${bucketName}:${key}` + ) + ) + } else { + logger.err( + { + bucketName, + key, + err + }, + 'error performing S3 HeadObject' + ) + callback(err) + } + return + } + return callback(null, data.ContentLength) + }) + }, - getFileSize(bucketName, key, callback) { - logger.log({ bucketName, key }, "getting file size from S3"); - const s3 = getS3Client(); - return s3.headObject({ Bucket: bucketName, Key: key }, function(err, data) { - if (err != null) { - if ([403, 404].includes(err.statusCode)) { - // S3 returns a 403 instead of a 404 when the user doesn't have - // permission to list the bucket contents. 
- logger.log({ - bucketName, - key - }, "file not found in s3"); - callback( - new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`) - ); - } else { - logger.err({ - bucketName, - key, - err - }, "error performing S3 HeadObject"); - callback(err); - } - return; - } - return callback(null, data.ContentLength); - }); - }, + copyFile(bucketName, sourceKey, destKey, callback) { + logger.log({ bucketName, sourceKey, destKey }, 'copying file in s3') + const source = bucketName + '/' + sourceKey + // use the AWS SDK instead of knox due to problems with error handling (https://github.com/Automattic/knox/issues/114) + const s3 = getS3Client() + return s3.copyObject( + { Bucket: bucketName, Key: destKey, CopySource: source }, + function(err) { + if (err != null) { + if (err.code === 'NoSuchKey') { + logger.err( + { bucketName, sourceKey }, + 'original file not found in s3 when copying' + ) + return callback( + new Errors.NotFoundError( + 'original file not found in S3 when copying' + ) + ) + } else { + logger.err( + { err, bucketName, sourceKey, destKey }, + 'something went wrong copying file in aws' + ) + return callback(err) + } + } else { + return callback() + } + } + ) + }, - copyFile(bucketName, sourceKey, destKey, callback){ - logger.log({bucketName, sourceKey, destKey}, "copying file in s3"); - const source = bucketName + '/' + sourceKey; - // use the AWS SDK instead of knox due to problems with error handling (https://github.com/Automattic/knox/issues/114) - const s3 = getS3Client(); - return s3.copyObject({Bucket: bucketName, Key: destKey, CopySource: source}, function(err) { - if (err != null) { - if (err.code === 'NoSuchKey') { - logger.err({bucketName, sourceKey}, "original file not found in s3 when copying"); - return callback(new Errors.NotFoundError("original file not found in S3 when copying")); - } else { - logger.err({err, bucketName, sourceKey, destKey}, "something went wrong copying file in aws"); - return callback(err); - } - } else { - return callback(); - } - }); - }, + deleteFile(bucketName, key, callback) { + logger.log({ bucketName, key }, 'delete file in s3') + const options = buildDefaultOptions(bucketName, 'delete', key) + return request(options, function(err, res) { + if (err != null) { + logger.err( + { err, res, bucketName, key }, + 'something went wrong deleting file in aws' + ) + } + return callback(err) + }) + }, - deleteFile(bucketName, key, callback){ - logger.log({bucketName, key}, "delete file in s3"); - const options = buildDefaultOptions(bucketName, "delete", key); - return request(options, function(err, res){ - if (err != null) { - logger.err({err, res, bucketName, key}, "something went wrong deleting file in aws"); - } - return callback(err); - }); - }, + deleteDirectory(bucketName, key, _callback) { + // deleteMultiple can call the callback multiple times so protect against this. + const callback = function(...args) { + _callback(...Array.from(args || [])) + return (_callback = function() {}) + } - deleteDirectory(bucketName, key, _callback){ - // deleteMultiple can call the callback multiple times so protect against this. 
- const callback = function(...args) { - _callback(...Array.from(args || [])); - return _callback = function() {}; - }; + logger.log({ key, bucketName }, 'deleting directory') + const s3Client = getKnoxClient(bucketName) + return s3Client.list({ prefix: key }, function(err, data) { + if (err != null) { + logger.err( + { err, bucketName, key }, + 'something went wrong listing prefix in aws' + ) + return callback(err) + } + const keys = _.map(data.Contents, entry => entry.Key) + return s3Client.deleteMultiple(keys, callback) + }) + }, - logger.log({key, bucketName}, "deleting directory"); - const s3Client = getKnoxClient(bucketName); - return s3Client.list({prefix:key}, function(err, data){ - if (err != null) { - logger.err({err, bucketName, key}, "something went wrong listing prefix in aws"); - return callback(err); - } - const keys = _.map(data.Contents, entry => entry.Key); - return s3Client.deleteMultiple(keys, callback); - }); - }, + checkIfFileExists(bucketName, key, callback) { + logger.log({ bucketName, key }, 'checking if file exists in s3') + const options = buildDefaultOptions(bucketName, 'head', key) + return request(options, function(err, res) { + if (err != null) { + logger.err( + { err, res, bucketName, key }, + 'something went wrong checking file in aws' + ) + return callback(err) + } + if (res == null) { + logger.err( + { err, res, bucketName, key }, + 'no response object returned when checking if file exists' + ) + err = new Error(`no response from s3 ${bucketName} ${key}`) + return callback(err) + } + const exists = res.statusCode === 200 + logger.log({ bucketName, key, exists }, 'checked if file exsists in s3') + return callback(err, exists) + }) + }, - checkIfFileExists(bucketName, key, callback){ - logger.log({bucketName, key}, "checking if file exists in s3"); - const options = buildDefaultOptions(bucketName, "head", key); - return request(options, function(err, res){ - if (err != null) { - logger.err({err, res, bucketName, key}, "something went wrong checking file in aws"); - return callback(err); - } - if ((res == null)) { - logger.err({err, res, bucketName, key}, "no response object returned when checking if file exists"); - err = new Error(`no response from s3 ${bucketName} ${key}`); - return callback(err); - } - const exists = res.statusCode === 200; - logger.log({bucketName, key, exists}, "checked if file exsists in s3"); - return callback(err, exists); - }); - }, - - directorySize(bucketName, key, callback){ - logger.log({bucketName, key}, "get project size in s3"); - const s3Client = getKnoxClient(bucketName); - return s3Client.list({prefix:key}, function(err, data){ - if (err != null) { - logger.err({err, bucketName, key}, "something went wrong listing prefix in aws"); - return callback(err); - } - let totalSize = 0; - _.each(data.Contents, entry => totalSize += entry.Size); - logger.log({totalSize}, "total size"); - return callback(null, totalSize); - }); - } -}; + directorySize(bucketName, key, callback) { + logger.log({ bucketName, key }, 'get project size in s3') + const s3Client = getKnoxClient(bucketName) + return s3Client.list({ prefix: key }, function(err, data) { + if (err != null) { + logger.err( + { err, bucketName, key }, + 'something went wrong listing prefix in aws' + ) + return callback(err) + } + let totalSize = 0 + _.each(data.Contents, entry => (totalSize += entry.Size)) + logger.log({ totalSize }, 'total size') + return callback(null, totalSize) + }) + } +} diff --git a/services/filestore/app/js/SafeExec.js 
b/services/filestore/app/js/SafeExec.js index 4f13785460..dbc1576a88 100644 --- a/services/filestore/app/js/SafeExec.js +++ b/services/filestore/app/js/SafeExec.js @@ -12,10 +12,10 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const _ = require("underscore"); -const logger = require("logger-sharelatex"); -const child_process = require('child_process'); -const Settings = require("settings-sharelatex"); +const _ = require('underscore') +const logger = require('logger-sharelatex') +const child_process = require('child_process') +const Settings = require('settings-sharelatex') // execute a command in the same way as 'exec' but with a timeout that // kills all child processes @@ -24,44 +24,50 @@ const Settings = require("settings-sharelatex"); // group, then we can kill everything in that process group. module.exports = function(command, options, callback) { - if (callback == null) { callback = function(err, stdout, stderr) {}; } - if (!Settings.enableConversions) { - const error = new Error("Image conversions are disabled"); - return callback(error); - } + if (callback == null) { + callback = function(err, stdout, stderr) {} + } + if (!Settings.enableConversions) { + const error = new Error('Image conversions are disabled') + return callback(error) + } - // options are {timeout: number-of-milliseconds, killSignal: signal-name} - const [cmd, ...args] = Array.from(command); + // options are {timeout: number-of-milliseconds, killSignal: signal-name} + const [cmd, ...args] = Array.from(command) - const child = child_process.spawn(cmd, args, {detached:true}); - let stdout = ""; - let stderr = ""; + const child = child_process.spawn(cmd, args, { detached: true }) + let stdout = '' + let stderr = '' - const cleanup = _.once(function(err) { - if (killTimer != null) { clearTimeout(killTimer); } - return callback(err, stdout, stderr); - }); + const cleanup = _.once(function(err) { + if (killTimer != null) { + clearTimeout(killTimer) + } + return callback(err, stdout, stderr) + }) - if (options.timeout != null) { - var killTimer = setTimeout(function() { - try { - // use negative process id to kill process group - return process.kill(-child.pid, options.killSignal || "SIGTERM"); - } catch (error) { - return logger.log({process: child.pid, kill_error: error}, "error killing process"); - } - } - , options.timeout); - } + if (options.timeout != null) { + var killTimer = setTimeout(function() { + try { + // use negative process id to kill process group + return process.kill(-child.pid, options.killSignal || 'SIGTERM') + } catch (error) { + return logger.log( + { process: child.pid, kill_error: error }, + 'error killing process' + ) + } + }, options.timeout) + } - child.on('close', function(code, signal) { - const err = code ? new Error(`exit status ${code}`) : signal; - return cleanup(err); - }); + child.on('close', function(code, signal) { + const err = code ? 
new Error(`exit status ${code}`) : signal + return cleanup(err) + }) - child.on('error', err => cleanup(err)); + child.on('error', err => cleanup(err)) - child.stdout.on('data', chunk => stdout += chunk); + child.stdout.on('data', chunk => (stdout += chunk)) - return child.stderr.on('data', chunk => stderr += chunk); -}; + return child.stderr.on('data', chunk => (stderr += chunk)) +} From 1c7d1af4cae906efc6887c5e05759ed1dc9fdad5 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Mon, 16 Dec 2019 10:42:35 +0000 Subject: [PATCH 331/555] decaffeinate: Rename FilestoreApp.coffee and 1 other file from .coffee to .js --- .../acceptance/coffee/{FilestoreApp.coffee => FilestoreApp.js} | 0 .../coffee/{SendingFileTest.coffee => SendingFileTest.js} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename services/filestore/test/acceptance/coffee/{FilestoreApp.coffee => FilestoreApp.js} (100%) rename services/filestore/test/acceptance/coffee/{SendingFileTest.coffee => SendingFileTest.js} (100%) diff --git a/services/filestore/test/acceptance/coffee/FilestoreApp.coffee b/services/filestore/test/acceptance/coffee/FilestoreApp.js similarity index 100% rename from services/filestore/test/acceptance/coffee/FilestoreApp.coffee rename to services/filestore/test/acceptance/coffee/FilestoreApp.js diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.coffee b/services/filestore/test/acceptance/coffee/SendingFileTest.js similarity index 100% rename from services/filestore/test/acceptance/coffee/SendingFileTest.coffee rename to services/filestore/test/acceptance/coffee/SendingFileTest.js From e6e75d4e19f697b5274ba96f1e9d48e1752e24b2 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Mon, 16 Dec 2019 10:42:36 +0000 Subject: [PATCH 332/555] decaffeinate: Convert FilestoreApp.coffee and 1 other file to JS --- .../test/acceptance/coffee/FilestoreApp.js | 102 ++-- .../test/acceptance/coffee/SendingFileTest.js | 454 ++++++++++-------- 2 files changed, 332 insertions(+), 224 deletions(-) diff --git a/services/filestore/test/acceptance/coffee/FilestoreApp.js b/services/filestore/test/acceptance/coffee/FilestoreApp.js index 1b4cc38834..255c5a12ac 100644 --- a/services/filestore/test/acceptance/coffee/FilestoreApp.js +++ b/services/filestore/test/acceptance/coffee/FilestoreApp.js @@ -1,45 +1,73 @@ -app = require('../../../app') -require("logger-sharelatex").logger.level("info") -logger = require("logger-sharelatex") -Settings = require("settings-sharelatex") -request = require('request') +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const app = require('../../../app'); +require("logger-sharelatex").logger.level("info"); +const logger = require("logger-sharelatex"); +const Settings = require("settings-sharelatex"); +const request = require('request'); -S3_TRIES = 30 +const S3_TRIES = 30; -module.exports = - running: false - initing: false - callbacks: [] - ensureRunning: (callback = (error) ->) -> - if @running - return callback() - else if @initing - @callbacks.push callback - else - @initing = true - @callbacks.push callback - app.listen Settings.internal?.filestore?.port, "localhost", (error) => - throw error if error? 
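// decaffeinate rewrites the CoffeeScript soak operator used just above
// (Settings.internal?.filestore?.port) into the __guard__ helper that appears
// in the converted file. A standalone sketch of the equivalence, not from
// this commit, using a made-up settings object instead of settings-sharelatex:
function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}

const fakeSettings = { internal: { filestore: { port: 3000 } } }

// CoffeeScript: fakeSettings.internal?.filestore?.port
const port = __guard__(
  fakeSettings.internal != null ? fakeSettings.internal.filestore : undefined,
  x => x.port
)
console.log(port) // 3000; undefined if any link in the chain were missing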
- @running = true - logger.log("filestore running in dev mode") +module.exports = { + running: false, + initing: false, + callbacks: [], + ensureRunning(callback) { + if (callback == null) { callback = function(error) {}; } + if (this.running) { + return callback(); + } else if (this.initing) { + return this.callbacks.push(callback); + } else { + this.initing = true; + this.callbacks.push(callback); + return app.listen(__guard__(Settings.internal != null ? Settings.internal.filestore : undefined, x => x.port), "localhost", error => { + if (error != null) { throw error; } + this.running = true; + logger.log("filestore running in dev mode"); - for callback in @callbacks - callback() + return (() => { + const result = []; + for (callback of Array.from(this.callbacks)) { + result.push(callback()); + } + return result; + })(); + }); + } + }, - waitForS3: (callback, tries) -> - return callback() unless Settings.filestore.s3?.endpoint - tries = 1 unless tries + waitForS3(callback, tries) { + if (!(Settings.filestore.s3 != null ? Settings.filestore.s3.endpoint : undefined)) { return callback(); } + if (!tries) { tries = 1; } - request.get "#{Settings.filestore.s3.endpoint}/", (err, response) => - console.log(err, response?.statusCode, tries) - if !err && [200, 404].includes(response?.statusCode) - return callback() + return request.get(`${Settings.filestore.s3.endpoint}/`, (err, response) => { + console.log(err, response != null ? response.statusCode : undefined, tries); + if (!err && [200, 404].includes(response != null ? response.statusCode : undefined)) { + return callback(); + } - if tries == S3_TRIES - return callback('timed out waiting for S3') + if (tries === S3_TRIES) { + return callback('timed out waiting for S3'); + } - setTimeout( - () => - @waitForS3 callback, tries + 1 + return setTimeout( + () => { + return this.waitForS3(callback, tries + 1); + }, 1000 - ) + ); + }); + } +}; + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? 
transform(value) : undefined; +} \ No newline at end of file diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.js b/services/filestore/test/acceptance/coffee/SendingFileTest.js index 4e9443fd88..c45ef4d63d 100644 --- a/services/filestore/test/acceptance/coffee/SendingFileTest.js +++ b/services/filestore/test/acceptance/coffee/SendingFileTest.js @@ -1,226 +1,306 @@ -assert = require("chai").assert -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../app/js/LocalFileWriter.js" -SandboxedModule = require('sandboxed-module') -fs = require("fs") -request = require("request") -settings = require("settings-sharelatex") -FilestoreApp = require "./FilestoreApp" -async = require('async') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { + assert +} = require("chai"); +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../app/js/LocalFileWriter.js"; +const SandboxedModule = require('sandboxed-module'); +const fs = require("fs"); +const request = require("request"); +const settings = require("settings-sharelatex"); +const FilestoreApp = require("./FilestoreApp"); +const async = require('async'); -getMetric = (filestoreUrl, metric, cb) -> - request.get "#{filestoreUrl}/metrics", (err, res) -> - expect(res.statusCode).to.equal 200 - metricRegex = new RegExp("^#{metric}{[^}]+} ([0-9]+)$", "m") - cb(parseInt(metricRegex.exec(res.body)?[1] || '0')) +const getMetric = (filestoreUrl, metric, cb) => request.get(`${filestoreUrl}/metrics`, function(err, res) { + expect(res.statusCode).to.equal(200); + const metricRegex = new RegExp(`^${metric}{[^}]+} ([0-9]+)$`, "m"); + return cb(parseInt(__guard__(metricRegex.exec(res.body), x => x[1]) || '0')); +}); -describe "Filestore", -> - before (done)-> - @localFileReadPath = "/tmp/filestore_acceptence_tests_file_read.txt" - @localFileWritePath = "/tmp/filestore_acceptence_tests_file_write.txt" +describe("Filestore", function() { + before(function(done){ + this.localFileReadPath = "/tmp/filestore_acceptence_tests_file_read.txt"; + this.localFileWritePath = "/tmp/filestore_acceptence_tests_file_write.txt"; - @constantFileContent = [ - "hello world" - "line 2 goes here #{Math.random()}" + this.constantFileContent = [ + "hello world", + `line 2 goes here ${Math.random()}`, "there are 3 lines in all" - ].join("\n") + ].join("\n"); - @filestoreUrl = "http://localhost:#{settings.internal.filestore.port}" - fs.writeFile @localFileReadPath, @constantFileContent, (err) -> - return done(err) if err - FilestoreApp.waitForS3(done) + this.filestoreUrl = `http://localhost:${settings.internal.filestore.port}`; + return fs.writeFile(this.localFileReadPath, this.constantFileContent, function(err) { + if (err) { return done(err); } + return FilestoreApp.waitForS3(done); + }); + }); - beforeEach (done)-> - FilestoreApp.ensureRunning => - async.parallel [ - (cb) => - fs.unlink @localFileWritePath, () -> - cb() - (cb) => - getMetric @filestoreUrl, 's3_egress', (metric) => - @previousEgress = metric - cb() - (cb) => - getMetric @filestoreUrl, 's3_ingress', (metric) => - @previousIngress = metric - cb() - ], done + beforeEach(function(done){ + return FilestoreApp.ensureRunning(() => { + return 
async.parallel([ + cb => { + return fs.unlink(this.localFileWritePath, () => cb()); + }, + cb => { + return getMetric(this.filestoreUrl, 's3_egress', metric => { + this.previousEgress = metric; + return cb(); + }); + }, + cb => { + return getMetric(this.filestoreUrl, 's3_ingress', metric => { + this.previousIngress = metric; + return cb(); + }); + } + ], done); + }); + }); - it "should send a 200 for status endpoint", (done)-> - request "#{@filestoreUrl}/status", (err, response, body)-> - response.statusCode.should.equal 200 - body.indexOf("filestore").should.not.equal -1 - body.indexOf("up").should.not.equal -1 - done() + it("should send a 200 for status endpoint", function(done){ + return request(`${this.filestoreUrl}/status`, function(err, response, body){ + response.statusCode.should.equal(200); + body.indexOf("filestore").should.not.equal(-1); + body.indexOf("up").should.not.equal(-1); + return done(); + }); + }); - describe "with a file on the server", -> + describe("with a file on the server", function() { - beforeEach (done)-> - @timeout(1000 * 10) - @file_id = Math.random() - @fileUrl = "#{@filestoreUrl}/project/acceptence_tests/file/#{@file_id}" + beforeEach(function(done){ + this.timeout(1000 * 10); + this.file_id = Math.random(); + this.fileUrl = `${this.filestoreUrl}/project/acceptence_tests/file/${this.file_id}`; - writeStream = request.post(@fileUrl) + const writeStream = request.post(this.fileUrl); - writeStream.on "end", done - fs.createReadStream(@localFileReadPath).pipe writeStream + writeStream.on("end", done); + return fs.createReadStream(this.localFileReadPath).pipe(writeStream); + }); - it "should return 404 for a non-existant id", (done) -> - @timeout(1000 * 20) - options = - uri: @fileUrl + '___this_is_clearly_wrong___' - request.get options, (err, response, body) => - response.statusCode.should.equal 404 - done() + it("should return 404 for a non-existant id", function(done) { + this.timeout(1000 * 20); + const options = + {uri: this.fileUrl + '___this_is_clearly_wrong___'}; + return request.get(options, (err, response, body) => { + response.statusCode.should.equal(404); + return done(); + }); + }); - it 'should record an egress metric for the upload', (done) -> - getMetric @filestoreUrl, 's3_egress', (metric) => - expect(metric - @previousEgress).to.equal @constantFileContent.length - done() + it('should record an egress metric for the upload', function(done) { + return getMetric(this.filestoreUrl, 's3_egress', metric => { + expect(metric - this.previousEgress).to.equal(this.constantFileContent.length); + return done(); + }); + }); - it "should return the file size on a HEAD request", (done) -> - expectedLength = Buffer.byteLength(@constantFileContent) - request.head @fileUrl, (err, res) => - expect(res.statusCode).to.equal(200) - expect(res.headers['content-length']).to.equal(expectedLength.toString()) - done() + it("should return the file size on a HEAD request", function(done) { + const expectedLength = Buffer.byteLength(this.constantFileContent); + return request.head(this.fileUrl, (err, res) => { + expect(res.statusCode).to.equal(200); + expect(res.headers['content-length']).to.equal(expectedLength.toString()); + return done(); + }); + }); - it "should be able get the file back", (done)-> - @timeout(1000 * 10) - request.get @fileUrl, (err, response, body)=> - body.should.equal @constantFileContent - done() + it("should be able get the file back", function(done){ + this.timeout(1000 * 10); + return request.get(this.fileUrl, (err, response, body)=> { + 
body.should.equal(this.constantFileContent); + return done(); + }); + }); - it "should record an ingress metric when downloading the file", (done)-> - @timeout(1000 * 10) - request.get @fileUrl, () => - getMetric @filestoreUrl, 's3_ingress', (metric) => - expect(metric - @previousIngress).to.equal @constantFileContent.length - done() + it("should record an ingress metric when downloading the file", function(done){ + this.timeout(1000 * 10); + return request.get(this.fileUrl, () => { + return getMetric(this.filestoreUrl, 's3_ingress', metric => { + expect(metric - this.previousIngress).to.equal(this.constantFileContent.length); + return done(); + }); + }); + }); - it "should be able to get back the first 9 bytes of the file", (done) -> - @timeout(1000 * 10) - options = - uri: @fileUrl - headers: + it("should be able to get back the first 9 bytes of the file", function(done) { + this.timeout(1000 * 10); + const options = { + uri: this.fileUrl, + headers: { 'Range': 'bytes=0-8' - request.get options, (err, response, body)=> - body.should.equal 'hello wor' - done() + } + }; + return request.get(options, (err, response, body)=> { + body.should.equal('hello wor'); + return done(); + }); + }); - it "should record an ingress metric for a partial download", (done)-> - @timeout(1000 * 10) - options = - uri: @fileUrl - headers: + it("should record an ingress metric for a partial download", function(done){ + this.timeout(1000 * 10); + const options = { + uri: this.fileUrl, + headers: { 'Range': 'bytes=0-8' - request.get options, ()=> - getMetric @filestoreUrl, 's3_ingress', (metric) => - expect(metric - @previousIngress).to.equal 9 - done() + } + }; + return request.get(options, ()=> { + return getMetric(this.filestoreUrl, 's3_ingress', metric => { + expect(metric - this.previousIngress).to.equal(9); + return done(); + }); + }); + }); - it "should be able to get back bytes 4 through 10 of the file", (done) -> - @timeout(1000 * 10) - options = - uri: @fileUrl - headers: + it("should be able to get back bytes 4 through 10 of the file", function(done) { + this.timeout(1000 * 10); + const options = { + uri: this.fileUrl, + headers: { 'Range': 'bytes=4-10' - request.get options, (err, response, body)=> - body.should.equal 'o world' - done() + } + }; + return request.get(options, (err, response, body)=> { + body.should.equal('o world'); + return done(); + }); + }); - it "should be able to delete the file", (done)-> - @timeout(1000 * 20) - request.del @fileUrl, (err, response, body)=> - response.statusCode.should.equal 204 - request.get @fileUrl, (err, response, body)=> - response.statusCode.should.equal 404 - done() + it("should be able to delete the file", function(done){ + this.timeout(1000 * 20); + return request.del(this.fileUrl, (err, response, body)=> { + response.statusCode.should.equal(204); + return request.get(this.fileUrl, (err, response, body)=> { + response.statusCode.should.equal(404); + return done(); + }); + }); + }); - it "should be able to copy files", (done)-> - @timeout(1000 * 20) + return it("should be able to copy files", function(done){ + this.timeout(1000 * 20); - newProjectID = "acceptence_tests_copyied_project" - newFileId = Math.random() - newFileUrl = "#{@filestoreUrl}/project/#{newProjectID}/file/#{newFileId}" - opts = - method: 'put' - uri: newFileUrl - json: - source: - project_id:"acceptence_tests" - file_id: @file_id - request opts, (err, response, body)=> - response.statusCode.should.equal 200 - request.del @fileUrl, (err, response, body)=> - 
response.statusCode.should.equal 204 - request.get newFileUrl, (err, response, body)=> - body.should.equal @constantFileContent - done() + const newProjectID = "acceptence_tests_copyied_project"; + const newFileId = Math.random(); + const newFileUrl = `${this.filestoreUrl}/project/${newProjectID}/file/${newFileId}`; + const opts = { + method: 'put', + uri: newFileUrl, + json: { + source: { + project_id:"acceptence_tests", + file_id: this.file_id + } + } + }; + return request(opts, (err, response, body)=> { + response.statusCode.should.equal(200); + return request.del(this.fileUrl, (err, response, body)=> { + response.statusCode.should.equal(204); + return request.get(newFileUrl, (err, response, body)=> { + body.should.equal(this.constantFileContent); + return done(); + }); + }); + }); + }); + }); - describe "with a pdf file", -> + return describe("with a pdf file", function() { - beforeEach (done)-> - @timeout(1000 * 10) - @file_id = Math.random() - @fileUrl = "#{@filestoreUrl}/project/acceptence_tests/file/#{@file_id}" - @localFileReadPath = __dirname + '/../../fixtures/test.pdf' - fs.stat @localFileReadPath, (err, stat) => - @localFileSize = stat.size - writeStream = request.post(@fileUrl) + beforeEach(function(done){ + this.timeout(1000 * 10); + this.file_id = Math.random(); + this.fileUrl = `${this.filestoreUrl}/project/acceptence_tests/file/${this.file_id}`; + this.localFileReadPath = __dirname + '/../../fixtures/test.pdf'; + return fs.stat(this.localFileReadPath, (err, stat) => { + this.localFileSize = stat.size; + const writeStream = request.post(this.fileUrl); - writeStream.on "end", done - fs.createReadStream(@localFileReadPath).pipe writeStream + writeStream.on("end", done); + return fs.createReadStream(this.localFileReadPath).pipe(writeStream); + }); + }); - it 'should record an egress metric for the upload', (done) -> - getMetric @filestoreUrl, 's3_egress', (metric) => - expect(metric - @previousEgress).to.equal @localFileSize - done() + it('should record an egress metric for the upload', function(done) { + return getMetric(this.filestoreUrl, 's3_egress', metric => { + expect(metric - this.previousEgress).to.equal(this.localFileSize); + return done(); + }); + }); - it "should be able get the file back", (done)-> - @timeout(1000 * 10) - request.get @fileUrl, (err, response, body)=> - expect(body.substring(0, 8)).to.equal '%PDF-1.5' - done() + it("should be able get the file back", function(done){ + this.timeout(1000 * 10); + return request.get(this.fileUrl, (err, response, body)=> { + expect(body.substring(0, 8)).to.equal('%PDF-1.5'); + return done(); + }); + }); - describe "getting the preview image", -> + describe("getting the preview image", function() { - beforeEach -> - @previewFileUrl = "#{@fileUrl}?style=preview" + beforeEach(function() { + return this.previewFileUrl = `${this.fileUrl}?style=preview`; + }); - it "should not time out", (done) -> - @timeout(1000 * 20) - request.get @previewFileUrl, (err, response, body) => - expect(response).to.not.equal null - done() + it("should not time out", function(done) { + this.timeout(1000 * 20); + return request.get(this.previewFileUrl, (err, response, body) => { + expect(response).to.not.equal(null); + return done(); + }); + }); - it "should respond with image data", (done) -> - # note: this test relies of the imagemagick conversion working - @timeout(1000 * 20) - request.get @previewFileUrl, (err, response, body) => - expect(response.statusCode).to.equal 200 - expect(body.length).to.be.greaterThan 400 - done() + return 
it("should respond with image data", function(done) { + // note: this test relies of the imagemagick conversion working + this.timeout(1000 * 20); + return request.get(this.previewFileUrl, (err, response, body) => { + expect(response.statusCode).to.equal(200); + expect(body.length).to.be.greaterThan(400); + return done(); + }); + }); + }); - describe "warming the cache", -> + return describe("warming the cache", function() { - beforeEach -> - @fileUrl = @fileUrl + '?style=preview&cacheWarm=true' + beforeEach(function() { + return this.fileUrl = this.fileUrl + '?style=preview&cacheWarm=true'; + }); - it "should not time out", (done) -> - @timeout(1000 * 20) - request.get @fileUrl, (err, response, body) => - expect(response).to.not.equal null - done() + it("should not time out", function(done) { + this.timeout(1000 * 20); + return request.get(this.fileUrl, (err, response, body) => { + expect(response).to.not.equal(null); + return done(); + }); + }); - it "should respond with only an 'OK'", (done) -> - # note: this test relies of the imagemagick conversion working - @timeout(1000 * 20) - request.get @fileUrl, (err, response, body) => - expect(response.statusCode).to.equal 200 - body.should.equal 'OK' - done() + return it("should respond with only an 'OK'", function(done) { + // note: this test relies of the imagemagick conversion working + this.timeout(1000 * 20); + return request.get(this.fileUrl, (err, response, body) => { + expect(response.statusCode).to.equal(200); + body.should.equal('OK'); + return done(); + }); + }); + }); + }); +}); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; +} \ No newline at end of file From 738e1d821e491250a7e3f8c1ab4f9da8060e5356 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Mon, 16 Dec 2019 10:42:36 +0000 Subject: [PATCH 333/555] decaffeinate: Run post-processing cleanups on FilestoreApp.coffee and 1 other file --- services/filestore/test/acceptance/coffee/FilestoreApp.js | 6 ++++++ .../filestore/test/acceptance/coffee/SendingFileTest.js | 8 ++++++++ 2 files changed, 14 insertions(+) diff --git a/services/filestore/test/acceptance/coffee/FilestoreApp.js b/services/filestore/test/acceptance/coffee/FilestoreApp.js index 255c5a12ac..467d66ee6e 100644 --- a/services/filestore/test/acceptance/coffee/FilestoreApp.js +++ b/services/filestore/test/acceptance/coffee/FilestoreApp.js @@ -1,3 +1,9 @@ +/* eslint-disable + handle-callback-err, + standard/no-callback-literal, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.js b/services/filestore/test/acceptance/coffee/SendingFileTest.js index c45ef4d63d..2f31e60b99 100644 --- a/services/filestore/test/acceptance/coffee/SendingFileTest.js +++ b/services/filestore/test/acceptance/coffee/SendingFileTest.js @@ -1,3 +1,11 @@ +/* eslint-disable + handle-callback-err, + no-path-concat, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
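// The getMetric() helper in SendingFileTest.js pulls a counter out of the
// prometheus-style /metrics text with a regular expression. A standalone
// sketch of just that parsing step, not from this commit; the sample body and
// label values below are made up:
function parseMetric(body, metric) {
  const metricRegex = new RegExp(`^${metric}{[^}]+} ([0-9]+)$`, 'm')
  const match = metricRegex.exec(body)
  return parseInt((match && match[1]) || '0', 10)
}

const sampleBody = [
  '# TYPE s3_egress counter',
  's3_egress{app="filestore",host="localhost"} 1234',
  's3_ingress{app="filestore",host="localhost"} 99'
].join('\n')

console.log(parseMetric(sampleBody, 's3_egress')) // 1234
console.log(parseMetric(sampleBody, 'not_present')) // falls back to 0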
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns From b8e7abd25eba052017d336f1a7f13b5b3516da11 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 10:42:40 +0000 Subject: [PATCH 334/555] Rename test/acceptance/coffee to test/acceptance/js --- services/filestore/test/acceptance/{coffee => js}/FilestoreApp.js | 0 .../filestore/test/acceptance/{coffee => js}/SendingFileTest.js | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename services/filestore/test/acceptance/{coffee => js}/FilestoreApp.js (100%) rename services/filestore/test/acceptance/{coffee => js}/SendingFileTest.js (100%) diff --git a/services/filestore/test/acceptance/coffee/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js similarity index 100% rename from services/filestore/test/acceptance/coffee/FilestoreApp.js rename to services/filestore/test/acceptance/js/FilestoreApp.js diff --git a/services/filestore/test/acceptance/coffee/SendingFileTest.js b/services/filestore/test/acceptance/js/SendingFileTest.js similarity index 100% rename from services/filestore/test/acceptance/coffee/SendingFileTest.js rename to services/filestore/test/acceptance/js/SendingFileTest.js From e4b110676166dd5898d58231af7860d6ebbf45c7 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 10:42:41 +0000 Subject: [PATCH 335/555] Prettier: convert test/acceptance decaffeinated files to Prettier format --- .../test/acceptance/js/FilestoreApp.js | 138 +++-- .../test/acceptance/js/SendingFileTest.js | 548 +++++++++--------- 2 files changed, 364 insertions(+), 322 deletions(-) diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 467d66ee6e..3a5103d5f6 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -13,67 +13,97 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const app = require('../../../app'); -require("logger-sharelatex").logger.level("info"); -const logger = require("logger-sharelatex"); -const Settings = require("settings-sharelatex"); -const request = require('request'); +const app = require('../../../app') +require('logger-sharelatex').logger.level('info') +const logger = require('logger-sharelatex') +const Settings = require('settings-sharelatex') +const request = require('request') -const S3_TRIES = 30; +const S3_TRIES = 30 module.exports = { - running: false, - initing: false, - callbacks: [], - ensureRunning(callback) { - if (callback == null) { callback = function(error) {}; } - if (this.running) { - return callback(); - } else if (this.initing) { - return this.callbacks.push(callback); - } else { - this.initing = true; - this.callbacks.push(callback); - return app.listen(__guard__(Settings.internal != null ? Settings.internal.filestore : undefined, x => x.port), "localhost", error => { - if (error != null) { throw error; } - this.running = true; - logger.log("filestore running in dev mode"); + running: false, + initing: false, + callbacks: [], + ensureRunning(callback) { + if (callback == null) { + callback = function(error) {} + } + if (this.running) { + return callback() + } else if (this.initing) { + return this.callbacks.push(callback) + } else { + this.initing = true + this.callbacks.push(callback) + return app.listen( + __guard__( + Settings.internal != null ? 
Settings.internal.filestore : undefined, + x => x.port + ), + 'localhost', + error => { + if (error != null) { + throw error + } + this.running = true + logger.log('filestore running in dev mode') - return (() => { - const result = []; - for (callback of Array.from(this.callbacks)) { - result.push(callback()); - } - return result; - })(); - }); - } - }, + return (() => { + const result = [] + for (callback of Array.from(this.callbacks)) { + result.push(callback()) + } + return result + })() + } + ) + } + }, - waitForS3(callback, tries) { - if (!(Settings.filestore.s3 != null ? Settings.filestore.s3.endpoint : undefined)) { return callback(); } - if (!tries) { tries = 1; } + waitForS3(callback, tries) { + if ( + !(Settings.filestore.s3 != null + ? Settings.filestore.s3.endpoint + : undefined) + ) { + return callback() + } + if (!tries) { + tries = 1 + } - return request.get(`${Settings.filestore.s3.endpoint}/`, (err, response) => { - console.log(err, response != null ? response.statusCode : undefined, tries); - if (!err && [200, 404].includes(response != null ? response.statusCode : undefined)) { - return callback(); - } + return request.get( + `${Settings.filestore.s3.endpoint}/`, + (err, response) => { + console.log( + err, + response != null ? response.statusCode : undefined, + tries + ) + if ( + !err && + [200, 404].includes( + response != null ? response.statusCode : undefined + ) + ) { + return callback() + } - if (tries === S3_TRIES) { - return callback('timed out waiting for S3'); - } + if (tries === S3_TRIES) { + return callback('timed out waiting for S3') + } - return setTimeout( - () => { - return this.waitForS3(callback, tries + 1); - }, - 1000 - ); - }); - } -}; + return setTimeout(() => { + return this.waitForS3(callback, tries + 1) + }, 1000) + } + ) + } +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? 
transform(value) + : undefined +} diff --git a/services/filestore/test/acceptance/js/SendingFileTest.js b/services/filestore/test/acceptance/js/SendingFileTest.js index 2f31e60b99..c20fa01c42 100644 --- a/services/filestore/test/acceptance/js/SendingFileTest.js +++ b/services/filestore/test/acceptance/js/SendingFileTest.js @@ -12,303 +12,315 @@ * DS103: Rewrite code to no longer use __guard__ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const { - assert -} = require("chai"); -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../app/js/LocalFileWriter.js"; -const SandboxedModule = require('sandboxed-module'); -const fs = require("fs"); -const request = require("request"); -const settings = require("settings-sharelatex"); -const FilestoreApp = require("./FilestoreApp"); -const async = require('async'); +const { assert } = require('chai') +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../app/js/LocalFileWriter.js' +const SandboxedModule = require('sandboxed-module') +const fs = require('fs') +const request = require('request') +const settings = require('settings-sharelatex') +const FilestoreApp = require('./FilestoreApp') +const async = require('async') +const getMetric = (filestoreUrl, metric, cb) => + request.get(`${filestoreUrl}/metrics`, function(err, res) { + expect(res.statusCode).to.equal(200) + const metricRegex = new RegExp(`^${metric}{[^}]+} ([0-9]+)$`, 'm') + return cb(parseInt(__guard__(metricRegex.exec(res.body), x => x[1]) || '0')) + }) -const getMetric = (filestoreUrl, metric, cb) => request.get(`${filestoreUrl}/metrics`, function(err, res) { - expect(res.statusCode).to.equal(200); - const metricRegex = new RegExp(`^${metric}{[^}]+} ([0-9]+)$`, "m"); - return cb(parseInt(__guard__(metricRegex.exec(res.body), x => x[1]) || '0')); -}); +describe('Filestore', function() { + before(function(done) { + this.localFileReadPath = '/tmp/filestore_acceptence_tests_file_read.txt' + this.localFileWritePath = '/tmp/filestore_acceptence_tests_file_write.txt' -describe("Filestore", function() { - before(function(done){ - this.localFileReadPath = "/tmp/filestore_acceptence_tests_file_read.txt"; - this.localFileWritePath = "/tmp/filestore_acceptence_tests_file_write.txt"; + this.constantFileContent = [ + 'hello world', + `line 2 goes here ${Math.random()}`, + 'there are 3 lines in all' + ].join('\n') - this.constantFileContent = [ - "hello world", - `line 2 goes here ${Math.random()}`, - "there are 3 lines in all" - ].join("\n"); + this.filestoreUrl = `http://localhost:${settings.internal.filestore.port}` + return fs.writeFile( + this.localFileReadPath, + this.constantFileContent, + function(err) { + if (err) { + return done(err) + } + return FilestoreApp.waitForS3(done) + } + ) + }) - this.filestoreUrl = `http://localhost:${settings.internal.filestore.port}`; - return fs.writeFile(this.localFileReadPath, this.constantFileContent, function(err) { - if (err) { return done(err); } - return FilestoreApp.waitForS3(done); - }); - }); + beforeEach(function(done) { + return FilestoreApp.ensureRunning(() => { + return async.parallel( + [ + cb => { + return fs.unlink(this.localFileWritePath, () => cb()) + }, + cb => { + return getMetric(this.filestoreUrl, 's3_egress', metric => { + this.previousEgress = metric + return cb() + }) + }, + cb => { + return 
getMetric(this.filestoreUrl, 's3_ingress', metric => { + this.previousIngress = metric + return cb() + }) + } + ], + done + ) + }) + }) - beforeEach(function(done){ - return FilestoreApp.ensureRunning(() => { - return async.parallel([ - cb => { - return fs.unlink(this.localFileWritePath, () => cb()); - }, - cb => { - return getMetric(this.filestoreUrl, 's3_egress', metric => { - this.previousEgress = metric; - return cb(); - }); - }, - cb => { - return getMetric(this.filestoreUrl, 's3_ingress', metric => { - this.previousIngress = metric; - return cb(); - }); - } - ], done); - }); - }); + it('should send a 200 for status endpoint', function(done) { + return request(`${this.filestoreUrl}/status`, function( + err, + response, + body + ) { + response.statusCode.should.equal(200) + body.indexOf('filestore').should.not.equal(-1) + body.indexOf('up').should.not.equal(-1) + return done() + }) + }) - it("should send a 200 for status endpoint", function(done){ - return request(`${this.filestoreUrl}/status`, function(err, response, body){ - response.statusCode.should.equal(200); - body.indexOf("filestore").should.not.equal(-1); - body.indexOf("up").should.not.equal(-1); - return done(); - }); - }); + describe('with a file on the server', function() { + beforeEach(function(done) { + this.timeout(1000 * 10) + this.file_id = Math.random() + this.fileUrl = `${this.filestoreUrl}/project/acceptence_tests/file/${this.file_id}` - describe("with a file on the server", function() { + const writeStream = request.post(this.fileUrl) - beforeEach(function(done){ - this.timeout(1000 * 10); - this.file_id = Math.random(); - this.fileUrl = `${this.filestoreUrl}/project/acceptence_tests/file/${this.file_id}`; + writeStream.on('end', done) + return fs.createReadStream(this.localFileReadPath).pipe(writeStream) + }) - const writeStream = request.post(this.fileUrl); + it('should return 404 for a non-existant id', function(done) { + this.timeout(1000 * 20) + const options = { uri: this.fileUrl + '___this_is_clearly_wrong___' } + return request.get(options, (err, response, body) => { + response.statusCode.should.equal(404) + return done() + }) + }) - writeStream.on("end", done); - return fs.createReadStream(this.localFileReadPath).pipe(writeStream); - }); + it('should record an egress metric for the upload', function(done) { + return getMetric(this.filestoreUrl, 's3_egress', metric => { + expect(metric - this.previousEgress).to.equal( + this.constantFileContent.length + ) + return done() + }) + }) - it("should return 404 for a non-existant id", function(done) { - this.timeout(1000 * 20); - const options = - {uri: this.fileUrl + '___this_is_clearly_wrong___'}; - return request.get(options, (err, response, body) => { - response.statusCode.should.equal(404); - return done(); - }); - }); + it('should return the file size on a HEAD request', function(done) { + const expectedLength = Buffer.byteLength(this.constantFileContent) + return request.head(this.fileUrl, (err, res) => { + expect(res.statusCode).to.equal(200) + expect(res.headers['content-length']).to.equal( + expectedLength.toString() + ) + return done() + }) + }) - it('should record an egress metric for the upload', function(done) { - return getMetric(this.filestoreUrl, 's3_egress', metric => { - expect(metric - this.previousEgress).to.equal(this.constantFileContent.length); - return done(); - }); - }); + it('should be able get the file back', function(done) { + this.timeout(1000 * 10) + return request.get(this.fileUrl, (err, response, body) => { + 
body.should.equal(this.constantFileContent) + return done() + }) + }) - it("should return the file size on a HEAD request", function(done) { - const expectedLength = Buffer.byteLength(this.constantFileContent); - return request.head(this.fileUrl, (err, res) => { - expect(res.statusCode).to.equal(200); - expect(res.headers['content-length']).to.equal(expectedLength.toString()); - return done(); - }); - }); + it('should record an ingress metric when downloading the file', function(done) { + this.timeout(1000 * 10) + return request.get(this.fileUrl, () => { + return getMetric(this.filestoreUrl, 's3_ingress', metric => { + expect(metric - this.previousIngress).to.equal( + this.constantFileContent.length + ) + return done() + }) + }) + }) - it("should be able get the file back", function(done){ - this.timeout(1000 * 10); - return request.get(this.fileUrl, (err, response, body)=> { - body.should.equal(this.constantFileContent); - return done(); - }); - }); + it('should be able to get back the first 9 bytes of the file', function(done) { + this.timeout(1000 * 10) + const options = { + uri: this.fileUrl, + headers: { + Range: 'bytes=0-8' + } + } + return request.get(options, (err, response, body) => { + body.should.equal('hello wor') + return done() + }) + }) - it("should record an ingress metric when downloading the file", function(done){ - this.timeout(1000 * 10); - return request.get(this.fileUrl, () => { - return getMetric(this.filestoreUrl, 's3_ingress', metric => { - expect(metric - this.previousIngress).to.equal(this.constantFileContent.length); - return done(); - }); - }); - }); + it('should record an ingress metric for a partial download', function(done) { + this.timeout(1000 * 10) + const options = { + uri: this.fileUrl, + headers: { + Range: 'bytes=0-8' + } + } + return request.get(options, () => { + return getMetric(this.filestoreUrl, 's3_ingress', metric => { + expect(metric - this.previousIngress).to.equal(9) + return done() + }) + }) + }) - it("should be able to get back the first 9 bytes of the file", function(done) { - this.timeout(1000 * 10); - const options = { - uri: this.fileUrl, - headers: { - 'Range': 'bytes=0-8' - } - }; - return request.get(options, (err, response, body)=> { - body.should.equal('hello wor'); - return done(); - }); - }); + it('should be able to get back bytes 4 through 10 of the file', function(done) { + this.timeout(1000 * 10) + const options = { + uri: this.fileUrl, + headers: { + Range: 'bytes=4-10' + } + } + return request.get(options, (err, response, body) => { + body.should.equal('o world') + return done() + }) + }) - it("should record an ingress metric for a partial download", function(done){ - this.timeout(1000 * 10); - const options = { - uri: this.fileUrl, - headers: { - 'Range': 'bytes=0-8' - } - }; - return request.get(options, ()=> { - return getMetric(this.filestoreUrl, 's3_ingress', metric => { - expect(metric - this.previousIngress).to.equal(9); - return done(); - }); - }); - }); + it('should be able to delete the file', function(done) { + this.timeout(1000 * 20) + return request.del(this.fileUrl, (err, response, body) => { + response.statusCode.should.equal(204) + return request.get(this.fileUrl, (err, response, body) => { + response.statusCode.should.equal(404) + return done() + }) + }) + }) - it("should be able to get back bytes 4 through 10 of the file", function(done) { - this.timeout(1000 * 10); - const options = { - uri: this.fileUrl, - headers: { - 'Range': 'bytes=4-10' - } - }; - return request.get(options, (err, response, body)=> { 
- body.should.equal('o world'); - return done(); - }); - }); + return it('should be able to copy files', function(done) { + this.timeout(1000 * 20) - it("should be able to delete the file", function(done){ - this.timeout(1000 * 20); - return request.del(this.fileUrl, (err, response, body)=> { - response.statusCode.should.equal(204); - return request.get(this.fileUrl, (err, response, body)=> { - response.statusCode.should.equal(404); - return done(); - }); - }); - }); + const newProjectID = 'acceptence_tests_copyied_project' + const newFileId = Math.random() + const newFileUrl = `${this.filestoreUrl}/project/${newProjectID}/file/${newFileId}` + const opts = { + method: 'put', + uri: newFileUrl, + json: { + source: { + project_id: 'acceptence_tests', + file_id: this.file_id + } + } + } + return request(opts, (err, response, body) => { + response.statusCode.should.equal(200) + return request.del(this.fileUrl, (err, response, body) => { + response.statusCode.should.equal(204) + return request.get(newFileUrl, (err, response, body) => { + body.should.equal(this.constantFileContent) + return done() + }) + }) + }) + }) + }) - return it("should be able to copy files", function(done){ - this.timeout(1000 * 20); + return describe('with a pdf file', function() { + beforeEach(function(done) { + this.timeout(1000 * 10) + this.file_id = Math.random() + this.fileUrl = `${this.filestoreUrl}/project/acceptence_tests/file/${this.file_id}` + this.localFileReadPath = __dirname + '/../../fixtures/test.pdf' + return fs.stat(this.localFileReadPath, (err, stat) => { + this.localFileSize = stat.size + const writeStream = request.post(this.fileUrl) - const newProjectID = "acceptence_tests_copyied_project"; - const newFileId = Math.random(); - const newFileUrl = `${this.filestoreUrl}/project/${newProjectID}/file/${newFileId}`; - const opts = { - method: 'put', - uri: newFileUrl, - json: { - source: { - project_id:"acceptence_tests", - file_id: this.file_id - } - } - }; - return request(opts, (err, response, body)=> { - response.statusCode.should.equal(200); - return request.del(this.fileUrl, (err, response, body)=> { - response.statusCode.should.equal(204); - return request.get(newFileUrl, (err, response, body)=> { - body.should.equal(this.constantFileContent); - return done(); - }); - }); - }); - }); - }); + writeStream.on('end', done) + return fs.createReadStream(this.localFileReadPath).pipe(writeStream) + }) + }) - return describe("with a pdf file", function() { + it('should record an egress metric for the upload', function(done) { + return getMetric(this.filestoreUrl, 's3_egress', metric => { + expect(metric - this.previousEgress).to.equal(this.localFileSize) + return done() + }) + }) - beforeEach(function(done){ - this.timeout(1000 * 10); - this.file_id = Math.random(); - this.fileUrl = `${this.filestoreUrl}/project/acceptence_tests/file/${this.file_id}`; - this.localFileReadPath = __dirname + '/../../fixtures/test.pdf'; - return fs.stat(this.localFileReadPath, (err, stat) => { - this.localFileSize = stat.size; - const writeStream = request.post(this.fileUrl); + it('should be able get the file back', function(done) { + this.timeout(1000 * 10) + return request.get(this.fileUrl, (err, response, body) => { + expect(body.substring(0, 8)).to.equal('%PDF-1.5') + return done() + }) + }) - writeStream.on("end", done); - return fs.createReadStream(this.localFileReadPath).pipe(writeStream); - }); - }); + describe('getting the preview image', function() { + beforeEach(function() { + return (this.previewFileUrl = 
`${this.fileUrl}?style=preview`) + }) - it('should record an egress metric for the upload', function(done) { - return getMetric(this.filestoreUrl, 's3_egress', metric => { - expect(metric - this.previousEgress).to.equal(this.localFileSize); - return done(); - }); - }); + it('should not time out', function(done) { + this.timeout(1000 * 20) + return request.get(this.previewFileUrl, (err, response, body) => { + expect(response).to.not.equal(null) + return done() + }) + }) - it("should be able get the file back", function(done){ - this.timeout(1000 * 10); - return request.get(this.fileUrl, (err, response, body)=> { - expect(body.substring(0, 8)).to.equal('%PDF-1.5'); - return done(); - }); - }); + return it('should respond with image data', function(done) { + // note: this test relies of the imagemagick conversion working + this.timeout(1000 * 20) + return request.get(this.previewFileUrl, (err, response, body) => { + expect(response.statusCode).to.equal(200) + expect(body.length).to.be.greaterThan(400) + return done() + }) + }) + }) - describe("getting the preview image", function() { + return describe('warming the cache', function() { + beforeEach(function() { + return (this.fileUrl = this.fileUrl + '?style=preview&cacheWarm=true') + }) - beforeEach(function() { - return this.previewFileUrl = `${this.fileUrl}?style=preview`; - }); + it('should not time out', function(done) { + this.timeout(1000 * 20) + return request.get(this.fileUrl, (err, response, body) => { + expect(response).to.not.equal(null) + return done() + }) + }) - it("should not time out", function(done) { - this.timeout(1000 * 20); - return request.get(this.previewFileUrl, (err, response, body) => { - expect(response).to.not.equal(null); - return done(); - }); - }); - - return it("should respond with image data", function(done) { - // note: this test relies of the imagemagick conversion working - this.timeout(1000 * 20); - return request.get(this.previewFileUrl, (err, response, body) => { - expect(response.statusCode).to.equal(200); - expect(body.length).to.be.greaterThan(400); - return done(); - }); - }); - }); - - return describe("warming the cache", function() { - - beforeEach(function() { - return this.fileUrl = this.fileUrl + '?style=preview&cacheWarm=true'; - }); - - it("should not time out", function(done) { - this.timeout(1000 * 20); - return request.get(this.fileUrl, (err, response, body) => { - expect(response).to.not.equal(null); - return done(); - }); - }); - - return it("should respond with only an 'OK'", function(done) { - // note: this test relies of the imagemagick conversion working - this.timeout(1000 * 20); - return request.get(this.fileUrl, (err, response, body) => { - expect(response.statusCode).to.equal(200); - body.should.equal('OK'); - return done(); - }); - }); - }); - }); -}); + return it("should respond with only an 'OK'", function(done) { + // note: this test relies of the imagemagick conversion working + this.timeout(1000 * 20) + return request.get(this.fileUrl, (err, response, body) => { + expect(response.statusCode).to.equal(200) + body.should.equal('OK') + return done() + }) + }) + }) + }) +}) function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? 
transform(value) + : undefined +} From d70a33c4f57abdd1dc4b647b5cebded3a1cb23a8 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 10:52:34 +0000 Subject: [PATCH 336/555] Rename individual coffee files to js files --- services/filestore/{app.coffee => app.js} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename services/filestore/{app.coffee => app.js} (100%) diff --git a/services/filestore/app.coffee b/services/filestore/app.js similarity index 100% rename from services/filestore/app.coffee rename to services/filestore/app.js From 1d295ef7e4ed74a88508f01355c68617ec479774 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 10:52:40 +0000 Subject: [PATCH 337/555] Decaffeinate: convert individual files to js --- services/filestore/app.js | 254 +++++++++++++++++++++----------------- 1 file changed, 140 insertions(+), 114 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index 6c21686826..8a5bd9a04a 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -1,147 +1,173 @@ -Metrics = require "metrics-sharelatex" -Metrics.initialize("filestore") -express = require('express') -bodyParser = require "body-parser" -logger = require('logger-sharelatex') -logger.initialize("filestore") -settings = require("settings-sharelatex") -request = require("request") -fileController = require("./app/js/FileController") -bucketController = require("./app/js/BucketController") -keyBuilder = require("./app/js/KeyBuilder") -healthCheckController = require("./app/js/HealthCheckController") -domain = require("domain") -appIsOk = true -app = express() +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Metrics = require("metrics-sharelatex"); +Metrics.initialize("filestore"); +const express = require('express'); +const bodyParser = require("body-parser"); +let logger = require('logger-sharelatex'); +logger.initialize("filestore"); +const settings = require("settings-sharelatex"); +const request = require("request"); +const fileController = require("./app/js/FileController"); +const bucketController = require("./app/js/BucketController"); +const keyBuilder = require("./app/js/KeyBuilder"); +const healthCheckController = require("./app/js/HealthCheckController"); +const domain = require("domain"); +let appIsOk = true; +const app = express(); -if settings.sentry?.dsn? - logger.initializeErrorReporting(settings.sentry.dsn) +if ((settings.sentry != null ? 
settings.sentry.dsn : undefined) != null) { + logger.initializeErrorReporting(settings.sentry.dsn); +} -Metrics.open_sockets.monitor(logger) -Metrics.event_loop?.monitor(logger) -Metrics.memory.monitor(logger) +Metrics.open_sockets.monitor(logger); +if (Metrics.event_loop != null) { + Metrics.event_loop.monitor(logger); +} +Metrics.memory.monitor(logger); -app.use Metrics.http.monitor(logger) +app.use(Metrics.http.monitor(logger)); -app.use (req, res, next)-> - Metrics.inc "http-request" - next() +app.use(function(req, res, next){ + Metrics.inc("http-request"); + return next(); +}); -app.use (req, res, next) -> - requestDomain = domain.create() - requestDomain.add req - requestDomain.add res - requestDomain.on "error", (err)-> - try - # request a shutdown to prevent memory leaks - beginShutdown() - if !res.headerSent - res.send(500, "uncaught exception") - logger = require('logger-sharelatex') - req = - body:req.body - headers:req.headers - url:req.url - key: req.key +app.use(function(req, res, next) { + const requestDomain = domain.create(); + requestDomain.add(req); + requestDomain.add(res); + requestDomain.on("error", function(err){ + try { + // request a shutdown to prevent memory leaks + beginShutdown(); + if (!res.headerSent) { + res.send(500, "uncaught exception"); + } + logger = require('logger-sharelatex'); + req = { + body:req.body, + headers:req.headers, + url:req.url, + key: req.key, statusCode: req.statusCode - err = - message: err.message - stack: err.stack - name: err.name - type: err.type + }; + err = { + message: err.message, + stack: err.stack, + name: err.name, + type: err.type, arguments: err.arguments - logger.err err:err, req:req, res:res, "uncaught exception thrown on request" - catch exception - logger.err err: exception, "exception in request domain handler" - requestDomain.run next + }; + return logger.err({err, req, res}, "uncaught exception thrown on request"); + } catch (exception) { + return logger.err({err: exception}, "exception in request domain handler"); + } + }); + return requestDomain.run(next); +}); -app.use (req, res, next) -> - if not appIsOk - # when shutting down, close any HTTP keep-alive connections - res.set 'Connection', 'close' - next() +app.use(function(req, res, next) { + if (!appIsOk) { + // when shutting down, close any HTTP keep-alive connections + res.set('Connection', 'close'); + } + return next(); +}); -Metrics.injectMetricsRoute(app) +Metrics.injectMetricsRoute(app); -app.head "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFileHead -app.get "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile -app.post "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.insertFile -app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, bodyParser.json(), fileController.copyFile -app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile +app.head("/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFileHead); +app.get("/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile); +app.post("/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.insertFile); +app.put("/project/:project_id/file/:file_id", keyBuilder.userFileKey, bodyParser.json(), fileController.copyFile); +app.del("/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile); -app.head "/template/:template_id/v/:version/:format", 
keyBuilder.templateFileKey, fileController.getFileHead -app.get "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFile -app.get "/template/:template_id/v/:version/:format/:sub_type", keyBuilder.templateFileKey, fileController.getFile -app.post "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.insertFile +app.head("/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFileHead); +app.get("/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFile); +app.get("/template/:template_id/v/:version/:format/:sub_type", keyBuilder.templateFileKey, fileController.getFile); +app.post("/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.insertFile); -app.head "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFileHead -app.get "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFile -app.post "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.insertFile -app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, bodyParser.json(), fileController.copyFile -app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile +app.head("/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFileHead); +app.get("/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFile); +app.post("/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.insertFile); +app.put("/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, bodyParser.json(), fileController.copyFile); +app.del("/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile); -app.get "/project/:project_id/size", keyBuilder.publicProjectKey, fileController.directorySize +app.get("/project/:project_id/size", keyBuilder.publicProjectKey, fileController.directorySize); -app.get "/bucket/:bucket/key/*", bucketController.getFile +app.get("/bucket/:bucket/key/*", bucketController.getFile); -app.get "/heapdump", (req, res)-> - require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename)-> - res.send filename +app.get("/heapdump", (req, res) => require('heapdump').writeSnapshot('/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename) => res.send(filename))); -app.post "/shutdown", (req, res)-> - appIsOk = false - res.send() +app.post("/shutdown", function(req, res){ + appIsOk = false; + return res.send(); +}); -app.get '/status', (req, res)-> - if appIsOk - res.send('filestore sharelatex up') - else - logger.log "app is not ok - shutting down" - res.send("server is being shut down", 500) +app.get('/status', function(req, res){ + if (appIsOk) { + return res.send('filestore sharelatex up'); + } else { + logger.log("app is not ok - shutting down"); + return res.send("server is being shut down", 500); + } +}); -app.get "/health_check", healthCheckController.check +app.get("/health_check", healthCheckController.check); -app.get '*', (req, res)-> - res.send 404 +app.get('*', (req, res) => res.send(404)); -beginShutdown = () -> - if appIsOk - appIsOk = false - # hard-terminate this process if graceful shutdown fails - killTimer = setTimeout () -> - process.exit 1 - , 120*1000 - killTimer.unref?() # prevent timer 
from keeping process alive - server.close () -> - logger.log "closed all connections" - Metrics.close() - process.disconnect?() - logger.log "server will stop accepting connections" +var beginShutdown = function() { + if (appIsOk) { + appIsOk = false; + // hard-terminate this process if graceful shutdown fails + const killTimer = setTimeout(() => process.exit(1) + , 120*1000); + if (typeof killTimer.unref === 'function') { + killTimer.unref(); + } // prevent timer from keeping process alive + server.close(function() { + logger.log("closed all connections"); + Metrics.close(); + return (typeof process.disconnect === 'function' ? process.disconnect() : undefined); + }); + return logger.log("server will stop accepting connections"); + } +}; -port = settings.internal.filestore.port or 3009 -host = "0.0.0.0" +const port = settings.internal.filestore.port || 3009; +const host = "0.0.0.0"; -if !module.parent # Called directly - server = app.listen port, host, (error) -> - logger.info "Filestore starting up, listening on #{host}:#{port}" +if (!module.parent) { // Called directly + var server = app.listen(port, host, error => logger.info(`Filestore starting up, listening on ${host}:${port}`)); +} -module.exports = app +module.exports = app; -process.on 'SIGTERM', () -> - logger.log("filestore got SIGTERM, shutting down gracefully") - beginShutdown() +process.on('SIGTERM', function() { + logger.log("filestore got SIGTERM, shutting down gracefully"); + return beginShutdown(); +}); -if global.gc? - gcTimer = setInterval () -> - global.gc() - logger.log process.memoryUsage(), "global.gc" - , 3 * oneMinute = 60 * 1000 - gcTimer.unref() +if (global.gc != null) { + let oneMinute; + const gcTimer = setInterval(function() { + global.gc(); + return logger.log(process.memoryUsage(), "global.gc"); + } + , 3 * (oneMinute = 60 * 1000)); + gcTimer.unref(); +} From 9943e2cc8f198b05661fd2c4a095816db22059c2 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 10:52:42 +0000 Subject: [PATCH 338/555] Prettier: convert individual decaffeinated files to Prettier format --- services/filestore/app.js | 336 +++++++++++++++++++++++--------------- 1 file changed, 202 insertions(+), 134 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index 8a5bd9a04a..cbe13e8076 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -4,170 +4,238 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Metrics = require("metrics-sharelatex"); -Metrics.initialize("filestore"); -const express = require('express'); -const bodyParser = require("body-parser"); -let logger = require('logger-sharelatex'); -logger.initialize("filestore"); -const settings = require("settings-sharelatex"); -const request = require("request"); -const fileController = require("./app/js/FileController"); -const bucketController = require("./app/js/BucketController"); -const keyBuilder = require("./app/js/KeyBuilder"); -const healthCheckController = require("./app/js/HealthCheckController"); -const domain = require("domain"); -let appIsOk = true; -const app = express(); +const Metrics = require('metrics-sharelatex') +Metrics.initialize('filestore') +const express = require('express') +const bodyParser = require('body-parser') +let logger = require('logger-sharelatex') +logger.initialize('filestore') +const settings = require('settings-sharelatex') +const request = require('request') +const fileController = 
require('./app/js/FileController') +const bucketController = require('./app/js/BucketController') +const keyBuilder = require('./app/js/KeyBuilder') +const healthCheckController = require('./app/js/HealthCheckController') +const domain = require('domain') +let appIsOk = true +const app = express() if ((settings.sentry != null ? settings.sentry.dsn : undefined) != null) { - logger.initializeErrorReporting(settings.sentry.dsn); + logger.initializeErrorReporting(settings.sentry.dsn) } -Metrics.open_sockets.monitor(logger); +Metrics.open_sockets.monitor(logger) if (Metrics.event_loop != null) { - Metrics.event_loop.monitor(logger); + Metrics.event_loop.monitor(logger) } -Metrics.memory.monitor(logger); +Metrics.memory.monitor(logger) -app.use(Metrics.http.monitor(logger)); - -app.use(function(req, res, next){ - Metrics.inc("http-request"); - return next(); -}); +app.use(Metrics.http.monitor(logger)) app.use(function(req, res, next) { - const requestDomain = domain.create(); - requestDomain.add(req); - requestDomain.add(res); - requestDomain.on("error", function(err){ - try { - // request a shutdown to prevent memory leaks - beginShutdown(); - if (!res.headerSent) { - res.send(500, "uncaught exception"); - } - logger = require('logger-sharelatex'); - req = { - body:req.body, - headers:req.headers, - url:req.url, - key: req.key, - statusCode: req.statusCode - }; - err = { - message: err.message, - stack: err.stack, - name: err.name, - type: err.type, - arguments: err.arguments - }; - return logger.err({err, req, res}, "uncaught exception thrown on request"); - } catch (exception) { - return logger.err({err: exception}, "exception in request domain handler"); - } - }); - return requestDomain.run(next); -}); + Metrics.inc('http-request') + return next() +}) app.use(function(req, res, next) { - if (!appIsOk) { - // when shutting down, close any HTTP keep-alive connections - res.set('Connection', 'close'); - } - return next(); -}); + const requestDomain = domain.create() + requestDomain.add(req) + requestDomain.add(res) + requestDomain.on('error', function(err) { + try { + // request a shutdown to prevent memory leaks + beginShutdown() + if (!res.headerSent) { + res.send(500, 'uncaught exception') + } + logger = require('logger-sharelatex') + req = { + body: req.body, + headers: req.headers, + url: req.url, + key: req.key, + statusCode: req.statusCode + } + err = { + message: err.message, + stack: err.stack, + name: err.name, + type: err.type, + arguments: err.arguments + } + return logger.err( + { err, req, res }, + 'uncaught exception thrown on request' + ) + } catch (exception) { + return logger.err( + { err: exception }, + 'exception in request domain handler' + ) + } + }) + return requestDomain.run(next) +}) -Metrics.injectMetricsRoute(app); +app.use(function(req, res, next) { + if (!appIsOk) { + // when shutting down, close any HTTP keep-alive connections + res.set('Connection', 'close') + } + return next() +}) -app.head("/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFileHead); -app.get("/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile); -app.post("/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.insertFile); -app.put("/project/:project_id/file/:file_id", keyBuilder.userFileKey, bodyParser.json(), fileController.copyFile); -app.del("/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile); +Metrics.injectMetricsRoute(app) 
-app.head("/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFileHead); -app.get("/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFile); -app.get("/template/:template_id/v/:version/:format/:sub_type", keyBuilder.templateFileKey, fileController.getFile); -app.post("/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.insertFile); +app.head( + '/project/:project_id/file/:file_id', + keyBuilder.userFileKey, + fileController.getFileHead +) +app.get( + '/project/:project_id/file/:file_id', + keyBuilder.userFileKey, + fileController.getFile +) +app.post( + '/project/:project_id/file/:file_id', + keyBuilder.userFileKey, + fileController.insertFile +) +app.put( + '/project/:project_id/file/:file_id', + keyBuilder.userFileKey, + bodyParser.json(), + fileController.copyFile +) +app.del( + '/project/:project_id/file/:file_id', + keyBuilder.userFileKey, + fileController.deleteFile +) +app.head( + '/template/:template_id/v/:version/:format', + keyBuilder.templateFileKey, + fileController.getFileHead +) +app.get( + '/template/:template_id/v/:version/:format', + keyBuilder.templateFileKey, + fileController.getFile +) +app.get( + '/template/:template_id/v/:version/:format/:sub_type', + keyBuilder.templateFileKey, + fileController.getFile +) +app.post( + '/template/:template_id/v/:version/:format', + keyBuilder.templateFileKey, + fileController.insertFile +) -app.head("/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFileHead); -app.get("/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFile); -app.post("/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.insertFile); -app.put("/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, bodyParser.json(), fileController.copyFile); -app.del("/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile); +app.head( + '/project/:project_id/public/:public_file_id', + keyBuilder.publicFileKey, + fileController.getFileHead +) +app.get( + '/project/:project_id/public/:public_file_id', + keyBuilder.publicFileKey, + fileController.getFile +) +app.post( + '/project/:project_id/public/:public_file_id', + keyBuilder.publicFileKey, + fileController.insertFile +) +app.put( + '/project/:project_id/public/:public_file_id', + keyBuilder.publicFileKey, + bodyParser.json(), + fileController.copyFile +) +app.del( + '/project/:project_id/public/:public_file_id', + keyBuilder.publicFileKey, + fileController.deleteFile +) -app.get("/project/:project_id/size", keyBuilder.publicProjectKey, fileController.directorySize); +app.get( + '/project/:project_id/size', + keyBuilder.publicProjectKey, + fileController.directorySize +) -app.get("/bucket/:bucket/key/*", bucketController.getFile); +app.get('/bucket/:bucket/key/*', bucketController.getFile) -app.get("/heapdump", (req, res) => require('heapdump').writeSnapshot('/tmp/' + Date.now() + '.filestore.heapsnapshot', (err, filename) => res.send(filename))); +app.get('/heapdump', (req, res) => + require('heapdump').writeSnapshot( + '/tmp/' + Date.now() + '.filestore.heapsnapshot', + (err, filename) => res.send(filename) + ) +) -app.post("/shutdown", function(req, res){ - appIsOk = false; - return res.send(); -}); - -app.get('/status', function(req, res){ - if (appIsOk) { - return res.send('filestore sharelatex up'); - } else { - logger.log("app is not ok - 
shutting down"); - return res.send("server is being shut down", 500); - } -}); - - -app.get("/health_check", healthCheckController.check); - - -app.get('*', (req, res) => res.send(404)); +app.post('/shutdown', function(req, res) { + appIsOk = false + return res.send() +}) +app.get('/status', function(req, res) { + if (appIsOk) { + return res.send('filestore sharelatex up') + } else { + logger.log('app is not ok - shutting down') + return res.send('server is being shut down', 500) + } +}) +app.get('/health_check', healthCheckController.check) +app.get('*', (req, res) => res.send(404)) var beginShutdown = function() { - if (appIsOk) { - appIsOk = false; - // hard-terminate this process if graceful shutdown fails - const killTimer = setTimeout(() => process.exit(1) - , 120*1000); - if (typeof killTimer.unref === 'function') { - killTimer.unref(); - } // prevent timer from keeping process alive - server.close(function() { - logger.log("closed all connections"); - Metrics.close(); - return (typeof process.disconnect === 'function' ? process.disconnect() : undefined); - }); - return logger.log("server will stop accepting connections"); - } -}; - - -const port = settings.internal.filestore.port || 3009; -const host = "0.0.0.0"; - -if (!module.parent) { // Called directly - var server = app.listen(port, host, error => logger.info(`Filestore starting up, listening on ${host}:${port}`)); + if (appIsOk) { + appIsOk = false + // hard-terminate this process if graceful shutdown fails + const killTimer = setTimeout(() => process.exit(1), 120 * 1000) + if (typeof killTimer.unref === 'function') { + killTimer.unref() + } // prevent timer from keeping process alive + server.close(function() { + logger.log('closed all connections') + Metrics.close() + return typeof process.disconnect === 'function' + ? 
process.disconnect() + : undefined + }) + return logger.log('server will stop accepting connections') + } } +const port = settings.internal.filestore.port || 3009 +const host = '0.0.0.0' -module.exports = app; +if (!module.parent) { + // Called directly + var server = app.listen(port, host, error => + logger.info(`Filestore starting up, listening on ${host}:${port}`) + ) +} + +module.exports = app process.on('SIGTERM', function() { - logger.log("filestore got SIGTERM, shutting down gracefully"); - return beginShutdown(); -}); + logger.log('filestore got SIGTERM, shutting down gracefully') + return beginShutdown() +}) if (global.gc != null) { - let oneMinute; - const gcTimer = setInterval(function() { - global.gc(); - return logger.log(process.memoryUsage(), "global.gc"); - } - , 3 * (oneMinute = 60 * 1000)); - gcTimer.unref(); + let oneMinute + const gcTimer = setInterval(function() { + global.gc() + return logger.log(process.memoryUsage(), 'global.gc') + }, 3 * (oneMinute = 60 * 1000)) + gcTimer.unref() } From 0b6e725137e3cb5996843acaee06a74869fef398 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 11:16:27 +0000 Subject: [PATCH 339/555] Remove coffeescript compiles from package.json --- services/filestore/package.json | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index 708656e0d6..40aabd8279 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -8,18 +8,13 @@ }, "scripts": { "test:acceptance:run": "mocha --recursive --reporter spec --timeout 15000 $@ test/acceptance/js", - "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP", + "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", "test:unit:run": "mocha --recursive --reporter spec $@ test/unit/js", - "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP", - "compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee", - "compile:acceptance_tests": "[ ! -e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee", - "compile:app": "([ -e app/coffee ] && coffee -m $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee -m $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", - "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:smoke_tests", - "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", + "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", + "start": "node $NODE_APP_OPTIONS app.js", "nodemon": "nodemon --config nodemon.json", "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js", - "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js", - "compile:smoke_tests": "[ ! 
-e test/smoke/coffee ] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee" + "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js" }, "dependencies": { "async": "~0.2.10", From e06c03e536b4b28acd6afc9d8d7ecc77d966cfa5 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 11:16:37 +0000 Subject: [PATCH 340/555] Fix decaf error in FSPersistorManager --- services/filestore/app/js/FSPersistorManager.js | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/js/FSPersistorManager.js b/services/filestore/app/js/FSPersistorManager.js index 2e0de65632..d11d839df7 100644 --- a/services/filestore/app/js/FSPersistorManager.js +++ b/services/filestore/app/js/FSPersistorManager.js @@ -82,11 +82,11 @@ module.exports = { { err, location, filteredName: name }, 'Error reading from file' ) - } - if (err.code === 'ENOENT') { - return callback(new Errors.NotFoundError(err.message), null) - } else { - return callback(err, null) + if (err.code === 'ENOENT') { + return callback(new Errors.NotFoundError(err.message), null) + } else { + return callback(err, null) + } } opts.fd = fd const sourceStream = fs.createReadStream(null, opts) From 72c83bdafff276a68e742e100ecbdb6c2a41e3fb Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Mon, 16 Dec 2019 11:20:20 +0000 Subject: [PATCH 341/555] decaffeinate: Rename AWSSDKPersistorManagerTests.coffee and 12 other files from .coffee to .js --- ...ersistorManagerTests.coffee => AWSSDKPersistorManagerTests.js} | 0 .../{BucketControllerTests.coffee => BucketControllerTests.js} | 0 ...{FSPersistorManagerTests.coffee => FSPersistorManagerTests.js} | 0 .../coffee/{FileControllerTests.coffee => FileControllerTests.js} | 0 .../coffee/{FileConverterTests.coffee => FileConverterTests.js} | 0 .../unit/coffee/{FileHandlerTests.coffee => FileHandlerTests.js} | 0 .../coffee/{ImageOptimiserTests.coffee => ImageOptimiserTests.js} | 0 .../unit/coffee/{KeybuilderTests.coffee => KeybuilderTests.js} | 0 .../{LocalFileWriterTests.coffee => LocalFileWriterTests.js} | 0 .../{PersistorManagerTests.coffee => PersistorManagerTests.js} | 0 ...{S3PersistorManagerTests.coffee => S3PersistorManagerTests.js} | 0 .../test/unit/coffee/{SafeExecTests.coffee => SafeExecTests.js} | 0 .../test/unit/coffee/{SettingsTests.coffee => SettingsTests.js} | 0 13 files changed, 0 insertions(+), 0 deletions(-) rename services/filestore/test/unit/coffee/{AWSSDKPersistorManagerTests.coffee => AWSSDKPersistorManagerTests.js} (100%) rename services/filestore/test/unit/coffee/{BucketControllerTests.coffee => BucketControllerTests.js} (100%) rename services/filestore/test/unit/coffee/{FSPersistorManagerTests.coffee => FSPersistorManagerTests.js} (100%) rename services/filestore/test/unit/coffee/{FileControllerTests.coffee => FileControllerTests.js} (100%) rename services/filestore/test/unit/coffee/{FileConverterTests.coffee => FileConverterTests.js} (100%) rename services/filestore/test/unit/coffee/{FileHandlerTests.coffee => FileHandlerTests.js} (100%) rename services/filestore/test/unit/coffee/{ImageOptimiserTests.coffee => ImageOptimiserTests.js} (100%) rename services/filestore/test/unit/coffee/{KeybuilderTests.coffee => KeybuilderTests.js} (100%) rename services/filestore/test/unit/coffee/{LocalFileWriterTests.coffee => LocalFileWriterTests.js} (100%) rename services/filestore/test/unit/coffee/{PersistorManagerTests.coffee => PersistorManagerTests.js} (100%) rename 
services/filestore/test/unit/coffee/{S3PersistorManagerTests.coffee => S3PersistorManagerTests.js} (100%) rename services/filestore/test/unit/coffee/{SafeExecTests.coffee => SafeExecTests.js} (100%) rename services/filestore/test/unit/coffee/{SettingsTests.coffee => SettingsTests.js} (100%) diff --git a/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.js similarity index 100% rename from services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.coffee rename to services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.js diff --git a/services/filestore/test/unit/coffee/BucketControllerTests.coffee b/services/filestore/test/unit/coffee/BucketControllerTests.js similarity index 100% rename from services/filestore/test/unit/coffee/BucketControllerTests.coffee rename to services/filestore/test/unit/coffee/BucketControllerTests.js diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee b/services/filestore/test/unit/coffee/FSPersistorManagerTests.js similarity index 100% rename from services/filestore/test/unit/coffee/FSPersistorManagerTests.coffee rename to services/filestore/test/unit/coffee/FSPersistorManagerTests.js diff --git a/services/filestore/test/unit/coffee/FileControllerTests.coffee b/services/filestore/test/unit/coffee/FileControllerTests.js similarity index 100% rename from services/filestore/test/unit/coffee/FileControllerTests.coffee rename to services/filestore/test/unit/coffee/FileControllerTests.js diff --git a/services/filestore/test/unit/coffee/FileConverterTests.coffee b/services/filestore/test/unit/coffee/FileConverterTests.js similarity index 100% rename from services/filestore/test/unit/coffee/FileConverterTests.coffee rename to services/filestore/test/unit/coffee/FileConverterTests.js diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.coffee b/services/filestore/test/unit/coffee/FileHandlerTests.js similarity index 100% rename from services/filestore/test/unit/coffee/FileHandlerTests.coffee rename to services/filestore/test/unit/coffee/FileHandlerTests.js diff --git a/services/filestore/test/unit/coffee/ImageOptimiserTests.coffee b/services/filestore/test/unit/coffee/ImageOptimiserTests.js similarity index 100% rename from services/filestore/test/unit/coffee/ImageOptimiserTests.coffee rename to services/filestore/test/unit/coffee/ImageOptimiserTests.js diff --git a/services/filestore/test/unit/coffee/KeybuilderTests.coffee b/services/filestore/test/unit/coffee/KeybuilderTests.js similarity index 100% rename from services/filestore/test/unit/coffee/KeybuilderTests.coffee rename to services/filestore/test/unit/coffee/KeybuilderTests.js diff --git a/services/filestore/test/unit/coffee/LocalFileWriterTests.coffee b/services/filestore/test/unit/coffee/LocalFileWriterTests.js similarity index 100% rename from services/filestore/test/unit/coffee/LocalFileWriterTests.coffee rename to services/filestore/test/unit/coffee/LocalFileWriterTests.js diff --git a/services/filestore/test/unit/coffee/PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/PersistorManagerTests.js similarity index 100% rename from services/filestore/test/unit/coffee/PersistorManagerTests.coffee rename to services/filestore/test/unit/coffee/PersistorManagerTests.js diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee b/services/filestore/test/unit/coffee/S3PersistorManagerTests.js similarity index 100% rename from 
services/filestore/test/unit/coffee/S3PersistorManagerTests.coffee rename to services/filestore/test/unit/coffee/S3PersistorManagerTests.js diff --git a/services/filestore/test/unit/coffee/SafeExecTests.coffee b/services/filestore/test/unit/coffee/SafeExecTests.js similarity index 100% rename from services/filestore/test/unit/coffee/SafeExecTests.coffee rename to services/filestore/test/unit/coffee/SafeExecTests.js diff --git a/services/filestore/test/unit/coffee/SettingsTests.coffee b/services/filestore/test/unit/coffee/SettingsTests.js similarity index 100% rename from services/filestore/test/unit/coffee/SettingsTests.coffee rename to services/filestore/test/unit/coffee/SettingsTests.js From 5cf7138ef1b9fb0d7883122de863467c8b669189 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Mon, 16 Dec 2019 11:20:22 +0000 Subject: [PATCH 342/555] decaffeinate: Convert AWSSDKPersistorManagerTests.coffee and 12 other files to JS --- .../coffee/AWSSDKPersistorManagerTests.js | 541 ++++++++------ .../test/unit/coffee/BucketControllerTests.js | 147 ++-- .../unit/coffee/FSPersistorManagerTests.js | 555 ++++++++------ .../test/unit/coffee/FileControllerTests.js | 419 ++++++----- .../test/unit/coffee/FileConverterTests.js | 167 +++-- .../test/unit/coffee/FileHandlerTests.js | 461 +++++++----- .../test/unit/coffee/ImageOptimiserTests.js | 124 ++-- .../test/unit/coffee/KeybuilderTests.js | 84 ++- .../test/unit/coffee/LocalFileWriterTests.js | 174 +++-- .../test/unit/coffee/PersistorManagerTests.js | 200 ++--- .../unit/coffee/S3PersistorManagerTests.js | 696 ++++++++++-------- .../test/unit/coffee/SafeExecTests.js | 109 +-- .../test/unit/coffee/SettingsTests.js | 44 +- 13 files changed, 2167 insertions(+), 1554 deletions(-) diff --git a/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.js b/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.js index 92fb968914..cad0f4b805 100644 --- a/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.js +++ b/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.js @@ -1,272 +1,353 @@ -sinon = require 'sinon' -chai = require 'chai' +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon'); +const chai = require('chai'); -should = chai.should() -expect = chai.expect +const should = chai.should(); +const { + expect +} = chai; -modulePath = "../../../app/js/AWSSDKPersistorManager.js" -SandboxedModule = require 'sandboxed-module' +const modulePath = "../../../app/js/AWSSDKPersistorManager.js"; +const SandboxedModule = require('sandboxed-module'); -describe "AWSSDKPersistorManager", -> - beforeEach -> - @settings = - filestore: +describe("AWSSDKPersistorManager", function() { + beforeEach(function() { + this.settings = { + filestore: { backend: "aws-sdk" - @s3 = - upload: sinon.stub() - getObject: sinon.stub() - copyObject: sinon.stub() - deleteObject: sinon.stub() - listObjects: sinon.stub() - deleteObjects: sinon.stub() + } + }; + this.s3 = { + upload: sinon.stub(), + getObject: sinon.stub(), + copyObject: sinon.stub(), + deleteObject: sinon.stub(), + listObjects: sinon.stub(), + deleteObjects: sinon.stub(), headObject: sinon.stub() - @awssdk = - S3: sinon.stub().returns @s3 + }; + this.awssdk = + {S3: sinon.stub().returns(this.s3)}; - @requires = - "aws-sdk": @awssdk - "settings-sharelatex": @settings - "logger-sharelatex": - log:-> - err:-> - "fs": @fs 
= - createReadStream: sinon.stub() - "./Errors": @Errors = - NotFoundError: sinon.stub() - @key = "my/key" - @bucketName = "my-bucket" - @error = "my error" - @AWSSDKPersistorManager = SandboxedModule.require modulePath, requires: @requires + this.requires = { + "aws-sdk": this.awssdk, + "settings-sharelatex": this.settings, + "logger-sharelatex": { + log() {}, + err() {} + }, + "fs": (this.fs = + {createReadStream: sinon.stub()}), + "./Errors": (this.Errors = + {NotFoundError: sinon.stub()}) + }; + this.key = "my/key"; + this.bucketName = "my-bucket"; + this.error = "my error"; + return this.AWSSDKPersistorManager = SandboxedModule.require(modulePath, {requires: this.requires}); + }); - describe "sendFile", -> - beforeEach -> - @stream = {} - @fsPath = "/usr/local/some/file" - @fs.createReadStream.returns @stream + describe("sendFile", function() { + beforeEach(function() { + this.stream = {}; + this.fsPath = "/usr/local/some/file"; + return this.fs.createReadStream.returns(this.stream); + }); - it "should put the file with s3.upload", (done) -> - @s3.upload.callsArgWith 1 - @AWSSDKPersistorManager.sendFile @bucketName, @key, @fsPath, (err) => - expect(err).to.not.be.ok - expect(@s3.upload.calledOnce, "called only once").to.be.true - expect((@s3.upload.calledWith Bucket: @bucketName, Key: @key, Body: @stream) - , "called with correct arguments").to.be.true - done() + it("should put the file with s3.upload", function(done) { + this.s3.upload.callsArgWith(1); + return this.AWSSDKPersistorManager.sendFile(this.bucketName, this.key, this.fsPath, err => { + expect(err).to.not.be.ok; + expect(this.s3.upload.calledOnce, "called only once").to.be.true; + expect((this.s3.upload.calledWith({Bucket: this.bucketName, Key: this.key, Body: this.stream})) + , "called with correct arguments").to.be.true; + return done(); + }); + }); - it "should dispatch the error from s3.upload", (done) -> - @s3.upload.callsArgWith 1, @error - @AWSSDKPersistorManager.sendFile @bucketName, @key, @fsPath, (err) => - expect(err).to.equal @error - done() + return it("should dispatch the error from s3.upload", function(done) { + this.s3.upload.callsArgWith(1, this.error); + return this.AWSSDKPersistorManager.sendFile(this.bucketName, this.key, this.fsPath, err => { + expect(err).to.equal(this.error); + return done(); + }); + }); + }); - describe "sendStream", -> - beforeEach -> - @stream = {} + describe("sendStream", function() { + beforeEach(function() { + return this.stream = {};}); - it "should put the file with s3.upload", (done) -> - @s3.upload.callsArgWith 1 - @AWSSDKPersistorManager.sendStream @bucketName, @key, @stream, (err) => - expect(err).to.not.be.ok - expect(@s3.upload.calledOnce, "called only once").to.be.true - expect((@s3.upload.calledWith Bucket: @bucketName, Key: @key, Body: @stream), - "called with correct arguments").to.be.true - done() + it("should put the file with s3.upload", function(done) { + this.s3.upload.callsArgWith(1); + return this.AWSSDKPersistorManager.sendStream(this.bucketName, this.key, this.stream, err => { + expect(err).to.not.be.ok; + expect(this.s3.upload.calledOnce, "called only once").to.be.true; + expect((this.s3.upload.calledWith({Bucket: this.bucketName, Key: this.key, Body: this.stream})), + "called with correct arguments").to.be.true; + return done(); + }); + }); - it "should dispatch the error from s3.upload", (done) -> - @s3.upload.callsArgWith 1, @error - @AWSSDKPersistorManager.sendStream @bucketName, @key, @stream, (err) => - expect(err).to.equal @error - done() + return 
it("should dispatch the error from s3.upload", function(done) { + this.s3.upload.callsArgWith(1, this.error); + return this.AWSSDKPersistorManager.sendStream(this.bucketName, this.key, this.stream, err => { + expect(err).to.equal(this.error); + return done(); + }); + }); + }); - describe "getFileStream", -> - beforeEach -> - @opts = {} - @stream = {} - @read_stream = - on: @read_stream_on = sinon.stub() - @object = - createReadStream: sinon.stub().returns @read_stream - @s3.getObject.returns @object + describe("getFileStream", function() { + beforeEach(function() { + this.opts = {}; + this.stream = {}; + this.read_stream = + {on: (this.read_stream_on = sinon.stub())}; + this.object = + {createReadStream: sinon.stub().returns(this.read_stream)}; + return this.s3.getObject.returns(this.object); + }); - it "should return a stream from s3.getObject", (done) -> - @read_stream_on.withArgs('readable').callsArgWith 1 + it("should return a stream from s3.getObject", function(done) { + this.read_stream_on.withArgs('readable').callsArgWith(1); - @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => - expect(@read_stream_on.calledTwice) - expect(err).to.not.be.ok - expect(stream, "returned the stream").to.equal @read_stream - expect((@s3.getObject.calledWith Bucket: @bucketName, Key: @key), - "called with correct arguments").to.be.true - done() + return this.AWSSDKPersistorManager.getFileStream(this.bucketName, this.key, this.opts, (err, stream) => { + expect(this.read_stream_on.calledTwice); + expect(err).to.not.be.ok; + expect(stream, "returned the stream").to.equal(this.read_stream); + expect((this.s3.getObject.calledWith({Bucket: this.bucketName, Key: this.key})), + "called with correct arguments").to.be.true; + return done(); + }); + }); - describe "with start and end options", -> - beforeEach -> - @opts = - start: 0 + describe("with start and end options", function() { + beforeEach(function() { + return this.opts = { + start: 0, end: 8 - it "should pass headers to the s3.GetObject", (done) -> - @read_stream_on.withArgs('readable').callsArgWith 1 - @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => - expect((@s3.getObject.calledWith Bucket: @bucketName, Key: @key, Range: 'bytes=0-8'), - "called with correct arguments").to.be.true - done() + }; + }); + return it("should pass headers to the s3.GetObject", function(done) { + this.read_stream_on.withArgs('readable').callsArgWith(1); + this.AWSSDKPersistorManager.getFileStream(this.bucketName, this.key, this.opts, (err, stream) => { + return expect((this.s3.getObject.calledWith({Bucket: this.bucketName, Key: this.key, Range: 'bytes=0-8'})), + "called with correct arguments").to.be.true; + }); + return done(); + }); + }); - describe "error conditions", -> - describe "when the file doesn't exist", -> - beforeEach -> - @error = new Error() - @error.code = 'NoSuchKey' - it "should produce a NotFoundError", (done) -> - @read_stream_on.withArgs('error').callsArgWith 1, @error - @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => - expect(stream).to.not.be.ok - expect(err).to.be.ok - expect(err instanceof @Errors.NotFoundError, "error is a correct instance").to.equal true - done() + return describe("error conditions", function() { + describe("when the file doesn't exist", function() { + beforeEach(function() { + this.error = new Error(); + return this.error.code = 'NoSuchKey'; + }); + return it("should produce a NotFoundError", function(done) { + 
this.read_stream_on.withArgs('error').callsArgWith(1, this.error); + return this.AWSSDKPersistorManager.getFileStream(this.bucketName, this.key, this.opts, (err, stream) => { + expect(stream).to.not.be.ok; + expect(err).to.be.ok; + expect(err instanceof this.Errors.NotFoundError, "error is a correct instance").to.equal(true); + return done(); + }); + }); + }); - describe "when there is some other error", -> - beforeEach -> - @error = new Error() - it "should dispatch the error from s3 object stream", (done) -> - @read_stream_on.withArgs('error').callsArgWith 1, @error - @AWSSDKPersistorManager.getFileStream @bucketName, @key, @opts, (err, stream) => - expect(stream).to.not.be.ok - expect(err).to.be.ok - expect(err).to.equal @error - done() + return describe("when there is some other error", function() { + beforeEach(function() { + return this.error = new Error(); + }); + return it("should dispatch the error from s3 object stream", function(done) { + this.read_stream_on.withArgs('error').callsArgWith(1, this.error); + return this.AWSSDKPersistorManager.getFileStream(this.bucketName, this.key, this.opts, (err, stream) => { + expect(stream).to.not.be.ok; + expect(err).to.be.ok; + expect(err).to.equal(this.error); + return done(); + }); + }); + }); + }); + }); - describe "copyFile", -> - beforeEach -> - @destKey = "some/key" - @stream = {} + describe("copyFile", function() { + beforeEach(function() { + this.destKey = "some/key"; + return this.stream = {};}); - it "should copy the file with s3.copyObject", (done) -> - @s3.copyObject.callsArgWith 1 - @AWSSDKPersistorManager.copyFile @bucketName, @key, @destKey, (err) => - expect(err).to.not.be.ok - expect(@s3.copyObject.calledOnce, "called only once").to.be.true - expect((@s3.copyObject.calledWith Bucket: @bucketName, Key: @destKey, CopySource: @bucketName + '/' + @key), - "called with correct arguments").to.be.true - done() + it("should copy the file with s3.copyObject", function(done) { + this.s3.copyObject.callsArgWith(1); + return this.AWSSDKPersistorManager.copyFile(this.bucketName, this.key, this.destKey, err => { + expect(err).to.not.be.ok; + expect(this.s3.copyObject.calledOnce, "called only once").to.be.true; + expect((this.s3.copyObject.calledWith({Bucket: this.bucketName, Key: this.destKey, CopySource: this.bucketName + '/' + this.key})), + "called with correct arguments").to.be.true; + return done(); + }); + }); - it "should dispatch the error from s3.copyObject", (done) -> - @s3.copyObject.callsArgWith 1, @error - @AWSSDKPersistorManager.copyFile @bucketName, @key, @destKey, (err) => - expect(err).to.equal @error - done() + return it("should dispatch the error from s3.copyObject", function(done) { + this.s3.copyObject.callsArgWith(1, this.error); + return this.AWSSDKPersistorManager.copyFile(this.bucketName, this.key, this.destKey, err => { + expect(err).to.equal(this.error); + return done(); + }); + }); + }); - describe "deleteFile", -> - it "should delete the file with s3.deleteObject", (done) -> - @s3.deleteObject.callsArgWith 1 - @AWSSDKPersistorManager.deleteFile @bucketName, @key, (err) => - expect(err).to.not.be.ok - expect(@s3.deleteObject.calledOnce, "called only once").to.be.true - expect((@s3.deleteObject.calledWith Bucket: @bucketName, Key: @key), - "called with correct arguments").to.be.true - done() + describe("deleteFile", function() { + it("should delete the file with s3.deleteObject", function(done) { + this.s3.deleteObject.callsArgWith(1); + return this.AWSSDKPersistorManager.deleteFile(this.bucketName, this.key, 
err => { + expect(err).to.not.be.ok; + expect(this.s3.deleteObject.calledOnce, "called only once").to.be.true; + expect((this.s3.deleteObject.calledWith({Bucket: this.bucketName, Key: this.key})), + "called with correct arguments").to.be.true; + return done(); + }); + }); - it "should dispatch the error from s3.deleteObject", (done) -> - @s3.deleteObject.callsArgWith 1, @error - @AWSSDKPersistorManager.deleteFile @bucketName, @key, (err) => - expect(err).to.equal @error - done() + return it("should dispatch the error from s3.deleteObject", function(done) { + this.s3.deleteObject.callsArgWith(1, this.error); + return this.AWSSDKPersistorManager.deleteFile(this.bucketName, this.key, err => { + expect(err).to.equal(this.error); + return done(); + }); + }); + }); - describe "deleteDirectory", -> + describe("deleteDirectory", function() { - it "should list the directory content using s3.listObjects", (done) -> - @s3.listObjects.callsArgWith 1, null, Contents: [] - @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => - expect(err).to.not.be.ok - expect(@s3.listObjects.calledOnce, "called only once").to.be.true - expect((@s3.listObjects.calledWith Bucket: @bucketName, Prefix: @key), - "called with correct arguments").to.be.true - done() + it("should list the directory content using s3.listObjects", function(done) { + this.s3.listObjects.callsArgWith(1, null, {Contents: []}); + return this.AWSSDKPersistorManager.deleteDirectory(this.bucketName, this.key, err => { + expect(err).to.not.be.ok; + expect(this.s3.listObjects.calledOnce, "called only once").to.be.true; + expect((this.s3.listObjects.calledWith({Bucket: this.bucketName, Prefix: this.key})), + "called with correct arguments").to.be.true; + return done(); + }); + }); - it "should dispatch the error from s3.listObjects", (done) -> - @s3.listObjects.callsArgWith 1, @error - @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => - expect(err).to.equal @error - done() + it("should dispatch the error from s3.listObjects", function(done) { + this.s3.listObjects.callsArgWith(1, this.error); + return this.AWSSDKPersistorManager.deleteDirectory(this.bucketName, this.key, err => { + expect(err).to.equal(this.error); + return done(); + }); + }); - describe "with directory content", -> - beforeEach -> - @fileList = [ - Key: 'foo' - , Key: 'bar' + return describe("with directory content", function() { + beforeEach(function() { + return this.fileList = [ + {Key: 'foo'} + , { Key: 'bar' , Key: 'baz' - ] + } + ];}); - it "should forward the file keys to s3.deleteObjects", (done) -> - @s3.listObjects.callsArgWith 1, null, Contents: @fileList - @s3.deleteObjects.callsArgWith 1 - @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => - expect(err).to.not.be.ok - expect(@s3.deleteObjects.calledOnce, "called only once").to.be.true - expect((@s3.deleteObjects.calledWith - Bucket: @bucketName - Delete: - Quiet: true - Objects: @fileList), - "called with correct arguments").to.be.true - done() + it("should forward the file keys to s3.deleteObjects", function(done) { + this.s3.listObjects.callsArgWith(1, null, {Contents: this.fileList}); + this.s3.deleteObjects.callsArgWith(1); + return this.AWSSDKPersistorManager.deleteDirectory(this.bucketName, this.key, err => { + expect(err).to.not.be.ok; + expect(this.s3.deleteObjects.calledOnce, "called only once").to.be.true; + expect((this.s3.deleteObjects.calledWith({ + Bucket: this.bucketName, + Delete: { + Quiet: true, + Objects: this.fileList + }})), + "called with correct 
arguments").to.be.true; + return done(); + }); + }); - it "should dispatch the error from s3.deleteObjects", (done) -> - @s3.listObjects.callsArgWith 1, null, Contents: @fileList - @s3.deleteObjects.callsArgWith 1, @error - @AWSSDKPersistorManager.deleteDirectory @bucketName, @key, (err) => - expect(err).to.equal @error - done() + return it("should dispatch the error from s3.deleteObjects", function(done) { + this.s3.listObjects.callsArgWith(1, null, {Contents: this.fileList}); + this.s3.deleteObjects.callsArgWith(1, this.error); + return this.AWSSDKPersistorManager.deleteDirectory(this.bucketName, this.key, err => { + expect(err).to.equal(this.error); + return done(); + }); + }); + }); + }); - describe "checkIfFileExists", -> + describe("checkIfFileExists", function() { - it "should check for the file with s3.headObject", (done) -> - @s3.headObject.callsArgWith 1, null, {} - @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => - expect(err).to.not.be.ok - expect(@s3.headObject.calledOnce, "called only once").to.be.true - expect((@s3.headObject.calledWith Bucket: @bucketName, Key: @key), - "called with correct arguments").to.be.true - done() + it("should check for the file with s3.headObject", function(done) { + this.s3.headObject.callsArgWith(1, null, {}); + return this.AWSSDKPersistorManager.checkIfFileExists(this.bucketName, this.key, (err, exists) => { + expect(err).to.not.be.ok; + expect(this.s3.headObject.calledOnce, "called only once").to.be.true; + expect((this.s3.headObject.calledWith({Bucket: this.bucketName, Key: this.key})), + "called with correct arguments").to.be.true; + return done(); + }); + }); - it "should return false on an inexistant file", (done) -> - @s3.headObject.callsArgWith 1, null, {} - @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => - expect(exists).to.be.false - done() + it("should return false on an inexistant file", function(done) { + this.s3.headObject.callsArgWith(1, null, {}); + return this.AWSSDKPersistorManager.checkIfFileExists(this.bucketName, this.key, (err, exists) => { + expect(exists).to.be.false; + return done(); + }); + }); - it "should return true on an existing file", (done) -> - @s3.headObject.callsArgWith 1, null, ETag: "etag" - @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => - expect(exists).to.be.true - done() + it("should return true on an existing file", function(done) { + this.s3.headObject.callsArgWith(1, null, {ETag: "etag"}); + return this.AWSSDKPersistorManager.checkIfFileExists(this.bucketName, this.key, (err, exists) => { + expect(exists).to.be.true; + return done(); + }); + }); - it "should dispatch the error from s3.headObject", (done) -> - @s3.headObject.callsArgWith 1, @error - @AWSSDKPersistorManager.checkIfFileExists @bucketName, @key, (err, exists) => - expect(err).to.equal @error - done() + return it("should dispatch the error from s3.headObject", function(done) { + this.s3.headObject.callsArgWith(1, this.error); + return this.AWSSDKPersistorManager.checkIfFileExists(this.bucketName, this.key, (err, exists) => { + expect(err).to.equal(this.error); + return done(); + }); + }); + }); - describe "directorySize", -> + return describe("directorySize", function() { - it "should list the directory content using s3.listObjects", (done) -> - @s3.listObjects.callsArgWith 1, null, Contents: [] - @AWSSDKPersistorManager.directorySize @bucketName, @key, (err) => - expect(err).to.not.be.ok - expect(@s3.listObjects.calledOnce, "called only 
once").to.be.true - expect((@s3.listObjects.calledWith Bucket: @bucketName, Prefix: @key), - "called with correct arguments").to.be.true - done() + it("should list the directory content using s3.listObjects", function(done) { + this.s3.listObjects.callsArgWith(1, null, {Contents: []}); + return this.AWSSDKPersistorManager.directorySize(this.bucketName, this.key, err => { + expect(err).to.not.be.ok; + expect(this.s3.listObjects.calledOnce, "called only once").to.be.true; + expect((this.s3.listObjects.calledWith({Bucket: this.bucketName, Prefix: this.key})), + "called with correct arguments").to.be.true; + return done(); + }); + }); - it "should dispatch the error from s3.listObjects", (done) -> - @s3.listObjects.callsArgWith 1, @error - @AWSSDKPersistorManager.directorySize @bucketName, @key, (err) => - expect(err).to.equal @error - done() + it("should dispatch the error from s3.listObjects", function(done) { + this.s3.listObjects.callsArgWith(1, this.error); + return this.AWSSDKPersistorManager.directorySize(this.bucketName, this.key, err => { + expect(err).to.equal(this.error); + return done(); + }); + }); - it "should sum directory files sizes", (done) -> - @s3.listObjects.callsArgWith 1, null, Contents: [ { Size: 1024 }, { Size: 2048 }] - @AWSSDKPersistorManager.directorySize @bucketName, @key, (err, size) => - expect(size).to.equal 3072 - done() + return it("should sum directory files sizes", function(done) { + this.s3.listObjects.callsArgWith(1, null, {Contents: [ { Size: 1024 }, { Size: 2048 }]}); + return this.AWSSDKPersistorManager.directorySize(this.bucketName, this.key, (err, size) => { + expect(size).to.equal(3072); + return done(); + }); + }); + }); +}); diff --git a/services/filestore/test/unit/coffee/BucketControllerTests.js b/services/filestore/test/unit/coffee/BucketControllerTests.js index c6bbabc89e..db0e6a5aa3 100644 --- a/services/filestore/test/unit/coffee/BucketControllerTests.js +++ b/services/filestore/test/unit/coffee/BucketControllerTests.js @@ -1,71 +1,100 @@ -assert = require("chai").assert -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../app/js/BucketController.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { + assert +} = require("chai"); +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../app/js/BucketController.js"; +const SandboxedModule = require('sandboxed-module'); -describe "BucketController", -> +describe("BucketController", function() { - beforeEach -> - @PersistorManager = - sendStream: sinon.stub() - copyFile: sinon.stub() + beforeEach(function() { + this.PersistorManager = { + sendStream: sinon.stub(), + copyFile: sinon.stub(), deleteFile:sinon.stub() + }; - @settings = - s3: - buckets: + this.settings = { + s3: { + buckets: { user_files:"user_files" - filestore: - backend: "s3" - s3: - secret: "secret" + } + }, + filestore: { + backend: "s3", + s3: { + secret: "secret", key: "this_key" + } + } + }; - @FileHandler = - getFile: sinon.stub() - deleteFile: sinon.stub() - insertFile: sinon.stub() + this.FileHandler = { + getFile: sinon.stub(), + deleteFile: sinon.stub(), + insertFile: sinon.stub(), getDirectorySize: sinon.stub() - @LocalFileWriter = {} - @controller = 
SandboxedModule.require modulePath, requires: - "./LocalFileWriter":@LocalFileWriter - "./FileHandler": @FileHandler - "./PersistorManager":@PersistorManager - "settings-sharelatex": @settings - "metrics-sharelatex": - inc:-> - "logger-sharelatex": - log:-> - err:-> - @project_id = "project_id" - @file_id = "file_id" - @bucket = "user_files" - @key = "#{@project_id}/#{@file_id}" - @req = - query:{} - params: - bucket: @bucket - 0: @key + }; + this.LocalFileWriter = {}; + this.controller = SandboxedModule.require(modulePath, { requires: { + "./LocalFileWriter":this.LocalFileWriter, + "./FileHandler": this.FileHandler, + "./PersistorManager":this.PersistorManager, + "settings-sharelatex": this.settings, + "metrics-sharelatex": { + inc() {} + }, + "logger-sharelatex": { + log() {}, + err() {} + } + } + } + ); + this.project_id = "project_id"; + this.file_id = "file_id"; + this.bucket = "user_files"; + this.key = `${this.project_id}/${this.file_id}`; + this.req = { + query:{}, + params: { + bucket: this.bucket, + 0: this.key + }, headers: {} - @res = - setHeader: -> - @fileStream = {} + }; + this.res = + {setHeader() {}}; + return this.fileStream = {};}); - describe "getFile", -> + return describe("getFile", function() { - it "should pipe the stream", (done)-> - @FileHandler.getFile.callsArgWith(3, null, @fileStream) - @fileStream.pipe = (res)=> - res.should.equal @res - done() - @controller.getFile @req, @res + it("should pipe the stream", function(done){ + this.FileHandler.getFile.callsArgWith(3, null, this.fileStream); + this.fileStream.pipe = res=> { + res.should.equal(this.res); + return done(); + }; + return this.controller.getFile(this.req, this.res); + }); - it "should send a 500 if there is a problem", (done)-> - @FileHandler.getFile.callsArgWith(3, "error") - @res.send = (code)=> - code.should.equal 500 - done() - @controller.getFile @req, @res + return it("should send a 500 if there is a problem", function(done){ + this.FileHandler.getFile.callsArgWith(3, "error"); + this.res.send = code=> { + code.should.equal(500); + return done(); + }; + return this.controller.getFile(this.req, this.res); + }); + }); +}); diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.js b/services/filestore/test/unit/coffee/FSPersistorManagerTests.js index 6980ce3e30..cd73f41ac0 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.js +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.js @@ -1,281 +1,372 @@ -assert = require("chai").assert -sinon = require('sinon') -chai = require('chai') -should = chai.should -expect = chai.expect -modulePath = "../../../app/js/FSPersistorManager.js" -SandboxedModule = require('sandboxed-module') -fs = require("fs") -response = require("response") +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { + assert +} = require("chai"); +const sinon = require('sinon'); +const chai = require('chai'); +const { + should +} = chai; +const { + expect +} = chai; +const modulePath = "../../../app/js/FSPersistorManager.js"; +const SandboxedModule = require('sandboxed-module'); +const fs = require("fs"); +const response = require("response"); -describe "FSPersistorManagerTests", -> +describe("FSPersistorManagerTests", function() { - beforeEach -> - @Fs = - rename:sinon.stub() - createReadStream:sinon.stub() - 
createWriteStream:sinon.stub() - unlink:sinon.stub() - rmdir:sinon.stub() - exists:sinon.stub() - readdir:sinon.stub() - open:sinon.stub() - openSync:sinon.stub() - fstatSync:sinon.stub() - closeSync:sinon.stub() + beforeEach(function() { + this.Fs = { + rename:sinon.stub(), + createReadStream:sinon.stub(), + createWriteStream:sinon.stub(), + unlink:sinon.stub(), + rmdir:sinon.stub(), + exists:sinon.stub(), + readdir:sinon.stub(), + open:sinon.stub(), + openSync:sinon.stub(), + fstatSync:sinon.stub(), + closeSync:sinon.stub(), stat:sinon.stub() - @Rimraf = sinon.stub() - @LocalFileWriter = - writeStream: sinon.stub() + }; + this.Rimraf = sinon.stub(); + this.LocalFileWriter = { + writeStream: sinon.stub(), deleteFile: sinon.stub() - @requires = - "./LocalFileWriter":@LocalFileWriter - "fs":@Fs - "logger-sharelatex": - log:-> - err:-> - "response":response - "rimraf":@Rimraf - "./Errors": @Errors = - NotFoundError: sinon.stub() - @location = "/tmp" - @name1 = "530f2407e7ef165704000007/530f838b46d9a9e859000008" - @name1Filtered ="530f2407e7ef165704000007_530f838b46d9a9e859000008" - @name2 = "second_file" - @error = "error_message" - @FSPersistorManager = SandboxedModule.require modulePath, requires: @requires + }; + this.requires = { + "./LocalFileWriter":this.LocalFileWriter, + "fs":this.Fs, + "logger-sharelatex": { + log() {}, + err() {} + }, + "response":response, + "rimraf":this.Rimraf, + "./Errors": (this.Errors = + {NotFoundError: sinon.stub()}) + }; + this.location = "/tmp"; + this.name1 = "530f2407e7ef165704000007/530f838b46d9a9e859000008"; + this.name1Filtered ="530f2407e7ef165704000007_530f838b46d9a9e859000008"; + this.name2 = "second_file"; + this.error = "error_message"; + return this.FSPersistorManager = SandboxedModule.require(modulePath, {requires: this.requires}); + }); - describe "sendFile", -> - beforeEach -> - @Fs.createReadStream = sinon.stub().returns({ - on: -> - pipe: -> - }) + describe("sendFile", function() { + beforeEach(function() { + return this.Fs.createReadStream = sinon.stub().returns({ + on() {}, + pipe() {} + }); + }); - it "should copy the file", (done) -> - @Fs.createWriteStream =sinon.stub().returns({ - on: (event, handler) -> - process.nextTick(handler) if event is 'finish' - }) - @FSPersistorManager.sendFile @location, @name1, @name2, (err)=> - @Fs.createReadStream.calledWith(@name2).should.equal true - @Fs.createWriteStream.calledWith("#{@location}/#{@name1Filtered}" ).should.equal true - done() + it("should copy the file", function(done) { + this.Fs.createWriteStream =sinon.stub().returns({ + on(event, handler) { + if (event === 'finish') { return process.nextTick(handler); } + } + }); + return this.FSPersistorManager.sendFile(this.location, this.name1, this.name2, err=> { + this.Fs.createReadStream.calledWith(this.name2).should.equal(true); + this.Fs.createWriteStream.calledWith(`${this.location}/${this.name1Filtered}` ).should.equal(true); + return done(); + }); + }); - it "should return an error if the file cannot be stored", (done) -> - @Fs.createWriteStream =sinon.stub().returns({ - on: (event, handler) => - if event is 'error' - process.nextTick () => - handler(@error) - }) - @FSPersistorManager.sendFile @location, @name1, @name2, (err)=> - @Fs.createReadStream.calledWith(@name2).should.equal true - @Fs.createWriteStream.calledWith("#{@location}/#{@name1Filtered}" ).should.equal true - err.should.equal @error - done() + return it("should return an error if the file cannot be stored", function(done) { + this.Fs.createWriteStream 
=sinon.stub().returns({ + on: (event, handler) => { + if (event === 'error') { + return process.nextTick(() => { + return handler(this.error); + }); + } + } + }); + return this.FSPersistorManager.sendFile(this.location, this.name1, this.name2, err=> { + this.Fs.createReadStream.calledWith(this.name2).should.equal(true); + this.Fs.createWriteStream.calledWith(`${this.location}/${this.name1Filtered}` ).should.equal(true); + err.should.equal(this.error); + return done(); + }); + }); + }); - describe "sendStream", -> - beforeEach -> - @FSPersistorManager.sendFile = sinon.stub().callsArgWith(3) - @LocalFileWriter.writeStream.callsArgWith(2, null, @name1) - @LocalFileWriter.deleteFile.callsArg(1) - @SourceStream = - on:-> + describe("sendStream", function() { + beforeEach(function() { + this.FSPersistorManager.sendFile = sinon.stub().callsArgWith(3); + this.LocalFileWriter.writeStream.callsArgWith(2, null, this.name1); + this.LocalFileWriter.deleteFile.callsArg(1); + return this.SourceStream = + {on() {}}; + }); - it "should sent stream to LocalFileWriter", (done)-> - @FSPersistorManager.sendStream @location, @name1, @SourceStream, => - @LocalFileWriter.writeStream.calledWith(@SourceStream).should.equal true - done() + it("should sent stream to LocalFileWriter", function(done){ + return this.FSPersistorManager.sendStream(this.location, this.name1, this.SourceStream, () => { + this.LocalFileWriter.writeStream.calledWith(this.SourceStream).should.equal(true); + return done(); + }); + }); - it "should return the error from LocalFileWriter", (done)-> - @LocalFileWriter.writeStream.callsArgWith(2, @error) - @FSPersistorManager.sendStream @location, @name1, @SourceStream, (err)=> - err.should.equal @error - done() + it("should return the error from LocalFileWriter", function(done){ + this.LocalFileWriter.writeStream.callsArgWith(2, this.error); + return this.FSPersistorManager.sendStream(this.location, this.name1, this.SourceStream, err=> { + err.should.equal(this.error); + return done(); + }); + }); - it "should send the file to the filestore", (done)-> - @LocalFileWriter.writeStream.callsArgWith(2) - @FSPersistorManager.sendStream @location, @name1, @SourceStream, (err)=> - @FSPersistorManager.sendFile.called.should.equal true - done() + return it("should send the file to the filestore", function(done){ + this.LocalFileWriter.writeStream.callsArgWith(2); + return this.FSPersistorManager.sendStream(this.location, this.name1, this.SourceStream, err=> { + this.FSPersistorManager.sendFile.called.should.equal(true); + return done(); + }); + }); + }); - describe "getFileStream", -> - beforeEach -> - @opts = {} + describe("getFileStream", function() { + beforeEach(function() { + return this.opts = {};}); - it "should use correct file location", (done) -> - @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res) => - @Fs.open.calledWith("#{@location}/#{@name1Filtered}").should.equal true - done() + it("should use correct file location", function(done) { + this.FSPersistorManager.getFileStream(this.location, this.name1, this.opts, (err,res) => {}); + this.Fs.open.calledWith(`${this.location}/${this.name1Filtered}`).should.equal(true); + return done(); + }); - describe "with start and end options", -> + describe("with start and end options", function() { - beforeEach -> - @fd = 2019 - @opts_in = {start: 0, end: 8} - @opts = {start: 0, end: 8, fd: @fd} - @Fs.open.callsArgWith(2, null, @fd) + beforeEach(function() { + this.fd = 2019; + this.opts_in = {start: 0, end: 8}; + this.opts = {start: 0, 
end: 8, fd: this.fd}; + return this.Fs.open.callsArgWith(2, null, this.fd); + }); - it 'should pass the options to createReadStream', (done) -> - @FSPersistorManager.getFileStream @location, @name1, @opts_in, (err,res)=> - @Fs.createReadStream.calledWith(null, @opts).should.equal true - done() + return it('should pass the options to createReadStream', function(done) { + this.FSPersistorManager.getFileStream(this.location, this.name1, this.opts_in, (err,res)=> {}); + this.Fs.createReadStream.calledWith(null, this.opts).should.equal(true); + return done(); + }); + }); - describe "error conditions", -> + return describe("error conditions", function() { - describe "when the file does not exist", -> + describe("when the file does not exist", function() { - beforeEach -> - @fakeCode = 'ENOENT' - err = new Error() - err.code = @fakeCode - @Fs.open.callsArgWith(2, err, null) + beforeEach(function() { + this.fakeCode = 'ENOENT'; + const err = new Error(); + err.code = this.fakeCode; + return this.Fs.open.callsArgWith(2, err, null); + }); - it "should give a NotFoundError", (done) -> - @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> - expect(res).to.equal null - expect(err).to.not.equal null - expect(err instanceof @Errors.NotFoundError).to.equal true - done() + return it("should give a NotFoundError", function(done) { + return this.FSPersistorManager.getFileStream(this.location, this.name1, this.opts, (err,res)=> { + expect(res).to.equal(null); + expect(err).to.not.equal(null); + expect(err instanceof this.Errors.NotFoundError).to.equal(true); + return done(); + }); + }); + }); - describe "when some other error happens", -> + return describe("when some other error happens", function() { - beforeEach -> - @fakeCode = 'SOMETHINGHORRIBLE' - err = new Error() - err.code = @fakeCode - @Fs.open.callsArgWith(2, err, null) + beforeEach(function() { + this.fakeCode = 'SOMETHINGHORRIBLE'; + const err = new Error(); + err.code = this.fakeCode; + return this.Fs.open.callsArgWith(2, err, null); + }); - it "should give an Error", (done) -> - @FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=> - expect(res).to.equal null - expect(err).to.not.equal null - expect(err instanceof Error).to.equal true - done() + return it("should give an Error", function(done) { + return this.FSPersistorManager.getFileStream(this.location, this.name1, this.opts, (err,res)=> { + expect(res).to.equal(null); + expect(err).to.not.equal(null); + expect(err instanceof Error).to.equal(true); + return done(); + }); + }); + }); + }); + }); - describe "getFileSize", -> - it "should return the file size", (done) -> - expectedFileSize = 75382 - @Fs.stat.yields(new Error("fs.stat got unexpected arguments")) - @Fs.stat.withArgs("#{@location}/#{@name1Filtered}") - .yields(null, { size: expectedFileSize }) + describe("getFileSize", function() { + it("should return the file size", function(done) { + const expectedFileSize = 75382; + this.Fs.stat.yields(new Error("fs.stat got unexpected arguments")); + this.Fs.stat.withArgs(`${this.location}/${this.name1Filtered}`) + .yields(null, { size: expectedFileSize }); - @FSPersistorManager.getFileSize @location, @name1, (err, fileSize) => - if err? 
-          return done(err)
-        expect(fileSize).to.equal(expectedFileSize)
-        done()
+      return this.FSPersistorManager.getFileSize(this.location, this.name1, (err, fileSize) => {
+        if (err != null) {
+          return done(err);
+        }
+        expect(fileSize).to.equal(expectedFileSize);
+        return done();
+      });
+    });
-    it "should throw a NotFoundError if the file does not exist", (done) ->
-      error = new Error()
-      error.code = "ENOENT"
-      @Fs.stat.yields(error)
+    it("should throw a NotFoundError if the file does not exist", function(done) {
+      const error = new Error();
+      error.code = "ENOENT";
+      this.Fs.stat.yields(error);
-      @FSPersistorManager.getFileSize @location, @name1, (err, fileSize) =>
-        expect(err).to.be.instanceof(@Errors.NotFoundError)
-        done()
+      return this.FSPersistorManager.getFileSize(this.location, this.name1, (err, fileSize) => {
+        expect(err).to.be.instanceof(this.Errors.NotFoundError);
+        return done();
+      });
+    });
-    it "should rethrow any other error", (done) ->
-      error = new Error()
-      @Fs.stat.yields(error)
+    return it("should rethrow any other error", function(done) {
+      const error = new Error();
+      this.Fs.stat.yields(error);
-      @FSPersistorManager.getFileSize @location, @name1, (err, fileSize) =>
-        expect(err).to.equal(error)
-        done()
+      return this.FSPersistorManager.getFileSize(this.location, this.name1, (err, fileSize) => {
+        expect(err).to.equal(error);
+        return done();
+      });
+    });
+  });
-  describe "copyFile", ->
-    beforeEach ->
-      @ReadStream=
-        on:->
+  describe("copyFile", function() {
+    beforeEach(function() {
+      this.ReadStream= {
+        on() {},
         pipe:sinon.stub()
-      @WriteStream=
-        on:->
-      @Fs.createReadStream.returns(@ReadStream)
-      @Fs.createWriteStream.returns(@WriteStream)
+      };
+      this.WriteStream=
+        {on() {}};
+      this.Fs.createReadStream.returns(this.ReadStream);
+      return this.Fs.createWriteStream.returns(this.WriteStream);
+    });
-    it "Should open the source for reading", (done) ->
-      @FSPersistorManager.copyFile @location, @name1, @name2, ->
-      @Fs.createReadStream.calledWith("#{@location}/#{@name1Filtered}").should.equal true
-      done()
+    it("Should open the source for reading", function(done) {
+      this.FSPersistorManager.copyFile(this.location, this.name1, this.name2, function() {});
+      this.Fs.createReadStream.calledWith(`${this.location}/${this.name1Filtered}`).should.equal(true);
+      return done();
+    });
-    it "Should open the target for writing", (done) ->
-      @FSPersistorManager.copyFile @location, @name1, @name2, ->
-      @Fs.createWriteStream.calledWith("#{@location}/#{@name2}").should.equal true
-      done()
+    it("Should open the target for writing", function(done) {
+      this.FSPersistorManager.copyFile(this.location, this.name1, this.name2, function() {});
+      this.Fs.createWriteStream.calledWith(`${this.location}/${this.name2}`).should.equal(true);
+      return done();
+    });
-    it "Should pipe the source to the target", (done) ->
-      @FSPersistorManager.copyFile @location, @name1, @name2, ->
-      @ReadStream.pipe.calledWith(@WriteStream).should.equal true
-      done()
+    return it("Should pipe the source to the target", function(done) {
+      this.FSPersistorManager.copyFile(this.location, this.name1, this.name2, function() {});
+      this.ReadStream.pipe.calledWith(this.WriteStream).should.equal(true);
+      return done();
+    });
+  });
-  describe "deleteFile", ->
-    beforeEach ->
-      @Fs.unlink.callsArgWith(1,@error)
+  describe("deleteFile", function() {
+    beforeEach(function() {
+      return this.Fs.unlink.callsArgWith(1,this.error);
+    });
-    it "Should call unlink with correct options", (done) ->
-      @FSPersistorManager.deleteFile @location, @name1, (err) =>
-        @Fs.unlink.calledWith("#{@location}/#{@name1Filtered}").should.equal true
-        done()
+    it("Should call unlink with correct options", function(done) {
+      return this.FSPersistorManager.deleteFile(this.location, this.name1, err => {
+        this.Fs.unlink.calledWith(`${this.location}/${this.name1Filtered}`).should.equal(true);
+        return done();
+      });
+    });
-    it "Should propogate the error", (done) ->
-      @FSPersistorManager.deleteFile @location, @name1, (err) =>
-        err.should.equal @error
-        done()
+    return it("Should propogate the error", function(done) {
+      return this.FSPersistorManager.deleteFile(this.location, this.name1, err => {
+        err.should.equal(this.error);
+        return done();
+      });
+    });
+  });
-  describe "deleteDirectory", ->
-    beforeEach ->
-      @Rimraf.callsArgWith(1,@error)
+  describe("deleteDirectory", function() {
+    beforeEach(function() {
+      return this.Rimraf.callsArgWith(1,this.error);
+    });
-    it "Should call rmdir(rimraf) with correct options", (done) ->
-      @FSPersistorManager.deleteDirectory @location, @name1, (err) =>
-        @Rimraf.calledWith("#{@location}/#{@name1Filtered}").should.equal true
-        done()
+    it("Should call rmdir(rimraf) with correct options", function(done) {
+      return this.FSPersistorManager.deleteDirectory(this.location, this.name1, err => {
+        this.Rimraf.calledWith(`${this.location}/${this.name1Filtered}`).should.equal(true);
+        return done();
+      });
+    });
-    it "Should propogate the error", (done) ->
-      @FSPersistorManager.deleteDirectory @location, @name1, (err) =>
-        err.should.equal @error
-        done()
+    return it("Should propogate the error", function(done) {
+      return this.FSPersistorManager.deleteDirectory(this.location, this.name1, err => {
+        err.should.equal(this.error);
+        return done();
+      });
+    });
+  });
-  describe "checkIfFileExists", ->
-    beforeEach ->
-      @Fs.exists.callsArgWith(1,true)
+  describe("checkIfFileExists", function() {
+    beforeEach(function() {
+      return this.Fs.exists.callsArgWith(1,true);
+    });
-    it "Should call exists with correct options", (done) ->
-      @FSPersistorManager.checkIfFileExists @location, @name1, (exists) =>
-        @Fs.exists.calledWith("#{@location}/#{@name1Filtered}").should.equal true
-        done()
+    it("Should call exists with correct options", function(done) {
+      return this.FSPersistorManager.checkIfFileExists(this.location, this.name1, exists => {
+        this.Fs.exists.calledWith(`${this.location}/${this.name1Filtered}`).should.equal(true);
+        return done();
+      });
+    });
-    # fs.exists simply returns false on any error, so...
-    it "should not return an error", (done) ->
-      @FSPersistorManager.checkIfFileExists @location, @name1, (err,exists) =>
-        expect(err).to.be.null
-        done()
+    // fs.exists simply returns false on any error, so...
+ it("should not return an error", function(done) { + return this.FSPersistorManager.checkIfFileExists(this.location, this.name1, (err,exists) => { + expect(err).to.be.null; + return done(); + }); + }); - it "Should return true for existing files", (done) -> - @Fs.exists.callsArgWith(1,true) - @FSPersistorManager.checkIfFileExists @location, @name1, (err,exists) => - exists.should.be.true - done() + it("Should return true for existing files", function(done) { + this.Fs.exists.callsArgWith(1,true); + return this.FSPersistorManager.checkIfFileExists(this.location, this.name1, (err,exists) => { + exists.should.be.true; + return done(); + }); + }); - it "Should return false for non-existing files", (done) -> - @Fs.exists.callsArgWith(1,false) - @FSPersistorManager.checkIfFileExists @location, @name1, (err,exists) => - exists.should.be.false - done() + return it("Should return false for non-existing files", function(done) { + this.Fs.exists.callsArgWith(1,false); + return this.FSPersistorManager.checkIfFileExists(this.location, this.name1, (err,exists) => { + exists.should.be.false; + return done(); + }); + }); + }); - describe "directorySize", -> + return describe("directorySize", function() { - it "should propogate the error", (done) -> - @Fs.readdir.callsArgWith(1, @error) - @FSPersistorManager.directorySize @location, @name1, (err, totalsize) => - err.should.equal @error - done() + it("should propogate the error", function(done) { + this.Fs.readdir.callsArgWith(1, this.error); + return this.FSPersistorManager.directorySize(this.location, this.name1, (err, totalsize) => { + err.should.equal(this.error); + return done(); + }); + }); - it "should sum directory files size", (done) -> - @Fs.readdir.callsArgWith(1, null, [ {'file1'}, {'file2'} ]) - @Fs.fstatSync.returns({size : 1024}) - @FSPersistorManager.directorySize @location, @name1, (err, totalsize) => - expect(totalsize).to.equal 2048 - done() + return it("should sum directory files size", function(done) { + this.Fs.readdir.callsArgWith(1, null, [ {'file1': 'file1'}, {'file2': 'file2'} ]); + this.Fs.fstatSync.returns({size : 1024}); + return this.FSPersistorManager.directorySize(this.location, this.name1, (err, totalsize) => { + expect(totalsize).to.equal(2048); + return done(); + }); + }); + }); +}); diff --git a/services/filestore/test/unit/coffee/FileControllerTests.js b/services/filestore/test/unit/coffee/FileControllerTests.js index 821aadb68d..ea3b0e32b8 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.js +++ b/services/filestore/test/unit/coffee/FileControllerTests.js @@ -1,216 +1,283 @@ -assert = require("chai").assert -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../app/js/FileController.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { + assert +} = require("chai"); +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../app/js/FileController.js"; +const SandboxedModule = require('sandboxed-module'); -describe "FileController", -> +describe("FileController", function() { - beforeEach -> - @PersistorManager = - sendStream: sinon.stub() - copyFile: sinon.stub() + beforeEach(function() { + this.PersistorManager = { + sendStream: 
sinon.stub(), + copyFile: sinon.stub(), deleteFile:sinon.stub() + }; - @settings = - s3: - buckets: + this.settings = { + s3: { + buckets: { user_files:"user_files" - @FileHandler = - getFile: sinon.stub() - getFileSize: sinon.stub() - deleteFile: sinon.stub() - insertFile: sinon.stub() + } + } + }; + this.FileHandler = { + getFile: sinon.stub(), + getFileSize: sinon.stub(), + deleteFile: sinon.stub(), + insertFile: sinon.stub(), getDirectorySize: sinon.stub() - @LocalFileWriter = {} - @controller = SandboxedModule.require modulePath, requires: - "./LocalFileWriter":@LocalFileWriter - "./FileHandler": @FileHandler - "./PersistorManager":@PersistorManager - "./Errors": @Errors = - NotFoundError: sinon.stub() - "settings-sharelatex": @settings - "metrics-sharelatex": - inc:-> - "logger-sharelatex": - log:-> - err:-> - @project_id = "project_id" - @file_id = "file_id" - @bucket = "user_files" - @key = "#{@project_id}/#{@file_id}" - @req = - key:@key - bucket:@bucket - query:{} - params: - project_id:@project_id - file_id:@file_id + }; + this.LocalFileWriter = {}; + this.controller = SandboxedModule.require(modulePath, { requires: { + "./LocalFileWriter":this.LocalFileWriter, + "./FileHandler": this.FileHandler, + "./PersistorManager":this.PersistorManager, + "./Errors": (this.Errors = + {NotFoundError: sinon.stub()}), + "settings-sharelatex": this.settings, + "metrics-sharelatex": { + inc() {} + }, + "logger-sharelatex": { + log() {}, + err() {} + } + } + } + ); + this.project_id = "project_id"; + this.file_id = "file_id"; + this.bucket = "user_files"; + this.key = `${this.project_id}/${this.file_id}`; + this.req = { + key:this.key, + bucket:this.bucket, + query:{}, + params: { + project_id:this.project_id, + file_id:this.file_id + }, headers: {} - @res = - set: sinon.stub().returnsThis() + }; + this.res = { + set: sinon.stub().returnsThis(), status: sinon.stub().returnsThis() - @fileStream = {} + }; + return this.fileStream = {};}); - describe "getFile", -> + describe("getFile", function() { - it "should pipe the stream", (done)-> - @FileHandler.getFile.callsArgWith(3, null, @fileStream) - @fileStream.pipe = (res)=> - res.should.equal @res - done() - @controller.getFile @req, @res + it("should pipe the stream", function(done){ + this.FileHandler.getFile.callsArgWith(3, null, this.fileStream); + this.fileStream.pipe = res=> { + res.should.equal(this.res); + return done(); + }; + return this.controller.getFile(this.req, this.res); + }); - it "should send a 200 if the cacheWarm param is true", (done)-> - @req.query.cacheWarm = true - @FileHandler.getFile.callsArgWith(3, null, @fileStream) - @res.send = (statusCode)=> - statusCode.should.equal 200 - done() - @controller.getFile @req, @res + it("should send a 200 if the cacheWarm param is true", function(done){ + this.req.query.cacheWarm = true; + this.FileHandler.getFile.callsArgWith(3, null, this.fileStream); + this.res.send = statusCode=> { + statusCode.should.equal(200); + return done(); + }; + return this.controller.getFile(this.req, this.res); + }); - it "should send a 500 if there is a problem", (done)-> - @FileHandler.getFile.callsArgWith(3, "error") - @res.send = (code)=> - code.should.equal 500 - done() - @controller.getFile @req, @res + it("should send a 500 if there is a problem", function(done){ + this.FileHandler.getFile.callsArgWith(3, "error"); + this.res.send = code=> { + code.should.equal(500); + return done(); + }; + return this.controller.getFile(this.req, this.res); + }); - describe "with a 'Range' header set", -> + return 
describe("with a 'Range' header set", function() { - beforeEach -> - @req.headers.range = 'bytes=0-8' + beforeEach(function() { + return this.req.headers.range = 'bytes=0-8'; + }); - it "should pass 'start' and 'end' options to FileHandler", (done) -> - @FileHandler.getFile.callsArgWith(3, null, @fileStream) - @fileStream.pipe = (res)=> - expect(@FileHandler.getFile.lastCall.args[2].start).to.equal 0 - expect(@FileHandler.getFile.lastCall.args[2].end).to.equal 8 - done() - @controller.getFile @req, @res + return it("should pass 'start' and 'end' options to FileHandler", function(done) { + this.FileHandler.getFile.callsArgWith(3, null, this.fileStream); + this.fileStream.pipe = res=> { + expect(this.FileHandler.getFile.lastCall.args[2].start).to.equal(0); + expect(this.FileHandler.getFile.lastCall.args[2].end).to.equal(8); + return done(); + }; + return this.controller.getFile(this.req, this.res); + }); + }); + }); - describe "getFileHead", -> - it "should return the file size in a Content-Length header", (done) -> - expectedFileSize = 84921 - @FileHandler.getFileSize.yields( + describe("getFileHead", function() { + it("should return the file size in a Content-Length header", function(done) { + const expectedFileSize = 84921; + this.FileHandler.getFileSize.yields( new Error("FileHandler.getFileSize: unexpected arguments") - ) - @FileHandler.getFileSize.withArgs(@bucket, @key).yields(null, expectedFileSize) + ); + this.FileHandler.getFileSize.withArgs(this.bucket, this.key).yields(null, expectedFileSize); - @res.end = () => - expect(@res.status.lastCall.args[0]).to.equal(200) - expect(@res.set.calledWith("Content-Length", expectedFileSize)).to.equal(true) - done() + this.res.end = () => { + expect(this.res.status.lastCall.args[0]).to.equal(200); + expect(this.res.set.calledWith("Content-Length", expectedFileSize)).to.equal(true); + return done(); + }; - @controller.getFileHead(@req, @res) + return this.controller.getFileHead(this.req, this.res); + }); - it "should return a 404 is the file is not found", (done) -> - @FileHandler.getFileSize.yields(new @Errors.NotFoundError()) + it("should return a 404 is the file is not found", function(done) { + this.FileHandler.getFileSize.yields(new this.Errors.NotFoundError()); - @res.end = () => - expect(@res.status.lastCall.args[0]).to.equal(404) - done() + this.res.end = () => { + expect(this.res.status.lastCall.args[0]).to.equal(404); + return done(); + }; - @controller.getFileHead(@req, @res) + return this.controller.getFileHead(this.req, this.res); + }); - it "should return a 500 on internal errors", (done) -> - @FileHandler.getFileSize.yields(new Error()) + return it("should return a 500 on internal errors", function(done) { + this.FileHandler.getFileSize.yields(new Error()); - @res.end = () => - expect(@res.status.lastCall.args[0]).to.equal(500) - done() + this.res.end = () => { + expect(this.res.status.lastCall.args[0]).to.equal(500); + return done(); + }; - @controller.getFileHead(@req, @res) + return this.controller.getFileHead(this.req, this.res); + }); + }); - describe "insertFile", -> + describe("insertFile", () => it("should send bucket name key and res to PersistorManager", function(done){ + this.FileHandler.insertFile.callsArgWith(3); + this.res.send = () => { + this.FileHandler.insertFile.calledWith(this.bucket, this.key, this.req).should.equal(true); + return done(); + }; + return this.controller.insertFile(this.req, this.res); + })); - it "should send bucket name key and res to PersistorManager", (done)-> - 
@FileHandler.insertFile.callsArgWith(3) - @res.send = => - @FileHandler.insertFile.calledWith(@bucket, @key, @req).should.equal true - done() - @controller.insertFile @req, @res + describe("copyFile", function() { + beforeEach(function() { + this.oldFile_id = "old_file_id"; + this.oldProject_id = "old_project_id"; + return this.req.body = { + source: { + project_id: this.oldProject_id, + file_id: this.oldFile_id + } + }; + }); - describe "copyFile", -> - beforeEach -> - @oldFile_id = "old_file_id" - @oldProject_id = "old_project_id" - @req.body = - source: - project_id: @oldProject_id - file_id: @oldFile_id + it("should send bucket name and both keys to PersistorManager", function(done){ + this.PersistorManager.copyFile.callsArgWith(3); + this.res.send = code=> { + code.should.equal(200); + this.PersistorManager.copyFile.calledWith(this.bucket, `${this.oldProject_id}/${this.oldFile_id}`, this.key).should.equal(true); + return done(); + }; + return this.controller.copyFile(this.req, this.res); + }); - it "should send bucket name and both keys to PersistorManager", (done)-> - @PersistorManager.copyFile.callsArgWith(3) - @res.send = (code)=> - code.should.equal 200 - @PersistorManager.copyFile.calledWith(@bucket, "#{@oldProject_id}/#{@oldFile_id}", @key).should.equal true - done() - @controller.copyFile @req, @res + it("should send a 404 if the original file was not found", function(done) { + this.PersistorManager.copyFile.callsArgWith(3, new this.Errors.NotFoundError()); + this.res.send = code=> { + code.should.equal(404); + return done(); + }; + return this.controller.copyFile(this.req, this.res); + }); - it "should send a 404 if the original file was not found", (done) -> - @PersistorManager.copyFile.callsArgWith(3, new @Errors.NotFoundError()) - @res.send = (code)=> - code.should.equal 404 - done() - @controller.copyFile @req, @res + return it("should send a 500 if there was an error", function(done){ + this.PersistorManager.copyFile.callsArgWith(3, "error"); + this.res.send = code=> { + code.should.equal(500); + return done(); + }; + return this.controller.copyFile(this.req, this.res); + }); + }); - it "should send a 500 if there was an error", (done)-> - @PersistorManager.copyFile.callsArgWith(3, "error") - @res.send = (code)=> - code.should.equal 500 - done() - @controller.copyFile @req, @res + describe("delete file", function() { - describe "delete file", -> + it("should tell the file handler", function(done){ + this.FileHandler.deleteFile.callsArgWith(2); + this.res.send = code=> { + code.should.equal(204); + this.FileHandler.deleteFile.calledWith(this.bucket, this.key).should.equal(true); + return done(); + }; + return this.controller.deleteFile(this.req, this.res); + }); - it "should tell the file handler", (done)-> - @FileHandler.deleteFile.callsArgWith(2) - @res.send = (code)=> - code.should.equal 204 - @FileHandler.deleteFile.calledWith(@bucket, @key).should.equal true - done() - @controller.deleteFile @req, @res + return it("should send a 500 if there was an error", function(done){ + this.FileHandler.deleteFile.callsArgWith(2, "error"); + this.res.send = function(code){ + code.should.equal(500); + return done(); + }; + return this.controller.deleteFile(this.req, this.res); + }); + }); - it "should send a 500 if there was an error", (done)-> - @FileHandler.deleteFile.callsArgWith(2, "error") - @res.send = (code)-> - code.should.equal 500 - done() - @controller.deleteFile @req, @res + describe("_get_range", function() { - describe "_get_range", -> + it("should parse a valid Range 
header", function(done) { + const result = this.controller._get_range('bytes=0-200'); + expect(result).to.not.equal(null); + expect(result.start).to.equal(0); + expect(result.end).to.equal(200); + return done(); + }); - it "should parse a valid Range header", (done) -> - result = @controller._get_range('bytes=0-200') - expect(result).to.not.equal null - expect(result.start).to.equal 0 - expect(result.end).to.equal 200 - done() + it("should return null for an invalid Range header", function(done) { + const result = this.controller._get_range('wat'); + expect(result).to.equal(null); + return done(); + }); - it "should return null for an invalid Range header", (done) -> - result = @controller._get_range('wat') - expect(result).to.equal null - done() + return it("should return null for any type other than 'bytes'", function(done) { + const result = this.controller._get_range('carrots=0-200'); + expect(result).to.equal(null); + return done(); + }); + }); - it "should return null for any type other than 'bytes'", (done) -> - result = @controller._get_range('carrots=0-200') - expect(result).to.equal null - done() + return describe("directorySize", function() { - describe "directorySize", -> + it("should return total directory size bytes", function(done) { + this.FileHandler.getDirectorySize.callsArgWith(2, null, 1024); + return this.controller.directorySize(this.req, { json:result=> { + expect(result['total bytes']).to.equal(1024); + return done(); + } + } + ); + }); - it "should return total directory size bytes", (done) -> - @FileHandler.getDirectorySize.callsArgWith(2, null, 1024) - @controller.directorySize @req, json:(result)=> - expect(result['total bytes']).to.equal 1024 - done() - - it "should send a 500 if there was an error", (done)-> - @FileHandler.getDirectorySize.callsArgWith(2, "error") - @res.send = (code)-> - code.should.equal 500 - done() - @controller.directorySize @req, @res + return it("should send a 500 if there was an error", function(done){ + this.FileHandler.getDirectorySize.callsArgWith(2, "error"); + this.res.send = function(code){ + code.should.equal(500); + return done(); + }; + return this.controller.directorySize(this.req, this.res); + }); + }); +}); diff --git a/services/filestore/test/unit/coffee/FileConverterTests.js b/services/filestore/test/unit/coffee/FileConverterTests.js index ed59d56376..c546b61a15 100644 --- a/services/filestore/test/unit/coffee/FileConverterTests.js +++ b/services/filestore/test/unit/coffee/FileConverterTests.js @@ -1,81 +1,112 @@ -assert = require("chai").assert -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../app/js/FileConverter.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { + assert +} = require("chai"); +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../app/js/FileConverter.js"; +const SandboxedModule = require('sandboxed-module'); -describe "FileConverter", -> +describe("FileConverter", function() { - beforeEach -> + beforeEach(function() { - @safe_exec = sinon.stub() - @converter = SandboxedModule.require modulePath, requires: - "./SafeExec": @safe_exec - "logger-sharelatex": - log:-> - err:-> - "metrics-sharelatex": - inc:-> - Timer:-> - done:-> - 
"settings-sharelatex": @Settings = - commands: + this.safe_exec = sinon.stub(); + this.converter = SandboxedModule.require(modulePath, { requires: { + "./SafeExec": this.safe_exec, + "logger-sharelatex": { + log() {}, + err() {} + }, + "metrics-sharelatex": { + inc() {}, + Timer() { + return {done() {}}; + } + }, + "settings-sharelatex": (this.Settings = { + commands: { convertCommandPrefix: [] + } + }) + } + }); - @sourcePath = "/this/path/here.eps" - @format = "png" - @error = "Error" + this.sourcePath = "/this/path/here.eps"; + this.format = "png"; + return this.error = "Error"; + }); - describe "convert", -> + describe("convert", function() { - it "should convert the source to the requested format", (done)-> - @safe_exec.callsArgWith(2) - @converter.convert @sourcePath, @format, (err)=> - args = @safe_exec.args[0][0] - args.indexOf("#{@sourcePath}[0]").should.not.equal -1 - args.indexOf("#{@sourcePath}.#{@format}").should.not.equal -1 - done() + it("should convert the source to the requested format", function(done){ + this.safe_exec.callsArgWith(2); + return this.converter.convert(this.sourcePath, this.format, err=> { + const args = this.safe_exec.args[0][0]; + args.indexOf(`${this.sourcePath}[0]`).should.not.equal(-1); + args.indexOf(`${this.sourcePath}.${this.format}`).should.not.equal(-1); + return done(); + }); + }); - it "should return the dest path", (done)-> - @safe_exec.callsArgWith(2) - @converter.convert @sourcePath, @format, (err, destPath)=> - destPath.should.equal "#{@sourcePath}.#{@format}" - done() + it("should return the dest path", function(done){ + this.safe_exec.callsArgWith(2); + return this.converter.convert(this.sourcePath, this.format, (err, destPath)=> { + destPath.should.equal(`${this.sourcePath}.${this.format}`); + return done(); + }); + }); - it "should return the error from convert", (done)-> - @safe_exec.callsArgWith(2, @error) - @converter.convert @sourcePath, @format, (err)=> - err.should.equal @error - done() + it("should return the error from convert", function(done){ + this.safe_exec.callsArgWith(2, this.error); + return this.converter.convert(this.sourcePath, this.format, err=> { + err.should.equal(this.error); + return done(); + }); + }); - it "should not accapt an non aproved format", (done)-> - @safe_exec.callsArgWith(2) - @converter.convert @sourcePath, "ahhhhh", (err)=> - expect(err).to.exist - done() + it("should not accapt an non aproved format", function(done){ + this.safe_exec.callsArgWith(2); + return this.converter.convert(this.sourcePath, "ahhhhh", err=> { + expect(err).to.exist; + return done(); + }); + }); - it "should prefix the command with Settings.commands.convertCommandPrefix", (done) -> - @safe_exec.callsArgWith(2) - @Settings.commands.convertCommandPrefix = ["nice"] - @converter.convert @sourcePath, @format, (err)=> - command = @safe_exec.args[0][0] - command[0].should.equal "nice" - done() + return it("should prefix the command with Settings.commands.convertCommandPrefix", function(done) { + this.safe_exec.callsArgWith(2); + this.Settings.commands.convertCommandPrefix = ["nice"]; + return this.converter.convert(this.sourcePath, this.format, err=> { + const command = this.safe_exec.args[0][0]; + command[0].should.equal("nice"); + return done(); + }); + }); + }); - describe "thumbnail", -> - it "should call converter resize with args", (done)-> - @safe_exec.callsArgWith(2) - @converter.thumbnail @sourcePath, (err)=> - args = @safe_exec.args[0][0] - args.indexOf("#{@sourcePath}[0]").should.not.equal -1 - done() + 
describe("thumbnail", () => it("should call converter resize with args", function(done){ + this.safe_exec.callsArgWith(2); + return this.converter.thumbnail(this.sourcePath, err=> { + const args = this.safe_exec.args[0][0]; + args.indexOf(`${this.sourcePath}[0]`).should.not.equal(-1); + return done(); + }); + })); - describe "preview", -> - it "should call converter resize with args", (done)-> - @safe_exec.callsArgWith(2) - @converter.preview @sourcePath, (err)=> - args = @safe_exec.args[0][0] - args.indexOf("#{@sourcePath}[0]").should.not.equal -1 - done() + return describe("preview", () => it("should call converter resize with args", function(done){ + this.safe_exec.callsArgWith(2); + return this.converter.preview(this.sourcePath, err=> { + const args = this.safe_exec.args[0][0]; + args.indexOf(`${this.sourcePath}[0]`).should.not.equal(-1); + return done(); + }); + })); +}); diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.js b/services/filestore/test/unit/coffee/FileHandlerTests.js index 754366195e..f83561166f 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.js +++ b/services/filestore/test/unit/coffee/FileHandlerTests.js @@ -1,208 +1,275 @@ -assert = require("chai").assert -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../app/js/FileHandler.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { + assert +} = require("chai"); +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../app/js/FileHandler.js"; +const SandboxedModule = require('sandboxed-module'); -describe "FileHandler", -> +describe("FileHandler", function() { - beforeEach -> - @settings = - s3: - buckets: + beforeEach(function() { + this.settings = { + s3: { + buckets: { user_files:"user_files" - @PersistorManager = - getFileStream: sinon.stub() - checkIfFileExists: sinon.stub() - deleteFile: sinon.stub() - deleteDirectory: sinon.stub() - sendStream: sinon.stub() - insertFile: sinon.stub() - directorySize: sinon.stub() - @LocalFileWriter = - writeStream: sinon.stub() - getStream: sinon.stub() - deleteFile: sinon.stub() - @FileConverter = - convert: sinon.stub() - thumbnail: sinon.stub() - preview: sinon.stub() - @keyBuilder = - addCachingToKey: sinon.stub() - getConvertedFolderKey: sinon.stub() - @ImageOptimiser = - compressPng: sinon.stub() - @handler = SandboxedModule.require modulePath, requires: - "settings-sharelatex": @settings - "./PersistorManager":@PersistorManager - "./LocalFileWriter":@LocalFileWriter - "./FileConverter":@FileConverter - "./KeyBuilder": @keyBuilder - "./ImageOptimiser":@ImageOptimiser - "logger-sharelatex": - log:-> - err:-> - @bucket = "my_bucket" - @key = "key/here" - @stubbedPath = "/var/somewhere/path" - @format = "png" - @formattedStubbedPath = "#{@stubbedPath}.#{@format}" - - describe "insertFile", -> - beforeEach -> - @stream = {} - @PersistorManager.deleteDirectory.callsArgWith(2) - @PersistorManager.sendStream.callsArgWith(3) - - it "should send file to the filestore", (done)-> - @handler.insertFile @bucket, @key, @stream, => - @PersistorManager.sendStream.calledWith(@bucket, @key, @stream).should.equal true - done() - - it "should delete the convetedKey folder", (done)-> - 
@keyBuilder.getConvertedFolderKey.returns(@stubbedConvetedKey) - @handler.insertFile @bucket, @key, @stream, => - @PersistorManager.deleteDirectory.calledWith(@bucket, @stubbedConvetedKey).should.equal true - done() - - describe "deleteFile", -> - beforeEach -> - @keyBuilder.getConvertedFolderKey.returns(@stubbedConvetedKey) - @PersistorManager.deleteFile.callsArgWith(2) - @PersistorManager.deleteDirectory.callsArgWith(2) - - it "should tell the filestore manager to delete the file", (done)-> - @handler.deleteFile @bucket, @key, => - @PersistorManager.deleteFile.calledWith(@bucket, @key).should.equal true - done() - - it "should tell the filestore manager to delete the cached foler", (done)-> - @handler.deleteFile @bucket, @key, => - @PersistorManager.deleteDirectory.calledWith(@bucket, @stubbedConvetedKey).should.equal true - done() - - describe "getFile", -> - beforeEach -> - @handler._getStandardFile = sinon.stub().callsArgWith(3) - @handler._getConvertedFile = sinon.stub().callsArgWith(3) - - it "should call _getStandardFile if no format or style are defined", (done)-> - - @handler.getFile @bucket, @key, null, => - @handler._getStandardFile.called.should.equal true - @handler._getConvertedFile.called.should.equal false - done() - - it "should pass options to _getStandardFile", (done) -> - options = {start: 0, end: 8} - @handler.getFile @bucket, @key, options, => - expect(@handler._getStandardFile.lastCall.args[2].start).to.equal 0 - expect(@handler._getStandardFile.lastCall.args[2].end).to.equal 8 - done() - - it "should call _getConvertedFile if a format is defined", (done)-> - @handler.getFile @bucket, @key, format:"png", => - @handler._getStandardFile.called.should.equal false - @handler._getConvertedFile.called.should.equal true - done() - - describe "_getStandardFile", -> - - beforeEach -> - @fileStream = {on:->} - @PersistorManager.getFileStream.callsArgWith(3, "err", @fileStream) - - it "should get the stream", (done)-> - @handler.getFile @bucket, @key, null, => - @PersistorManager.getFileStream.calledWith(@bucket, @key).should.equal true - done() - - it "should return the stream and error", (done)-> - @handler.getFile @bucket, @key, null, (err, stream)=> - err.should.equal "err" - stream.should.equal @fileStream - done() - - it "should pass options to PersistorManager", (done) -> - @handler.getFile @bucket, @key, {start: 0, end: 8}, => - expect(@PersistorManager.getFileStream.lastCall.args[2].start).to.equal 0 - expect(@PersistorManager.getFileStream.lastCall.args[2].end).to.equal 8 - done() - - - describe "_getConvertedFile", -> - - it "should getFileStream if it does exists", (done)-> - @PersistorManager.checkIfFileExists.callsArgWith(2, null, true) - @PersistorManager.getFileStream.callsArgWith(3) - @handler._getConvertedFile @bucket, @key, {}, => - @PersistorManager.getFileStream.calledWith(@bucket).should.equal true - done() - - it "should call _getConvertedFileAndCache if it does exists", (done)-> - @PersistorManager.checkIfFileExists.callsArgWith(2, null, false) - @handler._getConvertedFileAndCache = sinon.stub().callsArgWith(4) - @handler._getConvertedFile @bucket, @key, {}, => - @handler._getConvertedFileAndCache.calledWith(@bucket, @key).should.equal true - done() - - describe "_getConvertedFileAndCache", -> - - it "should _convertFile ", (done)-> - @stubbedStream = {"something":"here"} - @localStream = { - on: -> + } } - @PersistorManager.sendFile = sinon.stub().callsArgWith(3) - @LocalFileWriter.getStream = sinon.stub().callsArgWith(1, null, @localStream) - 
@convetedKey = @key+"converted" - @handler._convertFile = sinon.stub().callsArgWith(3, null, @stubbedPath) - @ImageOptimiser.compressPng = sinon.stub().callsArgWith(1) - @handler._getConvertedFileAndCache @bucket, @key, @convetedKey, {}, (err, fsStream)=> - @handler._convertFile.called.should.equal true - @PersistorManager.sendFile.calledWith(@bucket, @convetedKey, @stubbedPath).should.equal true - @ImageOptimiser.compressPng.calledWith(@stubbedPath).should.equal true - @LocalFileWriter.getStream.calledWith(@stubbedPath).should.equal true - fsStream.should.equal @localStream - done() + }; + this.PersistorManager = { + getFileStream: sinon.stub(), + checkIfFileExists: sinon.stub(), + deleteFile: sinon.stub(), + deleteDirectory: sinon.stub(), + sendStream: sinon.stub(), + insertFile: sinon.stub(), + directorySize: sinon.stub() + }; + this.LocalFileWriter = { + writeStream: sinon.stub(), + getStream: sinon.stub(), + deleteFile: sinon.stub() + }; + this.FileConverter = { + convert: sinon.stub(), + thumbnail: sinon.stub(), + preview: sinon.stub() + }; + this.keyBuilder = { + addCachingToKey: sinon.stub(), + getConvertedFolderKey: sinon.stub() + }; + this.ImageOptimiser = + {compressPng: sinon.stub()}; + this.handler = SandboxedModule.require(modulePath, { requires: { + "settings-sharelatex": this.settings, + "./PersistorManager":this.PersistorManager, + "./LocalFileWriter":this.LocalFileWriter, + "./FileConverter":this.FileConverter, + "./KeyBuilder": this.keyBuilder, + "./ImageOptimiser":this.ImageOptimiser, + "logger-sharelatex": { + log() {}, + err() {} + } + } + } + ); + this.bucket = "my_bucket"; + this.key = "key/here"; + this.stubbedPath = "/var/somewhere/path"; + this.format = "png"; + return this.formattedStubbedPath = `${this.stubbedPath}.${this.format}`; + }); - describe "_convertFile", -> - beforeEach -> - @FileConverter.convert.callsArgWith(2, null, @formattedStubbedPath) - @FileConverter.thumbnail.callsArgWith(1, null, @formattedStubbedPath) - @FileConverter.preview.callsArgWith(1, null, @formattedStubbedPath) - @handler._writeS3FileToDisk = sinon.stub().callsArgWith(3, null, @stubbedPath) - @LocalFileWriter.deleteFile.callsArgWith(1) + describe("insertFile", function() { + beforeEach(function() { + this.stream = {}; + this.PersistorManager.deleteDirectory.callsArgWith(2); + return this.PersistorManager.sendStream.callsArgWith(3); + }); - it "should call thumbnail on the writer path if style was thumbnail was specified", (done)-> - @handler._convertFile @bucket, @key, style:"thumbnail", (err, path)=> - path.should.equal @formattedStubbedPath - @FileConverter.thumbnail.calledWith(@stubbedPath).should.equal true - @LocalFileWriter.deleteFile.calledWith(@stubbedPath).should.equal true - done() + it("should send file to the filestore", function(done){ + return this.handler.insertFile(this.bucket, this.key, this.stream, () => { + this.PersistorManager.sendStream.calledWith(this.bucket, this.key, this.stream).should.equal(true); + return done(); + }); + }); - it "should call preview on the writer path if style was preview was specified", (done)-> - @handler._convertFile @bucket, @key, style:"preview", (err, path)=> - path.should.equal @formattedStubbedPath - @FileConverter.preview.calledWith(@stubbedPath).should.equal true - @LocalFileWriter.deleteFile.calledWith(@stubbedPath).should.equal true - done() + return it("should delete the convetedKey folder", function(done){ + this.keyBuilder.getConvertedFolderKey.returns(this.stubbedConvetedKey); + return 
this.handler.insertFile(this.bucket, this.key, this.stream, () => { + this.PersistorManager.deleteDirectory.calledWith(this.bucket, this.stubbedConvetedKey).should.equal(true); + return done(); + }); + }); + }); - it "should call convert on the writer path if a format was specified", (done)-> - @handler._convertFile @bucket, @key, format:@format, (err, path)=> - path.should.equal @formattedStubbedPath - @FileConverter.convert.calledWith(@stubbedPath, @format).should.equal true - @LocalFileWriter.deleteFile.calledWith(@stubbedPath).should.equal true - done() + describe("deleteFile", function() { + beforeEach(function() { + this.keyBuilder.getConvertedFolderKey.returns(this.stubbedConvetedKey); + this.PersistorManager.deleteFile.callsArgWith(2); + return this.PersistorManager.deleteDirectory.callsArgWith(2); + }); - describe "getDirectorySize", -> + it("should tell the filestore manager to delete the file", function(done){ + return this.handler.deleteFile(this.bucket, this.key, () => { + this.PersistorManager.deleteFile.calledWith(this.bucket, this.key).should.equal(true); + return done(); + }); + }); - beforeEach -> - @PersistorManager.directorySize.callsArgWith(2) + return it("should tell the filestore manager to delete the cached foler", function(done){ + return this.handler.deleteFile(this.bucket, this.key, () => { + this.PersistorManager.deleteDirectory.calledWith(this.bucket, this.stubbedConvetedKey).should.equal(true); + return done(); + }); + }); + }); - it "should call the filestore manager to get directory size", (done)-> - @handler.getDirectorySize @bucket, @key, => - @PersistorManager.directorySize.calledWith(@bucket, @key).should.equal true - done() + describe("getFile", function() { + beforeEach(function() { + this.handler._getStandardFile = sinon.stub().callsArgWith(3); + return this.handler._getConvertedFile = sinon.stub().callsArgWith(3); + }); + + it("should call _getStandardFile if no format or style are defined", function(done){ + + return this.handler.getFile(this.bucket, this.key, null, () => { + this.handler._getStandardFile.called.should.equal(true); + this.handler._getConvertedFile.called.should.equal(false); + return done(); + }); + }); + + it("should pass options to _getStandardFile", function(done) { + const options = {start: 0, end: 8}; + return this.handler.getFile(this.bucket, this.key, options, () => { + expect(this.handler._getStandardFile.lastCall.args[2].start).to.equal(0); + expect(this.handler._getStandardFile.lastCall.args[2].end).to.equal(8); + return done(); + }); + }); + + return it("should call _getConvertedFile if a format is defined", function(done){ + return this.handler.getFile(this.bucket, this.key, {format:"png"}, () => { + this.handler._getStandardFile.called.should.equal(false); + this.handler._getConvertedFile.called.should.equal(true); + return done(); + }); + }); + }); + + describe("_getStandardFile", function() { + + beforeEach(function() { + this.fileStream = {on() {}}; + return this.PersistorManager.getFileStream.callsArgWith(3, "err", this.fileStream); + }); + + it("should get the stream", function(done){ + return this.handler.getFile(this.bucket, this.key, null, () => { + this.PersistorManager.getFileStream.calledWith(this.bucket, this.key).should.equal(true); + return done(); + }); + }); + + it("should return the stream and error", function(done){ + return this.handler.getFile(this.bucket, this.key, null, (err, stream)=> { + err.should.equal("err"); + stream.should.equal(this.fileStream); + return done(); + }); + }); + + return 
it("should pass options to PersistorManager", function(done) { + return this.handler.getFile(this.bucket, this.key, {start: 0, end: 8}, () => { + expect(this.PersistorManager.getFileStream.lastCall.args[2].start).to.equal(0); + expect(this.PersistorManager.getFileStream.lastCall.args[2].end).to.equal(8); + return done(); + }); + }); + }); + + + describe("_getConvertedFile", function() { + + it("should getFileStream if it does exists", function(done){ + this.PersistorManager.checkIfFileExists.callsArgWith(2, null, true); + this.PersistorManager.getFileStream.callsArgWith(3); + return this.handler._getConvertedFile(this.bucket, this.key, {}, () => { + this.PersistorManager.getFileStream.calledWith(this.bucket).should.equal(true); + return done(); + }); + }); + + return it("should call _getConvertedFileAndCache if it does exists", function(done){ + this.PersistorManager.checkIfFileExists.callsArgWith(2, null, false); + this.handler._getConvertedFileAndCache = sinon.stub().callsArgWith(4); + return this.handler._getConvertedFile(this.bucket, this.key, {}, () => { + this.handler._getConvertedFileAndCache.calledWith(this.bucket, this.key).should.equal(true); + return done(); + }); + }); + }); + + describe("_getConvertedFileAndCache", () => it("should _convertFile ", function(done){ + this.stubbedStream = {"something":"here"}; + this.localStream = { + on() {} + }; + this.PersistorManager.sendFile = sinon.stub().callsArgWith(3); + this.LocalFileWriter.getStream = sinon.stub().callsArgWith(1, null, this.localStream); + this.convetedKey = this.key+"converted"; + this.handler._convertFile = sinon.stub().callsArgWith(3, null, this.stubbedPath); + this.ImageOptimiser.compressPng = sinon.stub().callsArgWith(1); + return this.handler._getConvertedFileAndCache(this.bucket, this.key, this.convetedKey, {}, (err, fsStream)=> { + this.handler._convertFile.called.should.equal(true); + this.PersistorManager.sendFile.calledWith(this.bucket, this.convetedKey, this.stubbedPath).should.equal(true); + this.ImageOptimiser.compressPng.calledWith(this.stubbedPath).should.equal(true); + this.LocalFileWriter.getStream.calledWith(this.stubbedPath).should.equal(true); + fsStream.should.equal(this.localStream); + return done(); + }); + })); + + describe("_convertFile", function() { + beforeEach(function() { + this.FileConverter.convert.callsArgWith(2, null, this.formattedStubbedPath); + this.FileConverter.thumbnail.callsArgWith(1, null, this.formattedStubbedPath); + this.FileConverter.preview.callsArgWith(1, null, this.formattedStubbedPath); + this.handler._writeS3FileToDisk = sinon.stub().callsArgWith(3, null, this.stubbedPath); + return this.LocalFileWriter.deleteFile.callsArgWith(1); + }); + + it("should call thumbnail on the writer path if style was thumbnail was specified", function(done){ + return this.handler._convertFile(this.bucket, this.key, {style:"thumbnail"}, (err, path)=> { + path.should.equal(this.formattedStubbedPath); + this.FileConverter.thumbnail.calledWith(this.stubbedPath).should.equal(true); + this.LocalFileWriter.deleteFile.calledWith(this.stubbedPath).should.equal(true); + return done(); + }); + }); + + it("should call preview on the writer path if style was preview was specified", function(done){ + return this.handler._convertFile(this.bucket, this.key, {style:"preview"}, (err, path)=> { + path.should.equal(this.formattedStubbedPath); + this.FileConverter.preview.calledWith(this.stubbedPath).should.equal(true); + this.LocalFileWriter.deleteFile.calledWith(this.stubbedPath).should.equal(true); + 
return done(); + }); + }); + + return it("should call convert on the writer path if a format was specified", function(done){ + return this.handler._convertFile(this.bucket, this.key, {format:this.format}, (err, path)=> { + path.should.equal(this.formattedStubbedPath); + this.FileConverter.convert.calledWith(this.stubbedPath, this.format).should.equal(true); + this.LocalFileWriter.deleteFile.calledWith(this.stubbedPath).should.equal(true); + return done(); + }); + }); + }); + + return describe("getDirectorySize", function() { + + beforeEach(function() { + return this.PersistorManager.directorySize.callsArgWith(2); + }); + + return it("should call the filestore manager to get directory size", function(done){ + return this.handler.getDirectorySize(this.bucket, this.key, () => { + this.PersistorManager.directorySize.calledWith(this.bucket, this.key).should.equal(true); + return done(); + }); + }); + }); +}); diff --git a/services/filestore/test/unit/coffee/ImageOptimiserTests.js b/services/filestore/test/unit/coffee/ImageOptimiserTests.js index 398eacf70f..6074120a56 100644 --- a/services/filestore/test/unit/coffee/ImageOptimiserTests.js +++ b/services/filestore/test/unit/coffee/ImageOptimiserTests.js @@ -1,64 +1,84 @@ -assert = require("chai").assert -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../app/js/ImageOptimiser.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { + assert +} = require("chai"); +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../app/js/ImageOptimiser.js"; +const SandboxedModule = require('sandboxed-module'); -describe "ImageOptimiser", -> +describe("ImageOptimiser", function() { - beforeEach -> - @child_process = - exec : sinon.stub() - @settings = - enableConversions:true - @optimiser = SandboxedModule.require modulePath, requires: - 'child_process': @child_process - "logger-sharelatex": - log:-> - err:-> - warn:-> - "settings-sharelatex": @settings + beforeEach(function() { + this.child_process = + {exec : sinon.stub()}; + this.settings = + {enableConversions:true}; + this.optimiser = SandboxedModule.require(modulePath, { requires: { + 'child_process': this.child_process, + "logger-sharelatex": { + log() {}, + err() {}, + warn() {} + }, + "settings-sharelatex": this.settings + } + } + ); - @sourcePath = "/this/path/here.eps" - @error = "Error" + this.sourcePath = "/this/path/here.eps"; + return this.error = "Error"; + }); - describe "compressPng", -> + describe("compressPng", function() { - it "convert the file", (done)-> - @child_process.exec.callsArgWith(2) - @optimiser.compressPng @sourcePath, (err)=> - args = @child_process.exec.args[0][0] - args.should.equal "optipng #{@sourcePath}" - done() + it("convert the file", function(done){ + this.child_process.exec.callsArgWith(2); + return this.optimiser.compressPng(this.sourcePath, err=> { + const args = this.child_process.exec.args[0][0]; + args.should.equal(`optipng ${this.sourcePath}`); + return done(); + }); + }); - it "should return the error", (done)-> - @child_process.exec.callsArgWith(2, @error) - @optimiser.compressPng @sourcePath, (err)=> - err.should.equal @error - done() + return it("should return the error", function(done){ + 
this.child_process.exec.callsArgWith(2, this.error); + return this.optimiser.compressPng(this.sourcePath, err=> { + err.should.equal(this.error); + return done(); + }); + }); + }); - describe 'when enableConversions is disabled', -> - - it 'should produce an error', (done) -> - @settings.enableConversions = false - @child_process.exec.callsArgWith(2) - @optimiser.compressPng @sourcePath, (err)=> - @child_process.exec.called.should.equal false - expect(err).to.exist - done() + describe('when enableConversions is disabled', () => it('should produce an error', function(done) { + this.settings.enableConversions = false; + this.child_process.exec.callsArgWith(2); + return this.optimiser.compressPng(this.sourcePath, err=> { + this.child_process.exec.called.should.equal(false); + expect(err).to.exist; + return done(); + }); + })); - describe 'when optimiser is sigkilled', -> - - it 'should not produce an error', (done) -> - @error = new Error('woops') - @error.signal = 'SIGKILL' - @child_process.exec.callsArgWith(2, @error) - @optimiser.compressPng @sourcePath, (err)=> - expect(err).to.equal(null) - done() + return describe('when optimiser is sigkilled', () => it('should not produce an error', function(done) { + this.error = new Error('woops'); + this.error.signal = 'SIGKILL'; + this.child_process.exec.callsArgWith(2, this.error); + return this.optimiser.compressPng(this.sourcePath, err=> { + expect(err).to.equal(null); + return done(); + }); + })); +}); diff --git a/services/filestore/test/unit/coffee/KeybuilderTests.js b/services/filestore/test/unit/coffee/KeybuilderTests.js index 3ab2dd037a..063a278f23 100644 --- a/services/filestore/test/unit/coffee/KeybuilderTests.js +++ b/services/filestore/test/unit/coffee/KeybuilderTests.js @@ -1,39 +1,59 @@ +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ -assert = require("chai").assert -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../app/js/KeyBuilder.js" -SandboxedModule = require('sandboxed-module') +const { + assert +} = require("chai"); +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../app/js/KeyBuilder.js"; +const SandboxedModule = require('sandboxed-module'); -describe "LocalFileWriter", -> +describe("LocalFileWriter", function() { - beforeEach -> + beforeEach(function() { - @keyBuilder = SandboxedModule.require modulePath, requires: - "logger-sharelatex": - log:-> - err:-> - @key = "123/456" + this.keyBuilder = SandboxedModule.require(modulePath, { requires: { + "logger-sharelatex": { + log() {}, + err() {} + } + } + } + ); + return this.key = "123/456"; + }); - describe "cachedKey", -> + return describe("cachedKey", function() { - it "should add the fomat on", -> - opts = + it("should add the fomat on", function() { + const opts = + {format: "png"}; + const newKey = this.keyBuilder.addCachingToKey(this.key, opts); + return newKey.should.equal(`${this.key}-converted-cache/format-png`); + }); + + it("should add the style on", function() { + const opts = + {style: "thumbnail"}; + const newKey = this.keyBuilder.addCachingToKey(this.key, opts); + return newKey.should.equal(`${this.key}-converted-cache/style-thumbnail`); + }); + + return it("should add format on first", function() { + const opts = { + style: "thumbnail", format: 
"png" - newKey = @keyBuilder.addCachingToKey @key, opts - newKey.should.equal "#{@key}-converted-cache/format-png" - - it "should add the style on", -> - opts = - style: "thumbnail" - newKey = @keyBuilder.addCachingToKey @key, opts - newKey.should.equal "#{@key}-converted-cache/style-thumbnail" - - it "should add format on first", -> - opts = - style: "thumbnail" - format: "png" - newKey = @keyBuilder.addCachingToKey @key, opts - newKey.should.equal "#{@key}-converted-cache/format-png-style-thumbnail" + }; + const newKey = this.keyBuilder.addCachingToKey(this.key, opts); + return newKey.should.equal(`${this.key}-converted-cache/format-png-style-thumbnail`); + }); + }); +}); diff --git a/services/filestore/test/unit/coffee/LocalFileWriterTests.js b/services/filestore/test/unit/coffee/LocalFileWriterTests.js index 55cf1f551f..ed3eb2dfa3 100644 --- a/services/filestore/test/unit/coffee/LocalFileWriterTests.js +++ b/services/filestore/test/unit/coffee/LocalFileWriterTests.js @@ -1,83 +1,121 @@ +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ -assert = require("chai").assert -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../app/js/LocalFileWriter.js" -SandboxedModule = require('sandboxed-module') +const { + assert +} = require("chai"); +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../app/js/LocalFileWriter.js"; +const SandboxedModule = require('sandboxed-module'); -describe "LocalFileWriter", -> +describe("LocalFileWriter", function() { - beforeEach -> + beforeEach(function() { - @writeStream = - on: (type, cb)-> - if type == "finish" - cb() - @readStream = - on: -> - @fs = - createWriteStream : sinon.stub().returns(@writeStream) - createReadStream: sinon.stub().returns(@readStream) + this.writeStream = { + on(type, cb){ + if (type === "finish") { + return cb(); + } + } + }; + this.readStream = + {on() {}}; + this.fs = { + createWriteStream : sinon.stub().returns(this.writeStream), + createReadStream: sinon.stub().returns(this.readStream), unlink: sinon.stub() - @settings = - path: + }; + this.settings = { + path: { uploadFolder:"somewhere" - @writer = SandboxedModule.require modulePath, requires: - "fs": @fs - "logger-sharelatex": - log:-> - err:-> - "settings-sharelatex":@settings - "metrics-sharelatex": - inc:-> - Timer:-> - done:-> + } + }; + this.writer = SandboxedModule.require(modulePath, { requires: { + "fs": this.fs, + "logger-sharelatex": { + log() {}, + err() {} + }, + "settings-sharelatex":this.settings, + "metrics-sharelatex": { + inc() {}, + Timer() { + return {done() {}}; + } + } + } + } + ); - @stubbedFsPath = "something/uploads/eio2k1j3" + return this.stubbedFsPath = "something/uploads/eio2k1j3"; + }); - describe "writeStrem", -> - beforeEach -> - @writer._getPath = sinon.stub().returns(@stubbedFsPath) + describe("writeStrem", function() { + beforeEach(function() { + return this.writer._getPath = sinon.stub().returns(this.stubbedFsPath); + }); - it "write the stream to ./uploads", (done)-> - stream = - pipe: (dest)=> - dest.should.equal @writeStream - done() - on: -> - @writer.writeStream stream, null, ()=> + it("write the stream to ./uploads", function(done){ + const stream = { + pipe: dest=> { + dest.should.equal(this.writeStream); + return done(); + }, 
+ on() {} + }; + return this.writer.writeStream(stream, null, ()=> {}); + }); - it "should send the path in the callback", (done)-> - stream = - pipe: (dest)=> - on: (type, cb)-> - if type == "end" - cb() - @writer.writeStream stream, null, (err, fsPath)=> - fsPath.should.equal @stubbedFsPath - done() + return it("should send the path in the callback", function(done){ + const stream = { + pipe: dest=> {}, + on(type, cb){ + if (type === "end") { + return cb(); + } + } + }; + return this.writer.writeStream(stream, null, (err, fsPath)=> { + fsPath.should.equal(this.stubbedFsPath); + return done(); + }); + }); + }); - describe "getStream", -> + describe("getStream", function() { - it "should read the stream from the file ", (done)-> - @writer.getStream @stubbedFsPath, (err, stream)=> - @fs.createReadStream.calledWith(@stubbedFsPath).should.equal true - done() + it("should read the stream from the file ", function(done){ + return this.writer.getStream(this.stubbedFsPath, (err, stream)=> { + this.fs.createReadStream.calledWith(this.stubbedFsPath).should.equal(true); + return done(); + }); + }); - it "should send the stream in the callback", (done)-> - @writer.getStream @stubbedFsPath, (err, readStream)=> - readStream.should.equal @readStream - done() + return it("should send the stream in the callback", function(done){ + return this.writer.getStream(this.stubbedFsPath, (err, readStream)=> { + readStream.should.equal(this.readStream); + return done(); + }); + }); + }); - describe "delete file", -> - - it "should unlink the file", (done)-> - error = "my error" - @fs.unlink.callsArgWith(1, error) - @writer.deleteFile @stubbedFsPath, (err)=> - @fs.unlink.calledWith(@stubbedFsPath).should.equal true - err.should.equal error - done() + return describe("delete file", () => it("should unlink the file", function(done){ + const error = "my error"; + this.fs.unlink.callsArgWith(1, error); + return this.writer.deleteFile(this.stubbedFsPath, err=> { + this.fs.unlink.calledWith(this.stubbedFsPath).should.equal(true); + err.should.equal(error); + return done(); + }); + })); +}); diff --git a/services/filestore/test/unit/coffee/PersistorManagerTests.js b/services/filestore/test/unit/coffee/PersistorManagerTests.js index 3c4ca329e1..d5b859448a 100644 --- a/services/filestore/test/unit/coffee/PersistorManagerTests.js +++ b/services/filestore/test/unit/coffee/PersistorManagerTests.js @@ -1,101 +1,131 @@ -logger = require("logger-sharelatex") -assert = require("chai").assert -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../app/js/PersistorManager.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const logger = require("logger-sharelatex"); +const { + assert +} = require("chai"); +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../app/js/PersistorManager.js"; +const SandboxedModule = require('sandboxed-module'); -describe "PersistorManagerTests", -> +describe("PersistorManagerTests", function() { - beforeEach -> - @S3PersistorManager = - getFileStream: sinon.stub() - checkIfFileExists: sinon.stub() - deleteFile: sinon.stub() - deleteDirectory: sinon.stub() - sendStream: sinon.stub() + beforeEach(function() { + return this.S3PersistorManager = { + 
getFileStream: sinon.stub(), + checkIfFileExists: sinon.stub(), + deleteFile: sinon.stub(), + deleteDirectory: sinon.stub(), + sendStream: sinon.stub(), insertFile: sinon.stub() + }; + }); - describe "test s3 mixin", -> - beforeEach -> - @settings = - filestore: + describe("test s3 mixin", function() { + beforeEach(function() { + this.settings = { + filestore: { backend: "s3" - @requires = - "./S3PersistorManager": @S3PersistorManager - "settings-sharelatex": @settings - "logger-sharelatex": - log:-> - err:-> - @PersistorManager = SandboxedModule.require modulePath, requires: @requires + } + }; + this.requires = { + "./S3PersistorManager": this.S3PersistorManager, + "settings-sharelatex": this.settings, + "logger-sharelatex": { + log() {}, + err() {} + } + }; + return this.PersistorManager = SandboxedModule.require(modulePath, {requires: this.requires}); + }); - it "should load getFileStream", (done) -> - @PersistorManager.should.respondTo("getFileStream") - @PersistorManager.getFileStream() - @S3PersistorManager.getFileStream.calledOnce.should.equal true - done() + it("should load getFileStream", function(done) { + this.PersistorManager.should.respondTo("getFileStream"); + this.PersistorManager.getFileStream(); + this.S3PersistorManager.getFileStream.calledOnce.should.equal(true); + return done(); + }); - it "should load checkIfFileExists", (done) -> - @PersistorManager.checkIfFileExists() - @S3PersistorManager.checkIfFileExists.calledOnce.should.equal true - done() + it("should load checkIfFileExists", function(done) { + this.PersistorManager.checkIfFileExists(); + this.S3PersistorManager.checkIfFileExists.calledOnce.should.equal(true); + return done(); + }); - it "should load deleteFile", (done) -> - @PersistorManager.deleteFile() - @S3PersistorManager.deleteFile.calledOnce.should.equal true - done() + it("should load deleteFile", function(done) { + this.PersistorManager.deleteFile(); + this.S3PersistorManager.deleteFile.calledOnce.should.equal(true); + return done(); + }); - it "should load deleteDirectory", (done) -> - @PersistorManager.deleteDirectory() - @S3PersistorManager.deleteDirectory.calledOnce.should.equal true - done() + it("should load deleteDirectory", function(done) { + this.PersistorManager.deleteDirectory(); + this.S3PersistorManager.deleteDirectory.calledOnce.should.equal(true); + return done(); + }); - it "should load sendStream", (done) -> - @PersistorManager.sendStream() - @S3PersistorManager.sendStream.calledOnce.should.equal true - done() + it("should load sendStream", function(done) { + this.PersistorManager.sendStream(); + this.S3PersistorManager.sendStream.calledOnce.should.equal(true); + return done(); + }); - it "should load insertFile", (done) -> - @PersistorManager.insertFile() - @S3PersistorManager.insertFile.calledOnce.should.equal true - done() + return it("should load insertFile", function(done) { + this.PersistorManager.insertFile(); + this.S3PersistorManager.insertFile.calledOnce.should.equal(true); + return done(); + }); + }); - describe "test unspecified mixins", -> + describe("test unspecified mixins", () => it("should load s3 when no wrapper specified", function(done) { + this.settings = {filestore:{}}; + this.requires = { + "./S3PersistorManager": this.S3PersistorManager, + "settings-sharelatex": this.settings, + "logger-sharelatex": { + log() {}, + err() {} + } + }; + this.PersistorManager = SandboxedModule.require(modulePath, {requires: this.requires}); + this.PersistorManager.should.respondTo("getFileStream"); + 
this.PersistorManager.getFileStream(); + this.S3PersistorManager.getFileStream.calledOnce.should.equal(true); + return done(); + })); - it "should load s3 when no wrapper specified", (done) -> - @settings = {filestore:{}} - @requires = - "./S3PersistorManager": @S3PersistorManager - "settings-sharelatex": @settings - "logger-sharelatex": - log:-> - err:-> - @PersistorManager = SandboxedModule.require modulePath, requires: @requires - @PersistorManager.should.respondTo("getFileStream") - @PersistorManager.getFileStream() - @S3PersistorManager.getFileStream.calledOnce.should.equal true - done() - - describe "test invalid mixins", -> - it "should not load an invalid wrapper", (done) -> - @settings = - filestore: - backend:"magic" - @requires = - "./S3PersistorManager": @S3PersistorManager - "settings-sharelatex": @settings - "logger-sharelatex": - log:-> - err:-> - @fsWrapper=null - try - @PersistorManager=SandboxedModule.require modulePath, requires: @requires - catch error - assert.equal("Unknown filestore backend: magic",error.message) - assert.isNull(@fsWrapper) - done() + return describe("test invalid mixins", () => it("should not load an invalid wrapper", function(done) { + this.settings = { + filestore: { + backend:"magic" + } + }; + this.requires = { + "./S3PersistorManager": this.S3PersistorManager, + "settings-sharelatex": this.settings, + "logger-sharelatex": { + log() {}, + err() {} + } + }; + this.fsWrapper=null; + try { + this.PersistorManager=SandboxedModule.require(modulePath, {requires: this.requires}); + } catch (error) { + assert.equal("Unknown filestore backend: magic",error.message); + } + assert.isNull(this.fsWrapper); + return done(); + })); +}); diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.js b/services/filestore/test/unit/coffee/S3PersistorManagerTests.js index a5ab5c2932..4396ce9606 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.js +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.js @@ -1,343 +1,447 @@ -assert = require("chai").assert -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../app/js/S3PersistorManager.js" -SandboxedModule = require('sandboxed-module') +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { + assert +} = require("chai"); +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../app/js/S3PersistorManager.js"; +const SandboxedModule = require('sandboxed-module'); -describe "S3PersistorManagerTests", -> +describe("S3PersistorManagerTests", function() { - beforeEach -> - @settings = - filestore: - backend: "s3" - s3: - secret: "secret" + beforeEach(function() { + this.settings = { + filestore: { + backend: "s3", + s3: { + secret: "secret", key: "this_key" - stores: + }, + stores: { user_files:"sl_user_files" - @knoxClient = - putFile:sinon.stub() - copyFile:sinon.stub() - list: sinon.stub() - deleteMultiple: sinon.stub() - get: sinon.stub() - @knox = - createClient: sinon.stub().returns(@knoxClient) - @s3EventHandlers = {} - @s3Request = - on: sinon.stub().callsFake (event, callback) => - @s3EventHandlers[event] = callback - send: sinon.stub() - @s3Response = - httpResponse: - createUnbufferedStream: 
sinon.stub() - @s3Client = - copyObject: sinon.stub() - headObject: sinon.stub() - getObject: sinon.stub().returns(@s3Request) - @awsS3 = sinon.stub().returns(@s3Client) - @LocalFileWriter = - writeStream: sinon.stub() - deleteFile: sinon.stub() - @request = sinon.stub() - @requires = - "knox": @knox - "aws-sdk/clients/s3": @awsS3 - "settings-sharelatex": @settings - "./LocalFileWriter":@LocalFileWriter - "logger-sharelatex": - log:-> - err:-> - "request": @request - "./Errors": @Errors = - NotFoundError: sinon.stub() - @key = "my/key" - @bucketName = "my-bucket" - @error = "my errror" - @S3PersistorManager = SandboxedModule.require modulePath, requires: @requires - - describe "getFileStream", -> - describe "success", -> - beforeEach () -> - @expectedStream = { expectedStream: true } - @expectedStream.on = sinon.stub() - @s3Request.send.callsFake () => - @s3EventHandlers.httpHeaders(200, {}, @s3Response, "OK") - @s3Response.httpResponse.createUnbufferedStream.returns(@expectedStream) - - it "returns a stream", (done) -> - @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => - if err? - return done(err) - expect(stream).to.equal(@expectedStream) - done() - - it "sets the AWS client up with credentials from settings", (done) -> - @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => - if err? - return done(err) - expect(@awsS3.lastCall.args).to.deep.equal([{ - credentials: - accessKeyId: @settings.filestore.s3.key - secretAccessKey: @settings.filestore.s3.secret - }]) - done() - - it "fetches the right key from the right bucket", (done) -> - @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => - if err? - return done(err) - expect(@s3Client.getObject.lastCall.args).to.deep.equal([{ - Bucket: @bucketName, - Key: @key - }]) - done() - - it "accepts alternative credentials", (done) -> - accessKeyId = "that_key" - secret = "that_secret" - opts = { - credentials: - auth_key: accessKeyId - auth_secret: secret } - @S3PersistorManager.getFileStream @bucketName, @key, opts, (err, stream) => - if err? 
- return done(err) - expect(@awsS3.lastCall.args).to.deep.equal([{ - credentials: - accessKeyId: accessKeyId + } + }; + this.knoxClient = { + putFile:sinon.stub(), + copyFile:sinon.stub(), + list: sinon.stub(), + deleteMultiple: sinon.stub(), + get: sinon.stub() + }; + this.knox = + {createClient: sinon.stub().returns(this.knoxClient)}; + this.s3EventHandlers = {}; + this.s3Request = { + on: sinon.stub().callsFake((event, callback) => { + return this.s3EventHandlers[event] = callback; + }), + send: sinon.stub() + }; + this.s3Response = { + httpResponse: { + createUnbufferedStream: sinon.stub() + } + }; + this.s3Client = { + copyObject: sinon.stub(), + headObject: sinon.stub(), + getObject: sinon.stub().returns(this.s3Request) + }; + this.awsS3 = sinon.stub().returns(this.s3Client); + this.LocalFileWriter = { + writeStream: sinon.stub(), + deleteFile: sinon.stub() + }; + this.request = sinon.stub(); + this.requires = { + "knox": this.knox, + "aws-sdk/clients/s3": this.awsS3, + "settings-sharelatex": this.settings, + "./LocalFileWriter":this.LocalFileWriter, + "logger-sharelatex": { + log() {}, + err() {} + }, + "request": this.request, + "./Errors": (this.Errors = + {NotFoundError: sinon.stub()}) + }; + this.key = "my/key"; + this.bucketName = "my-bucket"; + this.error = "my errror"; + return this.S3PersistorManager = SandboxedModule.require(modulePath, {requires: this.requires}); + }); + + describe("getFileStream", function() { + describe("success", function() { + beforeEach(function() { + this.expectedStream = { expectedStream: true }; + this.expectedStream.on = sinon.stub(); + this.s3Request.send.callsFake(() => { + return this.s3EventHandlers.httpHeaders(200, {}, this.s3Response, "OK"); + }); + return this.s3Response.httpResponse.createUnbufferedStream.returns(this.expectedStream); + }); + + it("returns a stream", function(done) { + return this.S3PersistorManager.getFileStream(this.bucketName, this.key, {}, (err, stream) => { + if (err != null) { + return done(err); + } + expect(stream).to.equal(this.expectedStream); + return done(); + }); + }); + + it("sets the AWS client up with credentials from settings", function(done) { + return this.S3PersistorManager.getFileStream(this.bucketName, this.key, {}, (err, stream) => { + if (err != null) { + return done(err); + } + expect(this.awsS3.lastCall.args).to.deep.equal([{ + credentials: { + accessKeyId: this.settings.filestore.s3.key, + secretAccessKey: this.settings.filestore.s3.secret + } + }]); + return done(); + }); + }); + + it("fetches the right key from the right bucket", function(done) { + return this.S3PersistorManager.getFileStream(this.bucketName, this.key, {}, (err, stream) => { + if (err != null) { + return done(err); + } + expect(this.s3Client.getObject.lastCall.args).to.deep.equal([{ + Bucket: this.bucketName, + Key: this.key + }]); + return done(); + }); + }); + + it("accepts alternative credentials", function(done) { + const accessKeyId = "that_key"; + const secret = "that_secret"; + const opts = { + credentials: { + auth_key: accessKeyId, + auth_secret: secret + } + }; + return this.S3PersistorManager.getFileStream(this.bucketName, this.key, opts, (err, stream) => { + if (err != null) { + return done(err); + } + expect(this.awsS3.lastCall.args).to.deep.equal([{ + credentials: { + accessKeyId, secretAccessKey: secret - }]) - expect(stream).to.equal(@expectedStream) - done() + } + }]); + expect(stream).to.equal(this.expectedStream); + return done(); + }); + }); - it "accepts byte range", (done) -> - start = 0 - end = 8 - opts = 
{ start: start, end: end } - @S3PersistorManager.getFileStream @bucketName, @key, opts, (err, stream) => - if err? - return done(err) - expect(@s3Client.getObject.lastCall.args).to.deep.equal([{ - Bucket: @bucketName - Key: @key - Range: "bytes=#{start}-#{end}" - }]) - expect(stream).to.equal(@expectedStream) - done() + return it("accepts byte range", function(done) { + const start = 0; + const end = 8; + const opts = { start, end }; + return this.S3PersistorManager.getFileStream(this.bucketName, this.key, opts, (err, stream) => { + if (err != null) { + return done(err); + } + expect(this.s3Client.getObject.lastCall.args).to.deep.equal([{ + Bucket: this.bucketName, + Key: this.key, + Range: `bytes=${start}-${end}` + }]); + expect(stream).to.equal(this.expectedStream); + return done(); + }); + }); + }); - describe "errors", -> - describe "when the file doesn't exist", -> - beforeEach -> - @s3Request.send.callsFake () => - @s3EventHandlers.httpHeaders(404, {}, @s3Response, "Not found") + return describe("errors", function() { + describe("when the file doesn't exist", function() { + beforeEach(function() { + return this.s3Request.send.callsFake(() => { + return this.s3EventHandlers.httpHeaders(404, {}, this.s3Response, "Not found"); + }); + }); - it "returns a NotFoundError that indicates the bucket and key", (done) -> - @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => - expect(err).to.be.instanceof(@Errors.NotFoundError) - errMsg = @Errors.NotFoundError.lastCall.args[0] - expect(errMsg).to.match(new RegExp(".*#{@bucketName}.*")) - expect(errMsg).to.match(new RegExp(".*#{@key}.*")) - done() + return it("returns a NotFoundError that indicates the bucket and key", function(done) { + return this.S3PersistorManager.getFileStream(this.bucketName, this.key, {}, (err, stream) => { + expect(err).to.be.instanceof(this.Errors.NotFoundError); + const errMsg = this.Errors.NotFoundError.lastCall.args[0]; + expect(errMsg).to.match(new RegExp(`.*${this.bucketName}.*`)); + expect(errMsg).to.match(new RegExp(`.*${this.key}.*`)); + return done(); + }); + }); + }); - describe "when S3 encounters an unkown error", -> - beforeEach -> - @s3Request.send.callsFake () => - @s3EventHandlers.httpHeaders(500, {}, @s3Response, "Internal server error") + describe("when S3 encounters an unkown error", function() { + beforeEach(function() { + return this.s3Request.send.callsFake(() => { + return this.s3EventHandlers.httpHeaders(500, {}, this.s3Response, "Internal server error"); + }); + }); - it "returns an error", (done) -> - @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => - expect(err).to.be.instanceof(Error) - done() + return it("returns an error", function(done) { + return this.S3PersistorManager.getFileStream(this.bucketName, this.key, {}, (err, stream) => { + expect(err).to.be.instanceof(Error); + return done(); + }); + }); + }); - describe "when the S3 request errors out before receiving HTTP headers", -> - beforeEach -> - @s3Request.send.callsFake () => - @s3EventHandlers.error(new Error("connection failed")) + return describe("when the S3 request errors out before receiving HTTP headers", function() { + beforeEach(function() { + return this.s3Request.send.callsFake(() => { + return this.s3EventHandlers.error(new Error("connection failed")); + }); + }); - it "returns an error", (done) -> - @S3PersistorManager.getFileStream @bucketName, @key, {}, (err, stream) => - expect(err).to.be.instanceof(Error) - done() + return it("returns an error", function(done) { + return 
this.S3PersistorManager.getFileStream(this.bucketName, this.key, {}, (err, stream) => { + expect(err).to.be.instanceof(Error); + return done(); + }); + }); + }); + }); + }); - describe "getFileSize", -> - it "should obtain the file size from S3", (done) -> - expectedFileSize = 123 - @s3Client.headObject.yields(new Error( + describe("getFileSize", function() { + it("should obtain the file size from S3", function(done) { + const expectedFileSize = 123; + this.s3Client.headObject.yields(new Error( "s3Client.headObject got unexpected arguments" - )) - @s3Client.headObject.withArgs({ - Bucket: @bucketName - Key: @key - }).yields(null, { ContentLength: expectedFileSize }) + )); + this.s3Client.headObject.withArgs({ + Bucket: this.bucketName, + Key: this.key + }).yields(null, { ContentLength: expectedFileSize }); - @S3PersistorManager.getFileSize @bucketName, @key, (err, fileSize) => - if err? - return done(err) - expect(fileSize).to.equal(expectedFileSize) - done() + return this.S3PersistorManager.getFileSize(this.bucketName, this.key, (err, fileSize) => { + if (err != null) { + return done(err); + } + expect(fileSize).to.equal(expectedFileSize); + return done(); + }); + }); - [403, 404].forEach (statusCode) -> - it "should throw NotFoundError when S3 responds with #{statusCode}", (done) -> - error = new Error() - error.statusCode = statusCode - @s3Client.headObject.yields(error) + [403, 404].forEach(statusCode => it(`should throw NotFoundError when S3 responds with ${statusCode}`, function(done) { + const error = new Error(); + error.statusCode = statusCode; + this.s3Client.headObject.yields(error); - @S3PersistorManager.getFileSize @bucketName, @key, (err, fileSize) => - expect(err).to.be.an.instanceof(@Errors.NotFoundError) - done() + return this.S3PersistorManager.getFileSize(this.bucketName, this.key, (err, fileSize) => { + expect(err).to.be.an.instanceof(this.Errors.NotFoundError); + return done(); + }); + })); - it "should rethrow any other error", (done) -> - error = new Error() - @s3Client.headObject.yields(error) - @s3Client.headObject.yields(error) + return it("should rethrow any other error", function(done) { + const error = new Error(); + this.s3Client.headObject.yields(error); + this.s3Client.headObject.yields(error); - @S3PersistorManager.getFileSize @bucketName, @key, (err, fileSize) => - expect(err).to.equal(error) - done() + return this.S3PersistorManager.getFileSize(this.bucketName, this.key, (err, fileSize) => { + expect(err).to.equal(error); + return done(); + }); + }); + }); - describe "sendFile", -> + describe("sendFile", function() { - beforeEach -> - @knoxClient.putFile.returns on:-> + beforeEach(function() { + return this.knoxClient.putFile.returns({on() {}}); + }); - it "should put file with knox", (done)-> - @LocalFileWriter.deleteFile.callsArgWith(1) - @knoxClient.putFile.callsArgWith(2, @error) - @S3PersistorManager.sendFile @bucketName, @key, @fsPath, (err)=> - @knoxClient.putFile.calledWith(@fsPath, @key).should.equal true - err.should.equal @error - done() + it("should put file with knox", function(done){ + this.LocalFileWriter.deleteFile.callsArgWith(1); + this.knoxClient.putFile.callsArgWith(2, this.error); + return this.S3PersistorManager.sendFile(this.bucketName, this.key, this.fsPath, err=> { + this.knoxClient.putFile.calledWith(this.fsPath, this.key).should.equal(true); + err.should.equal(this.error); + return done(); + }); + }); - it "should delete the file and pass the error with it", (done)-> - @LocalFileWriter.deleteFile.callsArgWith(1) - 
@knoxClient.putFile.callsArgWith(2, @error) - @S3PersistorManager.sendFile @bucketName, @key, @fsPath, (err)=> - @knoxClient.putFile.calledWith(@fsPath, @key).should.equal true - err.should.equal @error - done() + return it("should delete the file and pass the error with it", function(done){ + this.LocalFileWriter.deleteFile.callsArgWith(1); + this.knoxClient.putFile.callsArgWith(2, this.error); + return this.S3PersistorManager.sendFile(this.bucketName, this.key, this.fsPath, err=> { + this.knoxClient.putFile.calledWith(this.fsPath, this.key).should.equal(true); + err.should.equal(this.error); + return done(); + }); + }); + }); - describe "sendStream", -> - beforeEach -> - @fsPath = "to/some/where" - @origin = - on:-> - @S3PersistorManager.sendFile = sinon.stub().callsArgWith(3) + describe("sendStream", function() { + beforeEach(function() { + this.fsPath = "to/some/where"; + this.origin = + {on() {}}; + return this.S3PersistorManager.sendFile = sinon.stub().callsArgWith(3); + }); - it "should send stream to LocalFileWriter", (done)-> - @LocalFileWriter.deleteFile.callsArgWith(1) - @LocalFileWriter.writeStream.callsArgWith(2, null, @fsPath) - @S3PersistorManager.sendStream @bucketName, @key, @origin, => - @LocalFileWriter.writeStream.calledWith(@origin).should.equal true - done() + it("should send stream to LocalFileWriter", function(done){ + this.LocalFileWriter.deleteFile.callsArgWith(1); + this.LocalFileWriter.writeStream.callsArgWith(2, null, this.fsPath); + return this.S3PersistorManager.sendStream(this.bucketName, this.key, this.origin, () => { + this.LocalFileWriter.writeStream.calledWith(this.origin).should.equal(true); + return done(); + }); + }); - it "should return the error from LocalFileWriter", (done)-> - @LocalFileWriter.deleteFile.callsArgWith(1) - @LocalFileWriter.writeStream.callsArgWith(2, @error) - @S3PersistorManager.sendStream @bucketName, @key, @origin, (err)=> - err.should.equal @error - done() + it("should return the error from LocalFileWriter", function(done){ + this.LocalFileWriter.deleteFile.callsArgWith(1); + this.LocalFileWriter.writeStream.callsArgWith(2, this.error); + return this.S3PersistorManager.sendStream(this.bucketName, this.key, this.origin, err=> { + err.should.equal(this.error); + return done(); + }); + }); - it "should send the file to the filestore", (done)-> - @LocalFileWriter.deleteFile.callsArgWith(1) - @LocalFileWriter.writeStream.callsArgWith(2) - @S3PersistorManager.sendStream @bucketName, @key, @origin, (err)=> - @S3PersistorManager.sendFile.called.should.equal true - done() + return it("should send the file to the filestore", function(done){ + this.LocalFileWriter.deleteFile.callsArgWith(1); + this.LocalFileWriter.writeStream.callsArgWith(2); + return this.S3PersistorManager.sendStream(this.bucketName, this.key, this.origin, err=> { + this.S3PersistorManager.sendFile.called.should.equal(true); + return done(); + }); + }); + }); - describe "copyFile", -> - beforeEach -> - @sourceKey = "my/key" - @destKey = "my/dest/key" + describe("copyFile", function() { + beforeEach(function() { + this.sourceKey = "my/key"; + return this.destKey = "my/dest/key"; + }); - it "should use AWS SDK to copy file", (done)-> - @s3Client.copyObject.callsArgWith(1, @error) - @S3PersistorManager.copyFile @bucketName, @sourceKey, @destKey, (err)=> - err.should.equal @error - @s3Client.copyObject.calledWith({Bucket: @bucketName, Key: @destKey, CopySource: @bucketName + '/' + @key}).should.equal true - done() + it("should use AWS SDK to copy file", function(done){ + 
this.s3Client.copyObject.callsArgWith(1, this.error); + return this.S3PersistorManager.copyFile(this.bucketName, this.sourceKey, this.destKey, err=> { + err.should.equal(this.error); + this.s3Client.copyObject.calledWith({Bucket: this.bucketName, Key: this.destKey, CopySource: this.bucketName + '/' + this.key}).should.equal(true); + return done(); + }); + }); - it "should return a NotFoundError object if the original file does not exist", (done)-> - NoSuchKeyError = {code: "NoSuchKey"} - @s3Client.copyObject.callsArgWith(1, NoSuchKeyError) - @S3PersistorManager.copyFile @bucketName, @sourceKey, @destKey, (err)=> - expect(err instanceof @Errors.NotFoundError).to.equal true - done() + return it("should return a NotFoundError object if the original file does not exist", function(done){ + const NoSuchKeyError = {code: "NoSuchKey"}; + this.s3Client.copyObject.callsArgWith(1, NoSuchKeyError); + return this.S3PersistorManager.copyFile(this.bucketName, this.sourceKey, this.destKey, err=> { + expect(err instanceof this.Errors.NotFoundError).to.equal(true); + return done(); + }); + }); + }); - describe "deleteDirectory", -> + describe("deleteDirectory", () => it("should list the contents passing them onto multi delete", function(done){ + const data = + {Contents: [{Key:"1234"}, {Key: "456"}]}; + this.knoxClient.list.callsArgWith(1, null, data); + this.knoxClient.deleteMultiple.callsArgWith(1); + return this.S3PersistorManager.deleteDirectory(this.bucketName, this.key, err=> { + this.knoxClient.deleteMultiple.calledWith(["1234","456"]).should.equal(true); + return done(); + }); + })); - it "should list the contents passing them onto multi delete", (done)-> - data = - Contents: [{Key:"1234"}, {Key: "456"}] - @knoxClient.list.callsArgWith(1, null, data) - @knoxClient.deleteMultiple.callsArgWith(1) - @S3PersistorManager.deleteDirectory @bucketName, @key, (err)=> - @knoxClient.deleteMultiple.calledWith(["1234","456"]).should.equal true - done() + describe("deleteFile", function() { - describe "deleteFile", -> + it("should use correct options", function(done){ + this.request.callsArgWith(1); - it "should use correct options", (done)-> - @request.callsArgWith(1) + return this.S3PersistorManager.deleteFile(this.bucketName, this.key, err=> { + const opts = this.request.args[0][0]; + assert.deepEqual(opts.aws, {key:this.settings.filestore.s3.key, secret:this.settings.filestore.s3.secret, bucket:this.bucketName}); + opts.method.should.equal("delete"); + opts.timeout.should.equal((30*1000)); + opts.uri.should.equal(`https://${this.bucketName}.s3.amazonaws.com/${this.key}`); + return done(); + }); + }); - @S3PersistorManager.deleteFile @bucketName, @key, (err)=> - opts = @request.args[0][0] - assert.deepEqual(opts.aws, {key:@settings.filestore.s3.key, secret:@settings.filestore.s3.secret, bucket:@bucketName}) - opts.method.should.equal "delete" - opts.timeout.should.equal (30*1000) - opts.uri.should.equal "https://#{@bucketName}.s3.amazonaws.com/#{@key}" - done() + return it("should return the error", function(done){ + this.request.callsArgWith(1, this.error); - it "should return the error", (done)-> - @request.callsArgWith(1, @error) + return this.S3PersistorManager.deleteFile(this.bucketName, this.key, err=> { + err.should.equal(this.error); + return done(); + }); + }); + }); - @S3PersistorManager.deleteFile @bucketName, @key, (err)=> - err.should.equal @error - done() + describe("checkIfFileExists", function() { - describe "checkIfFileExists", -> + it("should use correct options", function(done){ + 
this.request.callsArgWith(1, null, {statusCode:200}); - it "should use correct options", (done)-> - @request.callsArgWith(1, null, statusCode:200) + return this.S3PersistorManager.checkIfFileExists(this.bucketName, this.key, err=> { + const opts = this.request.args[0][0]; + assert.deepEqual(opts.aws, {key:this.settings.filestore.s3.key, secret:this.settings.filestore.s3.secret, bucket:this.bucketName}); + opts.method.should.equal("head"); + opts.timeout.should.equal((30*1000)); + opts.uri.should.equal(`https://${this.bucketName}.s3.amazonaws.com/${this.key}`); + return done(); + }); + }); - @S3PersistorManager.checkIfFileExists @bucketName, @key, (err)=> - opts = @request.args[0][0] - assert.deepEqual(opts.aws, {key:@settings.filestore.s3.key, secret:@settings.filestore.s3.secret, bucket:@bucketName}) - opts.method.should.equal "head" - opts.timeout.should.equal (30*1000) - opts.uri.should.equal "https://#{@bucketName}.s3.amazonaws.com/#{@key}" - done() + it("should return true for a 200", function(done){ + this.request.callsArgWith(1, null, {statusCode:200}); - it "should return true for a 200", (done)-> - @request.callsArgWith(1, null, statusCode:200) + return this.S3PersistorManager.checkIfFileExists(this.bucketName, this.key, (err, exists)=> { + exists.should.equal(true); + return done(); + }); + }); - @S3PersistorManager.checkIfFileExists @bucketName, @key, (err, exists)=> - exists.should.equal true - done() + it("should return false for a non 200", function(done){ + this.request.callsArgWith(1, null, {statusCode:404}); - it "should return false for a non 200", (done)-> - @request.callsArgWith(1, null, statusCode:404) + return this.S3PersistorManager.checkIfFileExists(this.bucketName, this.key, (err, exists)=> { + exists.should.equal(false); + return done(); + }); + }); - @S3PersistorManager.checkIfFileExists @bucketName, @key, (err, exists)=> - exists.should.equal false - done() + return it("should return the error", function(done){ + this.request.callsArgWith(1, this.error, {}); - it "should return the error", (done)-> - @request.callsArgWith(1, @error, {}) + return this.S3PersistorManager.checkIfFileExists(this.bucketName, this.key, err=> { + err.should.equal(this.error); + return done(); + }); + }); + }); - @S3PersistorManager.checkIfFileExists @bucketName, @key, (err)=> - err.should.equal @error - done() - - describe "directorySize", -> - - it "should sum directory files size", (done) -> - data = - Contents: [ {Size: 1024}, {Size: 2048} ] - @knoxClient.list.callsArgWith(1, null, data) - @S3PersistorManager.directorySize @bucketName, @key, (err, totalSize)=> - totalSize.should.equal 3072 - done() + return describe("directorySize", () => it("should sum directory files size", function(done) { + const data = + {Contents: [ {Size: 1024}, {Size: 2048} ]}; + this.knoxClient.list.callsArgWith(1, null, data); + return this.S3PersistorManager.directorySize(this.bucketName, this.key, (err, totalSize)=> { + totalSize.should.equal(3072); + return done(); + }); + })); +}); diff --git a/services/filestore/test/unit/coffee/SafeExecTests.js b/services/filestore/test/unit/coffee/SafeExecTests.js index 1be22f3993..4c462f887d 100644 --- a/services/filestore/test/unit/coffee/SafeExecTests.js +++ b/services/filestore/test/unit/coffee/SafeExecTests.js @@ -1,50 +1,75 @@ -assert = require("chai").assert -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../app/js/SafeExec.js" -SandboxedModule = require('sandboxed-module') +/* + * 
decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { + assert +} = require("chai"); +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../app/js/SafeExec.js"; +const SandboxedModule = require('sandboxed-module'); -describe "SafeExec", -> +describe("SafeExec", function() { - beforeEach -> - @settings = - enableConversions:true - @safe_exec = SandboxedModule.require modulePath, requires: - "logger-sharelatex": - log:-> - err:-> - "settings-sharelatex": @settings - @options = {timeout: 10*1000, killSignal: "SIGTERM" } + beforeEach(function() { + this.settings = + {enableConversions:true}; + this.safe_exec = SandboxedModule.require(modulePath, { requires: { + "logger-sharelatex": { + log() {}, + err() {} + }, + "settings-sharelatex": this.settings + } + } + ); + return this.options = {timeout: 10*1000, killSignal: "SIGTERM" };}); - describe "safe_exec", -> + return describe("safe_exec", function() { - it "should execute a valid command", (done) -> - @safe_exec ["/bin/echo", "hello"], @options, (err, stdout, stderr) => - stdout.should.equal "hello\n" - should.not.exist(err) - done() + it("should execute a valid command", function(done) { + return this.safe_exec(["/bin/echo", "hello"], this.options, (err, stdout, stderr) => { + stdout.should.equal("hello\n"); + should.not.exist(err); + return done(); + }); + }); - it "should error when conversions are disabled", (done) -> - @settings.enableConversions = false - @safe_exec ["/bin/echo", "hello"], @options, (err, stdout, stderr) => - expect(err).to.exist - done() + it("should error when conversions are disabled", function(done) { + this.settings.enableConversions = false; + return this.safe_exec(["/bin/echo", "hello"], this.options, (err, stdout, stderr) => { + expect(err).to.exist; + return done(); + }); + }); - it "should execute a command with non-zero exit status", (done) -> - @safe_exec ["/usr/bin/env", "false"], @options, (err, stdout, stderr) => - stdout.should.equal "" - stderr.should.equal "" - err.message.should.equal "exit status 1" - done() + it("should execute a command with non-zero exit status", function(done) { + return this.safe_exec(["/usr/bin/env", "false"], this.options, (err, stdout, stderr) => { + stdout.should.equal(""); + stderr.should.equal(""); + err.message.should.equal("exit status 1"); + return done(); + }); + }); - it "should handle an invalid command", (done) -> - @safe_exec ["/bin/foobar"], @options, (err, stdout, stderr) => - err.code.should.equal "ENOENT" - done() + it("should handle an invalid command", function(done) { + return this.safe_exec(["/bin/foobar"], this.options, (err, stdout, stderr) => { + err.code.should.equal("ENOENT"); + return done(); + }); + }); - it "should handle a command that runs too long", (done) -> - @safe_exec ["/bin/sleep", "10"], {timeout: 500, killSignal: "SIGTERM"}, (err, stdout, stderr) => - err.should.equal "SIGTERM" - done() + return it("should handle a command that runs too long", function(done) { + return this.safe_exec(["/bin/sleep", "10"], {timeout: 500, killSignal: "SIGTERM"}, (err, stdout, stderr) => { + err.should.equal("SIGTERM"); + return done(); + }); + }); + }); +}); diff --git a/services/filestore/test/unit/coffee/SettingsTests.js b/services/filestore/test/unit/coffee/SettingsTests.js index efb690511e..5c1b79d70c 100644 --- 
a/services/filestore/test/unit/coffee/SettingsTests.js +++ b/services/filestore/test/unit/coffee/SettingsTests.js @@ -1,19 +1,29 @@ -assert = require("chai").assert -sinon = require('sinon') -chai = require('chai') -should = chai.should() -expect = chai.expect -modulePath = "../../../app/js/BucketController.js" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { + assert +} = require("chai"); +const sinon = require('sinon'); +const chai = require('chai'); +const should = chai.should(); +const { + expect +} = chai; +const modulePath = "../../../app/js/BucketController.js"; -describe "Settings", -> - describe "s3", -> - it "should use JSONified env var if present", (done)-> - s3_settings = - bucket1: - auth_key: 'bucket1_key' - auth_secret: 'bucket1_secret' - process.env['S3_BUCKET_CREDENTIALS'] = JSON.stringify s3_settings +describe("Settings", () => describe("s3", () => it("should use JSONified env var if present", function(done){ + const s3_settings = { + bucket1: { + auth_key: 'bucket1_key', + auth_secret: 'bucket1_secret' + } + }; + process.env['S3_BUCKET_CREDENTIALS'] = JSON.stringify(s3_settings); - settings = require("settings-sharelatex") - expect(settings.filestore.s3BucketCreds).to.deep.equal s3_settings - done() + const settings = require("settings-sharelatex"); + expect(settings.filestore.s3BucketCreds).to.deep.equal(s3_settings); + return done(); +}))); From ff04085bddf763fdd4cf050a41d811e5e042fe54 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Mon, 16 Dec 2019 11:20:25 +0000 Subject: [PATCH 343/555] decaffeinate: Run post-processing cleanups on AWSSDKPersistorManagerTests.coffee and 12 other files --- .../test/unit/coffee/AWSSDKPersistorManagerTests.js | 8 ++++++++ .../filestore/test/unit/coffee/BucketControllerTests.js | 6 ++++++ .../filestore/test/unit/coffee/FSPersistorManagerTests.js | 7 +++++++ .../filestore/test/unit/coffee/FileControllerTests.js | 6 ++++++ services/filestore/test/unit/coffee/FileConverterTests.js | 7 +++++++ services/filestore/test/unit/coffee/FileHandlerTests.js | 7 +++++++ .../filestore/test/unit/coffee/ImageOptimiserTests.js | 7 +++++++ services/filestore/test/unit/coffee/KeybuilderTests.js | 6 ++++++ .../filestore/test/unit/coffee/LocalFileWriterTests.js | 7 +++++++ .../filestore/test/unit/coffee/PersistorManagerTests.js | 6 ++++++ .../filestore/test/unit/coffee/S3PersistorManagerTests.js | 7 +++++++ services/filestore/test/unit/coffee/SafeExecTests.js | 6 ++++++ services/filestore/test/unit/coffee/SettingsTests.js | 8 +++++++- 13 files changed, 87 insertions(+), 1 deletion(-) diff --git a/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.js b/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.js index cad0f4b805..da31ee399a 100644 --- a/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.js +++ b/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.js @@ -1,3 +1,11 @@ +/* eslint-disable + handle-callback-err, + no-dupe-keys, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/test/unit/coffee/BucketControllerTests.js b/services/filestore/test/unit/coffee/BucketControllerTests.js index db0e6a5aa3..86db19da1b 100644 --- a/services/filestore/test/unit/coffee/BucketControllerTests.js +++ b/services/filestore/test/unit/coffee/BucketControllerTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.js b/services/filestore/test/unit/coffee/FSPersistorManagerTests.js index cd73f41ac0..b3c2842b46 100644 --- a/services/filestore/test/unit/coffee/FSPersistorManagerTests.js +++ b/services/filestore/test/unit/coffee/FSPersistorManagerTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/test/unit/coffee/FileControllerTests.js b/services/filestore/test/unit/coffee/FileControllerTests.js index ea3b0e32b8..d60ca5fbd1 100644 --- a/services/filestore/test/unit/coffee/FileControllerTests.js +++ b/services/filestore/test/unit/coffee/FileControllerTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/test/unit/coffee/FileConverterTests.js b/services/filestore/test/unit/coffee/FileConverterTests.js index c546b61a15..96cb0ee53a 100644 --- a/services/filestore/test/unit/coffee/FileConverterTests.js +++ b/services/filestore/test/unit/coffee/FileConverterTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.js b/services/filestore/test/unit/coffee/FileHandlerTests.js index f83561166f..13c60f08da 100644 --- a/services/filestore/test/unit/coffee/FileHandlerTests.js +++ b/services/filestore/test/unit/coffee/FileHandlerTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/test/unit/coffee/ImageOptimiserTests.js b/services/filestore/test/unit/coffee/ImageOptimiserTests.js index 6074120a56..4e6cb858f4 100644 --- a/services/filestore/test/unit/coffee/ImageOptimiserTests.js +++ b/services/filestore/test/unit/coffee/ImageOptimiserTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/test/unit/coffee/KeybuilderTests.js b/services/filestore/test/unit/coffee/KeybuilderTests.js index 063a278f23..1e99899be7 100644 --- a/services/filestore/test/unit/coffee/KeybuilderTests.js +++ b/services/filestore/test/unit/coffee/KeybuilderTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/test/unit/coffee/LocalFileWriterTests.js b/services/filestore/test/unit/coffee/LocalFileWriterTests.js index ed3eb2dfa3..1bf131a3c4 100644 --- a/services/filestore/test/unit/coffee/LocalFileWriterTests.js +++ b/services/filestore/test/unit/coffee/LocalFileWriterTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/test/unit/coffee/PersistorManagerTests.js b/services/filestore/test/unit/coffee/PersistorManagerTests.js index d5b859448a..620d6f0083 100644 --- a/services/filestore/test/unit/coffee/PersistorManagerTests.js +++ b/services/filestore/test/unit/coffee/PersistorManagerTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.js b/services/filestore/test/unit/coffee/S3PersistorManagerTests.js index 4396ce9606..a3a7d16825 100644 --- a/services/filestore/test/unit/coffee/S3PersistorManagerTests.js +++ b/services/filestore/test/unit/coffee/S3PersistorManagerTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/test/unit/coffee/SafeExecTests.js b/services/filestore/test/unit/coffee/SafeExecTests.js index 4c462f887d..f9a0e51ca8 100644 --- a/services/filestore/test/unit/coffee/SafeExecTests.js +++ b/services/filestore/test/unit/coffee/SafeExecTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/services/filestore/test/unit/coffee/SettingsTests.js b/services/filestore/test/unit/coffee/SettingsTests.js index 5c1b79d70c..98d17723a9 100644 --- a/services/filestore/test/unit/coffee/SettingsTests.js +++ b/services/filestore/test/unit/coffee/SettingsTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
 /*
  * decaffeinate suggestions:
  * DS102: Remove unnecessary code created because of implicit returns
@@ -21,7 +27,7 @@ describe("Settings", () => describe("s3", () => it("should use JSONified env var
       auth_secret: 'bucket1_secret'
     }
   };
-  process.env['S3_BUCKET_CREDENTIALS'] = JSON.stringify(s3_settings);
+  process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3_settings);
 
   const settings = require("settings-sharelatex");
   expect(settings.filestore.s3BucketCreds).to.deep.equal(s3_settings);

From ec60f778e62f9180ddc3c1584eaccc7372863db3 Mon Sep 17 00:00:00 2001
From: Simon Detheridge
Date: Mon, 16 Dec 2019 11:20:27 +0000
Subject: [PATCH 344/555] Rename test/unit/coffee to test/unit/js

---
 .../test/unit/{coffee => js}/AWSSDKPersistorManagerTests.js       | 0
 .../filestore/test/unit/{coffee => js}/BucketControllerTests.js   | 0
 .../filestore/test/unit/{coffee => js}/FSPersistorManagerTests.js | 0
 .../filestore/test/unit/{coffee => js}/FileControllerTests.js     | 0
 services/filestore/test/unit/{coffee => js}/FileConverterTests.js | 0
 services/filestore/test/unit/{coffee => js}/FileHandlerTests.js   | 0
 .../filestore/test/unit/{coffee => js}/ImageOptimiserTests.js     | 0
 services/filestore/test/unit/{coffee => js}/KeybuilderTests.js    | 0
 .../filestore/test/unit/{coffee => js}/LocalFileWriterTests.js    | 0
 .../filestore/test/unit/{coffee => js}/PersistorManagerTests.js   | 0
 .../filestore/test/unit/{coffee => js}/S3PersistorManagerTests.js | 0
 services/filestore/test/unit/{coffee => js}/SafeExecTests.js      | 0
 services/filestore/test/unit/{coffee => js}/SettingsTests.js      | 0
 13 files changed, 0 insertions(+), 0 deletions(-)
 rename services/filestore/test/unit/{coffee => js}/AWSSDKPersistorManagerTests.js (100%)
 rename services/filestore/test/unit/{coffee => js}/BucketControllerTests.js (100%)
 rename services/filestore/test/unit/{coffee => js}/FSPersistorManagerTests.js (100%)
 rename services/filestore/test/unit/{coffee => js}/FileControllerTests.js (100%)
 rename services/filestore/test/unit/{coffee => js}/FileConverterTests.js (100%)
 rename services/filestore/test/unit/{coffee => js}/FileHandlerTests.js (100%)
 rename services/filestore/test/unit/{coffee => js}/ImageOptimiserTests.js (100%)
 rename services/filestore/test/unit/{coffee => js}/KeybuilderTests.js (100%)
 rename services/filestore/test/unit/{coffee => js}/LocalFileWriterTests.js (100%)
 rename services/filestore/test/unit/{coffee => js}/PersistorManagerTests.js (100%)
 rename services/filestore/test/unit/{coffee => js}/S3PersistorManagerTests.js (100%)
 rename services/filestore/test/unit/{coffee => js}/SafeExecTests.js (100%)
 rename services/filestore/test/unit/{coffee => js}/SettingsTests.js (100%)

diff --git a/services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.js b/services/filestore/test/unit/js/AWSSDKPersistorManagerTests.js
similarity index 100%
rename from services/filestore/test/unit/coffee/AWSSDKPersistorManagerTests.js
rename to services/filestore/test/unit/js/AWSSDKPersistorManagerTests.js
diff --git a/services/filestore/test/unit/coffee/BucketControllerTests.js b/services/filestore/test/unit/js/BucketControllerTests.js
similarity index 100%
rename from services/filestore/test/unit/coffee/BucketControllerTests.js
rename to services/filestore/test/unit/js/BucketControllerTests.js
diff --git a/services/filestore/test/unit/coffee/FSPersistorManagerTests.js b/services/filestore/test/unit/js/FSPersistorManagerTests.js
similarity index 100%
rename from services/filestore/test/unit/coffee/FSPersistorManagerTests.js
rename to services/filestore/test/unit/js/FSPersistorManagerTests.js
diff --git a/services/filestore/test/unit/coffee/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js
similarity index 100%
rename from services/filestore/test/unit/coffee/FileControllerTests.js
rename to services/filestore/test/unit/js/FileControllerTests.js
diff --git a/services/filestore/test/unit/coffee/FileConverterTests.js b/services/filestore/test/unit/js/FileConverterTests.js
similarity index 100%
rename from services/filestore/test/unit/coffee/FileConverterTests.js
rename to services/filestore/test/unit/js/FileConverterTests.js
diff --git a/services/filestore/test/unit/coffee/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js
similarity index 100%
rename from services/filestore/test/unit/coffee/FileHandlerTests.js
rename to services/filestore/test/unit/js/FileHandlerTests.js
diff --git a/services/filestore/test/unit/coffee/ImageOptimiserTests.js b/services/filestore/test/unit/js/ImageOptimiserTests.js
similarity index 100%
rename from services/filestore/test/unit/coffee/ImageOptimiserTests.js
rename to services/filestore/test/unit/js/ImageOptimiserTests.js
diff --git a/services/filestore/test/unit/coffee/KeybuilderTests.js b/services/filestore/test/unit/js/KeybuilderTests.js
similarity index 100%
rename from services/filestore/test/unit/coffee/KeybuilderTests.js
rename to services/filestore/test/unit/js/KeybuilderTests.js
diff --git a/services/filestore/test/unit/coffee/LocalFileWriterTests.js b/services/filestore/test/unit/js/LocalFileWriterTests.js
similarity index 100%
rename from services/filestore/test/unit/coffee/LocalFileWriterTests.js
rename to services/filestore/test/unit/js/LocalFileWriterTests.js
diff --git a/services/filestore/test/unit/coffee/PersistorManagerTests.js b/services/filestore/test/unit/js/PersistorManagerTests.js
similarity index 100%
rename from services/filestore/test/unit/coffee/PersistorManagerTests.js
rename to services/filestore/test/unit/js/PersistorManagerTests.js
diff --git a/services/filestore/test/unit/coffee/S3PersistorManagerTests.js b/services/filestore/test/unit/js/S3PersistorManagerTests.js
similarity index 100%
rename from services/filestore/test/unit/coffee/S3PersistorManagerTests.js
rename to services/filestore/test/unit/js/S3PersistorManagerTests.js
diff --git a/services/filestore/test/unit/coffee/SafeExecTests.js b/services/filestore/test/unit/js/SafeExecTests.js
similarity index 100%
rename from services/filestore/test/unit/coffee/SafeExecTests.js
rename to services/filestore/test/unit/js/SafeExecTests.js
diff --git a/services/filestore/test/unit/coffee/SettingsTests.js b/services/filestore/test/unit/js/SettingsTests.js
similarity index 100%
rename from services/filestore/test/unit/coffee/SettingsTests.js
rename to services/filestore/test/unit/js/SettingsTests.js

From eae7d284958d68ae9b0cb15cde4b5a3f561fe8aa Mon Sep 17 00:00:00 2001
From: Simon Detheridge
Date: Mon, 16 Dec 2019 11:20:29 +0000
Subject: [PATCH 345/555] Prettier: convert test/unit decaffeinated files to Prettier format

---
 .../unit/js/AWSSDKPersistorManagerTests.js    | 778 ++++++++------
 .../test/unit/js/BucketControllerTests.js     | 174 ++--
 .../test/unit/js/FSPersistorManagerTests.js   | 719 +++++++------
 .../test/unit/js/FileControllerTests.js       | 508 ++++-----
 .../test/unit/js/FileConverterTests.js        | 196 ++--
 .../test/unit/js/FileHandlerTests.js          | 578 ++++++-----
 .../test/unit/js/ImageOptimiserTests.js       | 136 ++-
 .../filestore/test/unit/js/KeybuilderTests.js |  91 +-
.../test/unit/js/LocalFileWriterTests.js | 208 ++-- .../test/unit/js/PersistorManagerTests.js | 242 ++--- .../test/unit/js/S3PersistorManagerTests.js | 961 ++++++++++-------- .../filestore/test/unit/js/SafeExecTests.js | 142 +-- .../filestore/test/unit/js/SettingsTests.js | 38 +- 13 files changed, 2642 insertions(+), 2129 deletions(-) diff --git a/services/filestore/test/unit/js/AWSSDKPersistorManagerTests.js b/services/filestore/test/unit/js/AWSSDKPersistorManagerTests.js index da31ee399a..ea88da71c3 100644 --- a/services/filestore/test/unit/js/AWSSDKPersistorManagerTests.js +++ b/services/filestore/test/unit/js/AWSSDKPersistorManagerTests.js @@ -11,351 +11,499 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const sinon = require('sinon'); -const chai = require('chai'); +const sinon = require('sinon') +const chai = require('chai') -const should = chai.should(); -const { - expect -} = chai; +const should = chai.should() +const { expect } = chai -const modulePath = "../../../app/js/AWSSDKPersistorManager.js"; -const SandboxedModule = require('sandboxed-module'); +const modulePath = '../../../app/js/AWSSDKPersistorManager.js' +const SandboxedModule = require('sandboxed-module') -describe("AWSSDKPersistorManager", function() { - beforeEach(function() { - this.settings = { - filestore: { - backend: "aws-sdk" - } - }; - this.s3 = { - upload: sinon.stub(), - getObject: sinon.stub(), - copyObject: sinon.stub(), - deleteObject: sinon.stub(), - listObjects: sinon.stub(), - deleteObjects: sinon.stub(), - headObject: sinon.stub() - }; - this.awssdk = - {S3: sinon.stub().returns(this.s3)}; +describe('AWSSDKPersistorManager', function() { + beforeEach(function() { + this.settings = { + filestore: { + backend: 'aws-sdk' + } + } + this.s3 = { + upload: sinon.stub(), + getObject: sinon.stub(), + copyObject: sinon.stub(), + deleteObject: sinon.stub(), + listObjects: sinon.stub(), + deleteObjects: sinon.stub(), + headObject: sinon.stub() + } + this.awssdk = { S3: sinon.stub().returns(this.s3) } - this.requires = { - "aws-sdk": this.awssdk, - "settings-sharelatex": this.settings, - "logger-sharelatex": { - log() {}, - err() {} - }, - "fs": (this.fs = - {createReadStream: sinon.stub()}), - "./Errors": (this.Errors = - {NotFoundError: sinon.stub()}) - }; - this.key = "my/key"; - this.bucketName = "my-bucket"; - this.error = "my error"; - return this.AWSSDKPersistorManager = SandboxedModule.require(modulePath, {requires: this.requires}); - }); + this.requires = { + 'aws-sdk': this.awssdk, + 'settings-sharelatex': this.settings, + 'logger-sharelatex': { + log() {}, + err() {} + }, + fs: (this.fs = { createReadStream: sinon.stub() }), + './Errors': (this.Errors = { NotFoundError: sinon.stub() }) + } + this.key = 'my/key' + this.bucketName = 'my-bucket' + this.error = 'my error' + return (this.AWSSDKPersistorManager = SandboxedModule.require(modulePath, { + requires: this.requires + })) + }) - describe("sendFile", function() { - beforeEach(function() { - this.stream = {}; - this.fsPath = "/usr/local/some/file"; - return this.fs.createReadStream.returns(this.stream); - }); + describe('sendFile', function() { + beforeEach(function() { + this.stream = {} + this.fsPath = '/usr/local/some/file' + return this.fs.createReadStream.returns(this.stream) + }) - it("should put the file with s3.upload", function(done) { - this.s3.upload.callsArgWith(1); - return 
this.AWSSDKPersistorManager.sendFile(this.bucketName, this.key, this.fsPath, err => { - expect(err).to.not.be.ok; - expect(this.s3.upload.calledOnce, "called only once").to.be.true; - expect((this.s3.upload.calledWith({Bucket: this.bucketName, Key: this.key, Body: this.stream})) - , "called with correct arguments").to.be.true; - return done(); - }); - }); + it('should put the file with s3.upload', function(done) { + this.s3.upload.callsArgWith(1) + return this.AWSSDKPersistorManager.sendFile( + this.bucketName, + this.key, + this.fsPath, + err => { + expect(err).to.not.be.ok + expect(this.s3.upload.calledOnce, 'called only once').to.be.true + expect( + this.s3.upload.calledWith({ + Bucket: this.bucketName, + Key: this.key, + Body: this.stream + }), + 'called with correct arguments' + ).to.be.true + return done() + } + ) + }) - return it("should dispatch the error from s3.upload", function(done) { - this.s3.upload.callsArgWith(1, this.error); - return this.AWSSDKPersistorManager.sendFile(this.bucketName, this.key, this.fsPath, err => { - expect(err).to.equal(this.error); - return done(); - }); - }); - }); + return it('should dispatch the error from s3.upload', function(done) { + this.s3.upload.callsArgWith(1, this.error) + return this.AWSSDKPersistorManager.sendFile( + this.bucketName, + this.key, + this.fsPath, + err => { + expect(err).to.equal(this.error) + return done() + } + ) + }) + }) + describe('sendStream', function() { + beforeEach(function() { + return (this.stream = {}) + }) - describe("sendStream", function() { - beforeEach(function() { - return this.stream = {};}); + it('should put the file with s3.upload', function(done) { + this.s3.upload.callsArgWith(1) + return this.AWSSDKPersistorManager.sendStream( + this.bucketName, + this.key, + this.stream, + err => { + expect(err).to.not.be.ok + expect(this.s3.upload.calledOnce, 'called only once').to.be.true + expect( + this.s3.upload.calledWith({ + Bucket: this.bucketName, + Key: this.key, + Body: this.stream + }), + 'called with correct arguments' + ).to.be.true + return done() + } + ) + }) - it("should put the file with s3.upload", function(done) { - this.s3.upload.callsArgWith(1); - return this.AWSSDKPersistorManager.sendStream(this.bucketName, this.key, this.stream, err => { - expect(err).to.not.be.ok; - expect(this.s3.upload.calledOnce, "called only once").to.be.true; - expect((this.s3.upload.calledWith({Bucket: this.bucketName, Key: this.key, Body: this.stream})), - "called with correct arguments").to.be.true; - return done(); - }); - }); + return it('should dispatch the error from s3.upload', function(done) { + this.s3.upload.callsArgWith(1, this.error) + return this.AWSSDKPersistorManager.sendStream( + this.bucketName, + this.key, + this.stream, + err => { + expect(err).to.equal(this.error) + return done() + } + ) + }) + }) - return it("should dispatch the error from s3.upload", function(done) { - this.s3.upload.callsArgWith(1, this.error); - return this.AWSSDKPersistorManager.sendStream(this.bucketName, this.key, this.stream, err => { - expect(err).to.equal(this.error); - return done(); - }); - }); - }); + describe('getFileStream', function() { + beforeEach(function() { + this.opts = {} + this.stream = {} + this.read_stream = { on: (this.read_stream_on = sinon.stub()) } + this.object = { createReadStream: sinon.stub().returns(this.read_stream) } + return this.s3.getObject.returns(this.object) + }) - describe("getFileStream", function() { - beforeEach(function() { - this.opts = {}; - this.stream = {}; - this.read_stream = - 
{on: (this.read_stream_on = sinon.stub())}; - this.object = - {createReadStream: sinon.stub().returns(this.read_stream)}; - return this.s3.getObject.returns(this.object); - }); + it('should return a stream from s3.getObject', function(done) { + this.read_stream_on.withArgs('readable').callsArgWith(1) - it("should return a stream from s3.getObject", function(done) { - this.read_stream_on.withArgs('readable').callsArgWith(1); + return this.AWSSDKPersistorManager.getFileStream( + this.bucketName, + this.key, + this.opts, + (err, stream) => { + expect(this.read_stream_on.calledTwice) + expect(err).to.not.be.ok + expect(stream, 'returned the stream').to.equal(this.read_stream) + expect( + this.s3.getObject.calledWith({ + Bucket: this.bucketName, + Key: this.key + }), + 'called with correct arguments' + ).to.be.true + return done() + } + ) + }) - return this.AWSSDKPersistorManager.getFileStream(this.bucketName, this.key, this.opts, (err, stream) => { - expect(this.read_stream_on.calledTwice); - expect(err).to.not.be.ok; - expect(stream, "returned the stream").to.equal(this.read_stream); - expect((this.s3.getObject.calledWith({Bucket: this.bucketName, Key: this.key})), - "called with correct arguments").to.be.true; - return done(); - }); - }); + describe('with start and end options', function() { + beforeEach(function() { + return (this.opts = { + start: 0, + end: 8 + }) + }) + return it('should pass headers to the s3.GetObject', function(done) { + this.read_stream_on.withArgs('readable').callsArgWith(1) + this.AWSSDKPersistorManager.getFileStream( + this.bucketName, + this.key, + this.opts, + (err, stream) => { + return expect( + this.s3.getObject.calledWith({ + Bucket: this.bucketName, + Key: this.key, + Range: 'bytes=0-8' + }), + 'called with correct arguments' + ).to.be.true + } + ) + return done() + }) + }) - describe("with start and end options", function() { - beforeEach(function() { - return this.opts = { - start: 0, - end: 8 - }; - }); - return it("should pass headers to the s3.GetObject", function(done) { - this.read_stream_on.withArgs('readable').callsArgWith(1); - this.AWSSDKPersistorManager.getFileStream(this.bucketName, this.key, this.opts, (err, stream) => { - return expect((this.s3.getObject.calledWith({Bucket: this.bucketName, Key: this.key, Range: 'bytes=0-8'})), - "called with correct arguments").to.be.true; - }); - return done(); - }); - }); + return describe('error conditions', function() { + describe("when the file doesn't exist", function() { + beforeEach(function() { + this.error = new Error() + return (this.error.code = 'NoSuchKey') + }) + return it('should produce a NotFoundError', function(done) { + this.read_stream_on.withArgs('error').callsArgWith(1, this.error) + return this.AWSSDKPersistorManager.getFileStream( + this.bucketName, + this.key, + this.opts, + (err, stream) => { + expect(stream).to.not.be.ok + expect(err).to.be.ok + expect( + err instanceof this.Errors.NotFoundError, + 'error is a correct instance' + ).to.equal(true) + return done() + } + ) + }) + }) - return describe("error conditions", function() { - describe("when the file doesn't exist", function() { - beforeEach(function() { - this.error = new Error(); - return this.error.code = 'NoSuchKey'; - }); - return it("should produce a NotFoundError", function(done) { - this.read_stream_on.withArgs('error').callsArgWith(1, this.error); - return this.AWSSDKPersistorManager.getFileStream(this.bucketName, this.key, this.opts, (err, stream) => { - expect(stream).to.not.be.ok; - expect(err).to.be.ok; - 
expect(err instanceof this.Errors.NotFoundError, "error is a correct instance").to.equal(true); - return done(); - }); - }); - }); + return describe('when there is some other error', function() { + beforeEach(function() { + return (this.error = new Error()) + }) + return it('should dispatch the error from s3 object stream', function(done) { + this.read_stream_on.withArgs('error').callsArgWith(1, this.error) + return this.AWSSDKPersistorManager.getFileStream( + this.bucketName, + this.key, + this.opts, + (err, stream) => { + expect(stream).to.not.be.ok + expect(err).to.be.ok + expect(err).to.equal(this.error) + return done() + } + ) + }) + }) + }) + }) - return describe("when there is some other error", function() { - beforeEach(function() { - return this.error = new Error(); - }); - return it("should dispatch the error from s3 object stream", function(done) { - this.read_stream_on.withArgs('error').callsArgWith(1, this.error); - return this.AWSSDKPersistorManager.getFileStream(this.bucketName, this.key, this.opts, (err, stream) => { - expect(stream).to.not.be.ok; - expect(err).to.be.ok; - expect(err).to.equal(this.error); - return done(); - }); - }); - }); - }); - }); + describe('copyFile', function() { + beforeEach(function() { + this.destKey = 'some/key' + return (this.stream = {}) + }) - describe("copyFile", function() { - beforeEach(function() { - this.destKey = "some/key"; - return this.stream = {};}); + it('should copy the file with s3.copyObject', function(done) { + this.s3.copyObject.callsArgWith(1) + return this.AWSSDKPersistorManager.copyFile( + this.bucketName, + this.key, + this.destKey, + err => { + expect(err).to.not.be.ok + expect(this.s3.copyObject.calledOnce, 'called only once').to.be.true + expect( + this.s3.copyObject.calledWith({ + Bucket: this.bucketName, + Key: this.destKey, + CopySource: this.bucketName + '/' + this.key + }), + 'called with correct arguments' + ).to.be.true + return done() + } + ) + }) - it("should copy the file with s3.copyObject", function(done) { - this.s3.copyObject.callsArgWith(1); - return this.AWSSDKPersistorManager.copyFile(this.bucketName, this.key, this.destKey, err => { - expect(err).to.not.be.ok; - expect(this.s3.copyObject.calledOnce, "called only once").to.be.true; - expect((this.s3.copyObject.calledWith({Bucket: this.bucketName, Key: this.destKey, CopySource: this.bucketName + '/' + this.key})), - "called with correct arguments").to.be.true; - return done(); - }); - }); + return it('should dispatch the error from s3.copyObject', function(done) { + this.s3.copyObject.callsArgWith(1, this.error) + return this.AWSSDKPersistorManager.copyFile( + this.bucketName, + this.key, + this.destKey, + err => { + expect(err).to.equal(this.error) + return done() + } + ) + }) + }) - return it("should dispatch the error from s3.copyObject", function(done) { - this.s3.copyObject.callsArgWith(1, this.error); - return this.AWSSDKPersistorManager.copyFile(this.bucketName, this.key, this.destKey, err => { - expect(err).to.equal(this.error); - return done(); - }); - }); - }); + describe('deleteFile', function() { + it('should delete the file with s3.deleteObject', function(done) { + this.s3.deleteObject.callsArgWith(1) + return this.AWSSDKPersistorManager.deleteFile( + this.bucketName, + this.key, + err => { + expect(err).to.not.be.ok + expect(this.s3.deleteObject.calledOnce, 'called only once').to.be.true + expect( + this.s3.deleteObject.calledWith({ + Bucket: this.bucketName, + Key: this.key + }), + 'called with correct arguments' + ).to.be.true + return 
done() + } + ) + }) - describe("deleteFile", function() { - it("should delete the file with s3.deleteObject", function(done) { - this.s3.deleteObject.callsArgWith(1); - return this.AWSSDKPersistorManager.deleteFile(this.bucketName, this.key, err => { - expect(err).to.not.be.ok; - expect(this.s3.deleteObject.calledOnce, "called only once").to.be.true; - expect((this.s3.deleteObject.calledWith({Bucket: this.bucketName, Key: this.key})), - "called with correct arguments").to.be.true; - return done(); - }); - }); + return it('should dispatch the error from s3.deleteObject', function(done) { + this.s3.deleteObject.callsArgWith(1, this.error) + return this.AWSSDKPersistorManager.deleteFile( + this.bucketName, + this.key, + err => { + expect(err).to.equal(this.error) + return done() + } + ) + }) + }) - return it("should dispatch the error from s3.deleteObject", function(done) { - this.s3.deleteObject.callsArgWith(1, this.error); - return this.AWSSDKPersistorManager.deleteFile(this.bucketName, this.key, err => { - expect(err).to.equal(this.error); - return done(); - }); - }); - }); + describe('deleteDirectory', function() { + it('should list the directory content using s3.listObjects', function(done) { + this.s3.listObjects.callsArgWith(1, null, { Contents: [] }) + return this.AWSSDKPersistorManager.deleteDirectory( + this.bucketName, + this.key, + err => { + expect(err).to.not.be.ok + expect(this.s3.listObjects.calledOnce, 'called only once').to.be.true + expect( + this.s3.listObjects.calledWith({ + Bucket: this.bucketName, + Prefix: this.key + }), + 'called with correct arguments' + ).to.be.true + return done() + } + ) + }) - describe("deleteDirectory", function() { + it('should dispatch the error from s3.listObjects', function(done) { + this.s3.listObjects.callsArgWith(1, this.error) + return this.AWSSDKPersistorManager.deleteDirectory( + this.bucketName, + this.key, + err => { + expect(err).to.equal(this.error) + return done() + } + ) + }) - it("should list the directory content using s3.listObjects", function(done) { - this.s3.listObjects.callsArgWith(1, null, {Contents: []}); - return this.AWSSDKPersistorManager.deleteDirectory(this.bucketName, this.key, err => { - expect(err).to.not.be.ok; - expect(this.s3.listObjects.calledOnce, "called only once").to.be.true; - expect((this.s3.listObjects.calledWith({Bucket: this.bucketName, Prefix: this.key})), - "called with correct arguments").to.be.true; - return done(); - }); - }); + return describe('with directory content', function() { + beforeEach(function() { + return (this.fileList = [{ Key: 'foo' }, { Key: 'bar', Key: 'baz' }]) + }) - it("should dispatch the error from s3.listObjects", function(done) { - this.s3.listObjects.callsArgWith(1, this.error); - return this.AWSSDKPersistorManager.deleteDirectory(this.bucketName, this.key, err => { - expect(err).to.equal(this.error); - return done(); - }); - }); + it('should forward the file keys to s3.deleteObjects', function(done) { + this.s3.listObjects.callsArgWith(1, null, { Contents: this.fileList }) + this.s3.deleteObjects.callsArgWith(1) + return this.AWSSDKPersistorManager.deleteDirectory( + this.bucketName, + this.key, + err => { + expect(err).to.not.be.ok + expect(this.s3.deleteObjects.calledOnce, 'called only once').to.be + .true + expect( + this.s3.deleteObjects.calledWith({ + Bucket: this.bucketName, + Delete: { + Quiet: true, + Objects: this.fileList + } + }), + 'called with correct arguments' + ).to.be.true + return done() + } + ) + }) - return describe("with directory content", 
function() { - beforeEach(function() { - return this.fileList = [ - {Key: 'foo'} - , { Key: 'bar' - , Key: 'baz' - } - ];}); + return it('should dispatch the error from s3.deleteObjects', function(done) { + this.s3.listObjects.callsArgWith(1, null, { Contents: this.fileList }) + this.s3.deleteObjects.callsArgWith(1, this.error) + return this.AWSSDKPersistorManager.deleteDirectory( + this.bucketName, + this.key, + err => { + expect(err).to.equal(this.error) + return done() + } + ) + }) + }) + }) - it("should forward the file keys to s3.deleteObjects", function(done) { - this.s3.listObjects.callsArgWith(1, null, {Contents: this.fileList}); - this.s3.deleteObjects.callsArgWith(1); - return this.AWSSDKPersistorManager.deleteDirectory(this.bucketName, this.key, err => { - expect(err).to.not.be.ok; - expect(this.s3.deleteObjects.calledOnce, "called only once").to.be.true; - expect((this.s3.deleteObjects.calledWith({ - Bucket: this.bucketName, - Delete: { - Quiet: true, - Objects: this.fileList - }})), - "called with correct arguments").to.be.true; - return done(); - }); - }); + describe('checkIfFileExists', function() { + it('should check for the file with s3.headObject', function(done) { + this.s3.headObject.callsArgWith(1, null, {}) + return this.AWSSDKPersistorManager.checkIfFileExists( + this.bucketName, + this.key, + (err, exists) => { + expect(err).to.not.be.ok + expect(this.s3.headObject.calledOnce, 'called only once').to.be.true + expect( + this.s3.headObject.calledWith({ + Bucket: this.bucketName, + Key: this.key + }), + 'called with correct arguments' + ).to.be.true + return done() + } + ) + }) - return it("should dispatch the error from s3.deleteObjects", function(done) { - this.s3.listObjects.callsArgWith(1, null, {Contents: this.fileList}); - this.s3.deleteObjects.callsArgWith(1, this.error); - return this.AWSSDKPersistorManager.deleteDirectory(this.bucketName, this.key, err => { - expect(err).to.equal(this.error); - return done(); - }); - }); - }); - }); + it('should return false on an inexistant file', function(done) { + this.s3.headObject.callsArgWith(1, null, {}) + return this.AWSSDKPersistorManager.checkIfFileExists( + this.bucketName, + this.key, + (err, exists) => { + expect(exists).to.be.false + return done() + } + ) + }) + it('should return true on an existing file', function(done) { + this.s3.headObject.callsArgWith(1, null, { ETag: 'etag' }) + return this.AWSSDKPersistorManager.checkIfFileExists( + this.bucketName, + this.key, + (err, exists) => { + expect(exists).to.be.true + return done() + } + ) + }) - describe("checkIfFileExists", function() { + return it('should dispatch the error from s3.headObject', function(done) { + this.s3.headObject.callsArgWith(1, this.error) + return this.AWSSDKPersistorManager.checkIfFileExists( + this.bucketName, + this.key, + (err, exists) => { + expect(err).to.equal(this.error) + return done() + } + ) + }) + }) - it("should check for the file with s3.headObject", function(done) { - this.s3.headObject.callsArgWith(1, null, {}); - return this.AWSSDKPersistorManager.checkIfFileExists(this.bucketName, this.key, (err, exists) => { - expect(err).to.not.be.ok; - expect(this.s3.headObject.calledOnce, "called only once").to.be.true; - expect((this.s3.headObject.calledWith({Bucket: this.bucketName, Key: this.key})), - "called with correct arguments").to.be.true; - return done(); - }); - }); + return describe('directorySize', function() { + it('should list the directory content using s3.listObjects', function(done) { + 
this.s3.listObjects.callsArgWith(1, null, { Contents: [] }) + return this.AWSSDKPersistorManager.directorySize( + this.bucketName, + this.key, + err => { + expect(err).to.not.be.ok + expect(this.s3.listObjects.calledOnce, 'called only once').to.be.true + expect( + this.s3.listObjects.calledWith({ + Bucket: this.bucketName, + Prefix: this.key + }), + 'called with correct arguments' + ).to.be.true + return done() + } + ) + }) - it("should return false on an inexistant file", function(done) { - this.s3.headObject.callsArgWith(1, null, {}); - return this.AWSSDKPersistorManager.checkIfFileExists(this.bucketName, this.key, (err, exists) => { - expect(exists).to.be.false; - return done(); - }); - }); + it('should dispatch the error from s3.listObjects', function(done) { + this.s3.listObjects.callsArgWith(1, this.error) + return this.AWSSDKPersistorManager.directorySize( + this.bucketName, + this.key, + err => { + expect(err).to.equal(this.error) + return done() + } + ) + }) - it("should return true on an existing file", function(done) { - this.s3.headObject.callsArgWith(1, null, {ETag: "etag"}); - return this.AWSSDKPersistorManager.checkIfFileExists(this.bucketName, this.key, (err, exists) => { - expect(exists).to.be.true; - return done(); - }); - }); - - return it("should dispatch the error from s3.headObject", function(done) { - this.s3.headObject.callsArgWith(1, this.error); - return this.AWSSDKPersistorManager.checkIfFileExists(this.bucketName, this.key, (err, exists) => { - expect(err).to.equal(this.error); - return done(); - }); - }); - }); - - return describe("directorySize", function() { - - it("should list the directory content using s3.listObjects", function(done) { - this.s3.listObjects.callsArgWith(1, null, {Contents: []}); - return this.AWSSDKPersistorManager.directorySize(this.bucketName, this.key, err => { - expect(err).to.not.be.ok; - expect(this.s3.listObjects.calledOnce, "called only once").to.be.true; - expect((this.s3.listObjects.calledWith({Bucket: this.bucketName, Prefix: this.key})), - "called with correct arguments").to.be.true; - return done(); - }); - }); - - it("should dispatch the error from s3.listObjects", function(done) { - this.s3.listObjects.callsArgWith(1, this.error); - return this.AWSSDKPersistorManager.directorySize(this.bucketName, this.key, err => { - expect(err).to.equal(this.error); - return done(); - }); - }); - - return it("should sum directory files sizes", function(done) { - this.s3.listObjects.callsArgWith(1, null, {Contents: [ { Size: 1024 }, { Size: 2048 }]}); - return this.AWSSDKPersistorManager.directorySize(this.bucketName, this.key, (err, size) => { - expect(size).to.equal(3072); - return done(); - }); - }); - }); -}); + return it('should sum directory files sizes', function(done) { + this.s3.listObjects.callsArgWith(1, null, { + Contents: [{ Size: 1024 }, { Size: 2048 }] + }) + return this.AWSSDKPersistorManager.directorySize( + this.bucketName, + this.key, + (err, size) => { + expect(size).to.equal(3072) + return done() + } + ) + }) + }) +}) diff --git a/services/filestore/test/unit/js/BucketControllerTests.js b/services/filestore/test/unit/js/BucketControllerTests.js index 86db19da1b..ef74b3f6c0 100644 --- a/services/filestore/test/unit/js/BucketControllerTests.js +++ b/services/filestore/test/unit/js/BucketControllerTests.js @@ -9,98 +9,92 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const { - assert -} = require("chai"); 
-const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../app/js/BucketController.js"; -const SandboxedModule = require('sandboxed-module'); +const { assert } = require('chai') +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../app/js/BucketController.js' +const SandboxedModule = require('sandboxed-module') -describe("BucketController", function() { +describe('BucketController', function() { + beforeEach(function() { + this.PersistorManager = { + sendStream: sinon.stub(), + copyFile: sinon.stub(), + deleteFile: sinon.stub() + } - beforeEach(function() { - this.PersistorManager = { - sendStream: sinon.stub(), - copyFile: sinon.stub(), - deleteFile:sinon.stub() - }; + this.settings = { + s3: { + buckets: { + user_files: 'user_files' + } + }, + filestore: { + backend: 's3', + s3: { + secret: 'secret', + key: 'this_key' + } + } + } - this.settings = { - s3: { - buckets: { - user_files:"user_files" - } - }, - filestore: { - backend: "s3", - s3: { - secret: "secret", - key: "this_key" - } - } - }; + this.FileHandler = { + getFile: sinon.stub(), + deleteFile: sinon.stub(), + insertFile: sinon.stub(), + getDirectorySize: sinon.stub() + } + this.LocalFileWriter = {} + this.controller = SandboxedModule.require(modulePath, { + requires: { + './LocalFileWriter': this.LocalFileWriter, + './FileHandler': this.FileHandler, + './PersistorManager': this.PersistorManager, + 'settings-sharelatex': this.settings, + 'metrics-sharelatex': { + inc() {} + }, + 'logger-sharelatex': { + log() {}, + err() {} + } + } + }) + this.project_id = 'project_id' + this.file_id = 'file_id' + this.bucket = 'user_files' + this.key = `${this.project_id}/${this.file_id}` + this.req = { + query: {}, + params: { + bucket: this.bucket, + 0: this.key + }, + headers: {} + } + this.res = { setHeader() {} } + return (this.fileStream = {}) + }) - this.FileHandler = { - getFile: sinon.stub(), - deleteFile: sinon.stub(), - insertFile: sinon.stub(), - getDirectorySize: sinon.stub() - }; - this.LocalFileWriter = {}; - this.controller = SandboxedModule.require(modulePath, { requires: { - "./LocalFileWriter":this.LocalFileWriter, - "./FileHandler": this.FileHandler, - "./PersistorManager":this.PersistorManager, - "settings-sharelatex": this.settings, - "metrics-sharelatex": { - inc() {} - }, - "logger-sharelatex": { - log() {}, - err() {} - } - } - } - ); - this.project_id = "project_id"; - this.file_id = "file_id"; - this.bucket = "user_files"; - this.key = `${this.project_id}/${this.file_id}`; - this.req = { - query:{}, - params: { - bucket: this.bucket, - 0: this.key - }, - headers: {} - }; - this.res = - {setHeader() {}}; - return this.fileStream = {};}); + return describe('getFile', function() { + it('should pipe the stream', function(done) { + this.FileHandler.getFile.callsArgWith(3, null, this.fileStream) + this.fileStream.pipe = res => { + res.should.equal(this.res) + return done() + } + return this.controller.getFile(this.req, this.res) + }) - return describe("getFile", function() { - - it("should pipe the stream", function(done){ - this.FileHandler.getFile.callsArgWith(3, null, this.fileStream); - this.fileStream.pipe = res=> { - res.should.equal(this.res); - return done(); - }; - return this.controller.getFile(this.req, this.res); - }); - - return it("should send a 500 if there is a problem", function(done){ - 
this.FileHandler.getFile.callsArgWith(3, "error"); - this.res.send = code=> { - code.should.equal(500); - return done(); - }; - return this.controller.getFile(this.req, this.res); - }); - }); -}); + return it('should send a 500 if there is a problem', function(done) { + this.FileHandler.getFile.callsArgWith(3, 'error') + this.res.send = code => { + code.should.equal(500) + return done() + } + return this.controller.getFile(this.req, this.res) + }) + }) +}) diff --git a/services/filestore/test/unit/js/FSPersistorManagerTests.js b/services/filestore/test/unit/js/FSPersistorManagerTests.js index b3c2842b46..9e9018c17b 100644 --- a/services/filestore/test/unit/js/FSPersistorManagerTests.js +++ b/services/filestore/test/unit/js/FSPersistorManagerTests.js @@ -11,369 +11,492 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const { - assert -} = require("chai"); -const sinon = require('sinon'); -const chai = require('chai'); -const { - should -} = chai; -const { - expect -} = chai; -const modulePath = "../../../app/js/FSPersistorManager.js"; -const SandboxedModule = require('sandboxed-module'); -const fs = require("fs"); -const response = require("response"); - -describe("FSPersistorManagerTests", function() { +const { assert } = require('chai') +const sinon = require('sinon') +const chai = require('chai') +const { should } = chai +const { expect } = chai +const modulePath = '../../../app/js/FSPersistorManager.js' +const SandboxedModule = require('sandboxed-module') +const fs = require('fs') +const response = require('response') +describe('FSPersistorManagerTests', function() { beforeEach(function() { this.Fs = { - rename:sinon.stub(), - createReadStream:sinon.stub(), - createWriteStream:sinon.stub(), - unlink:sinon.stub(), - rmdir:sinon.stub(), - exists:sinon.stub(), - readdir:sinon.stub(), - open:sinon.stub(), - openSync:sinon.stub(), - fstatSync:sinon.stub(), - closeSync:sinon.stub(), - stat:sinon.stub() - }; - this.Rimraf = sinon.stub(); + rename: sinon.stub(), + createReadStream: sinon.stub(), + createWriteStream: sinon.stub(), + unlink: sinon.stub(), + rmdir: sinon.stub(), + exists: sinon.stub(), + readdir: sinon.stub(), + open: sinon.stub(), + openSync: sinon.stub(), + fstatSync: sinon.stub(), + closeSync: sinon.stub(), + stat: sinon.stub() + } + this.Rimraf = sinon.stub() this.LocalFileWriter = { writeStream: sinon.stub(), deleteFile: sinon.stub() - }; + } this.requires = { - "./LocalFileWriter":this.LocalFileWriter, - "fs":this.Fs, - "logger-sharelatex": { + './LocalFileWriter': this.LocalFileWriter, + fs: this.Fs, + 'logger-sharelatex': { log() {}, err() {} }, - "response":response, - "rimraf":this.Rimraf, - "./Errors": (this.Errors = - {NotFoundError: sinon.stub()}) - }; - this.location = "/tmp"; - this.name1 = "530f2407e7ef165704000007/530f838b46d9a9e859000008"; - this.name1Filtered ="530f2407e7ef165704000007_530f838b46d9a9e859000008"; - this.name2 = "second_file"; - this.error = "error_message"; - return this.FSPersistorManager = SandboxedModule.require(modulePath, {requires: this.requires}); - }); + response: response, + rimraf: this.Rimraf, + './Errors': (this.Errors = { NotFoundError: sinon.stub() }) + } + this.location = '/tmp' + this.name1 = '530f2407e7ef165704000007/530f838b46d9a9e859000008' + this.name1Filtered = '530f2407e7ef165704000007_530f838b46d9a9e859000008' + this.name2 = 'second_file' + this.error = 'error_message' + return (this.FSPersistorManager = 
SandboxedModule.require(modulePath, { + requires: this.requires + })) + }) - describe("sendFile", function() { + describe('sendFile', function() { beforeEach(function() { - return this.Fs.createReadStream = sinon.stub().returns({ + return (this.Fs.createReadStream = sinon.stub().returns({ on() {}, pipe() {} - }); - }); + })) + }) - it("should copy the file", function(done) { - this.Fs.createWriteStream =sinon.stub().returns({ + it('should copy the file', function(done) { + this.Fs.createWriteStream = sinon.stub().returns({ on(event, handler) { - if (event === 'finish') { return process.nextTick(handler); } + if (event === 'finish') { + return process.nextTick(handler) + } } - }); - return this.FSPersistorManager.sendFile(this.location, this.name1, this.name2, err=> { - this.Fs.createReadStream.calledWith(this.name2).should.equal(true); - this.Fs.createWriteStream.calledWith(`${this.location}/${this.name1Filtered}` ).should.equal(true); - return done(); - }); - }); + }) + return this.FSPersistorManager.sendFile( + this.location, + this.name1, + this.name2, + err => { + this.Fs.createReadStream.calledWith(this.name2).should.equal(true) + this.Fs.createWriteStream + .calledWith(`${this.location}/${this.name1Filtered}`) + .should.equal(true) + return done() + } + ) + }) - return it("should return an error if the file cannot be stored", function(done) { - this.Fs.createWriteStream =sinon.stub().returns({ + return it('should return an error if the file cannot be stored', function(done) { + this.Fs.createWriteStream = sinon.stub().returns({ on: (event, handler) => { - if (event === 'error') { - return process.nextTick(() => { - return handler(this.error); - }); + if (event === 'error') { + return process.nextTick(() => { + return handler(this.error) + }) + } } - } - }); - return this.FSPersistorManager.sendFile(this.location, this.name1, this.name2, err=> { - this.Fs.createReadStream.calledWith(this.name2).should.equal(true); - this.Fs.createWriteStream.calledWith(`${this.location}/${this.name1Filtered}` ).should.equal(true); - err.should.equal(this.error); - return done(); - }); - }); - }); + }) + return this.FSPersistorManager.sendFile( + this.location, + this.name1, + this.name2, + err => { + this.Fs.createReadStream.calledWith(this.name2).should.equal(true) + this.Fs.createWriteStream + .calledWith(`${this.location}/${this.name1Filtered}`) + .should.equal(true) + err.should.equal(this.error) + return done() + } + ) + }) + }) - describe("sendStream", function() { + describe('sendStream', function() { beforeEach(function() { - this.FSPersistorManager.sendFile = sinon.stub().callsArgWith(3); - this.LocalFileWriter.writeStream.callsArgWith(2, null, this.name1); - this.LocalFileWriter.deleteFile.callsArg(1); - return this.SourceStream = - {on() {}}; - }); + this.FSPersistorManager.sendFile = sinon.stub().callsArgWith(3) + this.LocalFileWriter.writeStream.callsArgWith(2, null, this.name1) + this.LocalFileWriter.deleteFile.callsArg(1) + return (this.SourceStream = { on() {} }) + }) - it("should sent stream to LocalFileWriter", function(done){ - return this.FSPersistorManager.sendStream(this.location, this.name1, this.SourceStream, () => { - this.LocalFileWriter.writeStream.calledWith(this.SourceStream).should.equal(true); - return done(); - }); - }); + it('should sent stream to LocalFileWriter', function(done) { + return this.FSPersistorManager.sendStream( + this.location, + this.name1, + this.SourceStream, + () => { + this.LocalFileWriter.writeStream + .calledWith(this.SourceStream) + 
.should.equal(true) + return done() + } + ) + }) - it("should return the error from LocalFileWriter", function(done){ - this.LocalFileWriter.writeStream.callsArgWith(2, this.error); - return this.FSPersistorManager.sendStream(this.location, this.name1, this.SourceStream, err=> { - err.should.equal(this.error); - return done(); - }); - }); + it('should return the error from LocalFileWriter', function(done) { + this.LocalFileWriter.writeStream.callsArgWith(2, this.error) + return this.FSPersistorManager.sendStream( + this.location, + this.name1, + this.SourceStream, + err => { + err.should.equal(this.error) + return done() + } + ) + }) - return it("should send the file to the filestore", function(done){ - this.LocalFileWriter.writeStream.callsArgWith(2); - return this.FSPersistorManager.sendStream(this.location, this.name1, this.SourceStream, err=> { - this.FSPersistorManager.sendFile.called.should.equal(true); - return done(); - }); - }); - }); + return it('should send the file to the filestore', function(done) { + this.LocalFileWriter.writeStream.callsArgWith(2) + return this.FSPersistorManager.sendStream( + this.location, + this.name1, + this.SourceStream, + err => { + this.FSPersistorManager.sendFile.called.should.equal(true) + return done() + } + ) + }) + }) - describe("getFileStream", function() { + describe('getFileStream', function() { beforeEach(function() { - return this.opts = {};}); + return (this.opts = {}) + }) - it("should use correct file location", function(done) { - this.FSPersistorManager.getFileStream(this.location, this.name1, this.opts, (err,res) => {}); - this.Fs.open.calledWith(`${this.location}/${this.name1Filtered}`).should.equal(true); - return done(); - }); - - describe("with start and end options", function() { + it('should use correct file location', function(done) { + this.FSPersistorManager.getFileStream( + this.location, + this.name1, + this.opts, + (err, res) => {} + ) + this.Fs.open + .calledWith(`${this.location}/${this.name1Filtered}`) + .should.equal(true) + return done() + }) + describe('with start and end options', function() { beforeEach(function() { - this.fd = 2019; - this.opts_in = {start: 0, end: 8}; - this.opts = {start: 0, end: 8, fd: this.fd}; - return this.Fs.open.callsArgWith(2, null, this.fd); - }); + this.fd = 2019 + this.opts_in = { start: 0, end: 8 } + this.opts = { start: 0, end: 8, fd: this.fd } + return this.Fs.open.callsArgWith(2, null, this.fd) + }) return it('should pass the options to createReadStream', function(done) { - this.FSPersistorManager.getFileStream(this.location, this.name1, this.opts_in, (err,res)=> {}); - this.Fs.createReadStream.calledWith(null, this.opts).should.equal(true); - return done(); - }); - }); - - return describe("error conditions", function() { - - describe("when the file does not exist", function() { + this.FSPersistorManager.getFileStream( + this.location, + this.name1, + this.opts_in, + (err, res) => {} + ) + this.Fs.createReadStream.calledWith(null, this.opts).should.equal(true) + return done() + }) + }) + return describe('error conditions', function() { + describe('when the file does not exist', function() { beforeEach(function() { - this.fakeCode = 'ENOENT'; - const err = new Error(); - err.code = this.fakeCode; - return this.Fs.open.callsArgWith(2, err, null); - }); + this.fakeCode = 'ENOENT' + const err = new Error() + err.code = this.fakeCode + return this.Fs.open.callsArgWith(2, err, null) + }) - return it("should give a NotFoundError", function(done) { - return 
this.FSPersistorManager.getFileStream(this.location, this.name1, this.opts, (err,res)=> { - expect(res).to.equal(null); - expect(err).to.not.equal(null); - expect(err instanceof this.Errors.NotFoundError).to.equal(true); - return done(); - }); - }); - }); - - return describe("when some other error happens", function() { + return it('should give a NotFoundError', function(done) { + return this.FSPersistorManager.getFileStream( + this.location, + this.name1, + this.opts, + (err, res) => { + expect(res).to.equal(null) + expect(err).to.not.equal(null) + expect(err instanceof this.Errors.NotFoundError).to.equal(true) + return done() + } + ) + }) + }) + return describe('when some other error happens', function() { beforeEach(function() { - this.fakeCode = 'SOMETHINGHORRIBLE'; - const err = new Error(); - err.code = this.fakeCode; - return this.Fs.open.callsArgWith(2, err, null); - }); + this.fakeCode = 'SOMETHINGHORRIBLE' + const err = new Error() + err.code = this.fakeCode + return this.Fs.open.callsArgWith(2, err, null) + }) - return it("should give an Error", function(done) { - return this.FSPersistorManager.getFileStream(this.location, this.name1, this.opts, (err,res)=> { - expect(res).to.equal(null); - expect(err).to.not.equal(null); - expect(err instanceof Error).to.equal(true); - return done(); - }); - }); - }); - }); - }); + return it('should give an Error', function(done) { + return this.FSPersistorManager.getFileStream( + this.location, + this.name1, + this.opts, + (err, res) => { + expect(res).to.equal(null) + expect(err).to.not.equal(null) + expect(err instanceof Error).to.equal(true) + return done() + } + ) + }) + }) + }) + }) - describe("getFileSize", function() { - it("should return the file size", function(done) { - const expectedFileSize = 75382; - this.Fs.stat.yields(new Error("fs.stat got unexpected arguments")); - this.Fs.stat.withArgs(`${this.location}/${this.name1Filtered}`) - .yields(null, { size: expectedFileSize }); + describe('getFileSize', function() { + it('should return the file size', function(done) { + const expectedFileSize = 75382 + this.Fs.stat.yields(new Error('fs.stat got unexpected arguments')) + this.Fs.stat + .withArgs(`${this.location}/${this.name1Filtered}`) + .yields(null, { size: expectedFileSize }) - return this.FSPersistorManager.getFileSize(this.location, this.name1, (err, fileSize) => { - if (err != null) { - return done(err); + return this.FSPersistorManager.getFileSize( + this.location, + this.name1, + (err, fileSize) => { + if (err != null) { + return done(err) + } + expect(fileSize).to.equal(expectedFileSize) + return done() } - expect(fileSize).to.equal(expectedFileSize); - return done(); - }); - }); + ) + }) - it("should throw a NotFoundError if the file does not exist", function(done) { - const error = new Error(); - error.code = "ENOENT"; - this.Fs.stat.yields(error); + it('should throw a NotFoundError if the file does not exist', function(done) { + const error = new Error() + error.code = 'ENOENT' + this.Fs.stat.yields(error) - return this.FSPersistorManager.getFileSize(this.location, this.name1, (err, fileSize) => { - expect(err).to.be.instanceof(this.Errors.NotFoundError); - return done(); - }); - }); + return this.FSPersistorManager.getFileSize( + this.location, + this.name1, + (err, fileSize) => { + expect(err).to.be.instanceof(this.Errors.NotFoundError) + return done() + } + ) + }) - return it("should rethrow any other error", function(done) { - const error = new Error(); - this.Fs.stat.yields(error); + return it('should rethrow any 
other error', function(done) { + const error = new Error() + this.Fs.stat.yields(error) - return this.FSPersistorManager.getFileSize(this.location, this.name1, (err, fileSize) => { - expect(err).to.equal(error); - return done(); - }); - }); - }); + return this.FSPersistorManager.getFileSize( + this.location, + this.name1, + (err, fileSize) => { + expect(err).to.equal(error) + return done() + } + ) + }) + }) - describe("copyFile", function() { + describe('copyFile', function() { beforeEach(function() { - this.ReadStream= { + this.ReadStream = { on() {}, - pipe:sinon.stub() - }; - this.WriteStream= - {on() {}}; - this.Fs.createReadStream.returns(this.ReadStream); - return this.Fs.createWriteStream.returns(this.WriteStream); - }); + pipe: sinon.stub() + } + this.WriteStream = { on() {} } + this.Fs.createReadStream.returns(this.ReadStream) + return this.Fs.createWriteStream.returns(this.WriteStream) + }) - it("Should open the source for reading", function(done) { - this.FSPersistorManager.copyFile(this.location, this.name1, this.name2, function() {}); - this.Fs.createReadStream.calledWith(`${this.location}/${this.name1Filtered}`).should.equal(true); - return done(); - }); + it('Should open the source for reading', function(done) { + this.FSPersistorManager.copyFile( + this.location, + this.name1, + this.name2, + function() {} + ) + this.Fs.createReadStream + .calledWith(`${this.location}/${this.name1Filtered}`) + .should.equal(true) + return done() + }) - it("Should open the target for writing", function(done) { - this.FSPersistorManager.copyFile(this.location, this.name1, this.name2, function() {}); - this.Fs.createWriteStream.calledWith(`${this.location}/${this.name2}`).should.equal(true); - return done(); - }); + it('Should open the target for writing', function(done) { + this.FSPersistorManager.copyFile( + this.location, + this.name1, + this.name2, + function() {} + ) + this.Fs.createWriteStream + .calledWith(`${this.location}/${this.name2}`) + .should.equal(true) + return done() + }) - return it("Should pipe the source to the target", function(done) { - this.FSPersistorManager.copyFile(this.location, this.name1, this.name2, function() {}); - this.ReadStream.pipe.calledWith(this.WriteStream).should.equal(true); - return done(); - }); - }); + return it('Should pipe the source to the target', function(done) { + this.FSPersistorManager.copyFile( + this.location, + this.name1, + this.name2, + function() {} + ) + this.ReadStream.pipe.calledWith(this.WriteStream).should.equal(true) + return done() + }) + }) - describe("deleteFile", function() { + describe('deleteFile', function() { beforeEach(function() { - return this.Fs.unlink.callsArgWith(1,this.error); - }); + return this.Fs.unlink.callsArgWith(1, this.error) + }) - it("Should call unlink with correct options", function(done) { - return this.FSPersistorManager.deleteFile(this.location, this.name1, err => { - this.Fs.unlink.calledWith(`${this.location}/${this.name1Filtered}`).should.equal(true); - return done(); - }); - }); + it('Should call unlink with correct options', function(done) { + return this.FSPersistorManager.deleteFile( + this.location, + this.name1, + err => { + this.Fs.unlink + .calledWith(`${this.location}/${this.name1Filtered}`) + .should.equal(true) + return done() + } + ) + }) - return it("Should propogate the error", function(done) { - return this.FSPersistorManager.deleteFile(this.location, this.name1, err => { - err.should.equal(this.error); - return done(); - }); - }); - }); + return it('Should propogate the error', 
function(done) { + return this.FSPersistorManager.deleteFile( + this.location, + this.name1, + err => { + err.should.equal(this.error) + return done() + } + ) + }) + }) - - describe("deleteDirectory", function() { + describe('deleteDirectory', function() { beforeEach(function() { - return this.Rimraf.callsArgWith(1,this.error); - }); + return this.Rimraf.callsArgWith(1, this.error) + }) - it("Should call rmdir(rimraf) with correct options", function(done) { - return this.FSPersistorManager.deleteDirectory(this.location, this.name1, err => { - this.Rimraf.calledWith(`${this.location}/${this.name1Filtered}`).should.equal(true); - return done(); - }); - }); + it('Should call rmdir(rimraf) with correct options', function(done) { + return this.FSPersistorManager.deleteDirectory( + this.location, + this.name1, + err => { + this.Rimraf.calledWith( + `${this.location}/${this.name1Filtered}` + ).should.equal(true) + return done() + } + ) + }) - return it("Should propogate the error", function(done) { - return this.FSPersistorManager.deleteDirectory(this.location, this.name1, err => { - err.should.equal(this.error); - return done(); - }); - }); - }); + return it('Should propogate the error', function(done) { + return this.FSPersistorManager.deleteDirectory( + this.location, + this.name1, + err => { + err.should.equal(this.error) + return done() + } + ) + }) + }) - describe("checkIfFileExists", function() { + describe('checkIfFileExists', function() { beforeEach(function() { - return this.Fs.exists.callsArgWith(1,true); - }); + return this.Fs.exists.callsArgWith(1, true) + }) - it("Should call exists with correct options", function(done) { - return this.FSPersistorManager.checkIfFileExists(this.location, this.name1, exists => { - this.Fs.exists.calledWith(`${this.location}/${this.name1Filtered}`).should.equal(true); - return done(); - }); - }); + it('Should call exists with correct options', function(done) { + return this.FSPersistorManager.checkIfFileExists( + this.location, + this.name1, + exists => { + this.Fs.exists + .calledWith(`${this.location}/${this.name1Filtered}`) + .should.equal(true) + return done() + } + ) + }) // fs.exists simply returns false on any error, so... 
- it("should not return an error", function(done) { - return this.FSPersistorManager.checkIfFileExists(this.location, this.name1, (err,exists) => { - expect(err).to.be.null; - return done(); - }); - }); + it('should not return an error', function(done) { + return this.FSPersistorManager.checkIfFileExists( + this.location, + this.name1, + (err, exists) => { + expect(err).to.be.null + return done() + } + ) + }) - it("Should return true for existing files", function(done) { - this.Fs.exists.callsArgWith(1,true); - return this.FSPersistorManager.checkIfFileExists(this.location, this.name1, (err,exists) => { - exists.should.be.true; - return done(); - }); - }); + it('Should return true for existing files', function(done) { + this.Fs.exists.callsArgWith(1, true) + return this.FSPersistorManager.checkIfFileExists( + this.location, + this.name1, + (err, exists) => { + exists.should.be.true + return done() + } + ) + }) - return it("Should return false for non-existing files", function(done) { - this.Fs.exists.callsArgWith(1,false); - return this.FSPersistorManager.checkIfFileExists(this.location, this.name1, (err,exists) => { - exists.should.be.false; - return done(); - }); - }); - }); + return it('Should return false for non-existing files', function(done) { + this.Fs.exists.callsArgWith(1, false) + return this.FSPersistorManager.checkIfFileExists( + this.location, + this.name1, + (err, exists) => { + exists.should.be.false + return done() + } + ) + }) + }) - return describe("directorySize", function() { + return describe('directorySize', function() { + it('should propogate the error', function(done) { + this.Fs.readdir.callsArgWith(1, this.error) + return this.FSPersistorManager.directorySize( + this.location, + this.name1, + (err, totalsize) => { + err.should.equal(this.error) + return done() + } + ) + }) - it("should propogate the error", function(done) { - this.Fs.readdir.callsArgWith(1, this.error); - return this.FSPersistorManager.directorySize(this.location, this.name1, (err, totalsize) => { - err.should.equal(this.error); - return done(); - }); - }); - - return it("should sum directory files size", function(done) { - this.Fs.readdir.callsArgWith(1, null, [ {'file1': 'file1'}, {'file2': 'file2'} ]); - this.Fs.fstatSync.returns({size : 1024}); - return this.FSPersistorManager.directorySize(this.location, this.name1, (err, totalsize) => { - expect(totalsize).to.equal(2048); - return done(); - }); - }); - }); -}); + return it('should sum directory files size', function(done) { + this.Fs.readdir.callsArgWith(1, null, [ + { file1: 'file1' }, + { file2: 'file2' } + ]) + this.Fs.fstatSync.returns({ size: 1024 }) + return this.FSPersistorManager.directorySize( + this.location, + this.name1, + (err, totalsize) => { + expect(totalsize).to.equal(2048) + return done() + } + ) + }) + }) +}) diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js index d60ca5fbd1..2505e87c9b 100644 --- a/services/filestore/test/unit/js/FileControllerTests.js +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -9,281 +9,289 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const { - assert -} = require("chai"); -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../app/js/FileController.js"; -const SandboxedModule = 
require('sandboxed-module'); +const { assert } = require('chai') +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../app/js/FileController.js' +const SandboxedModule = require('sandboxed-module') -describe("FileController", function() { +describe('FileController', function() { + beforeEach(function() { + this.PersistorManager = { + sendStream: sinon.stub(), + copyFile: sinon.stub(), + deleteFile: sinon.stub() + } - beforeEach(function() { - this.PersistorManager = { - sendStream: sinon.stub(), - copyFile: sinon.stub(), - deleteFile:sinon.stub() - }; + this.settings = { + s3: { + buckets: { + user_files: 'user_files' + } + } + } + this.FileHandler = { + getFile: sinon.stub(), + getFileSize: sinon.stub(), + deleteFile: sinon.stub(), + insertFile: sinon.stub(), + getDirectorySize: sinon.stub() + } + this.LocalFileWriter = {} + this.controller = SandboxedModule.require(modulePath, { + requires: { + './LocalFileWriter': this.LocalFileWriter, + './FileHandler': this.FileHandler, + './PersistorManager': this.PersistorManager, + './Errors': (this.Errors = { NotFoundError: sinon.stub() }), + 'settings-sharelatex': this.settings, + 'metrics-sharelatex': { + inc() {} + }, + 'logger-sharelatex': { + log() {}, + err() {} + } + } + }) + this.project_id = 'project_id' + this.file_id = 'file_id' + this.bucket = 'user_files' + this.key = `${this.project_id}/${this.file_id}` + this.req = { + key: this.key, + bucket: this.bucket, + query: {}, + params: { + project_id: this.project_id, + file_id: this.file_id + }, + headers: {} + } + this.res = { + set: sinon.stub().returnsThis(), + status: sinon.stub().returnsThis() + } + return (this.fileStream = {}) + }) - this.settings = { - s3: { - buckets: { - user_files:"user_files" - } - } - }; - this.FileHandler = { - getFile: sinon.stub(), - getFileSize: sinon.stub(), - deleteFile: sinon.stub(), - insertFile: sinon.stub(), - getDirectorySize: sinon.stub() - }; - this.LocalFileWriter = {}; - this.controller = SandboxedModule.require(modulePath, { requires: { - "./LocalFileWriter":this.LocalFileWriter, - "./FileHandler": this.FileHandler, - "./PersistorManager":this.PersistorManager, - "./Errors": (this.Errors = - {NotFoundError: sinon.stub()}), - "settings-sharelatex": this.settings, - "metrics-sharelatex": { - inc() {} - }, - "logger-sharelatex": { - log() {}, - err() {} - } - } - } - ); - this.project_id = "project_id"; - this.file_id = "file_id"; - this.bucket = "user_files"; - this.key = `${this.project_id}/${this.file_id}`; - this.req = { - key:this.key, - bucket:this.bucket, - query:{}, - params: { - project_id:this.project_id, - file_id:this.file_id - }, - headers: {} - }; - this.res = { - set: sinon.stub().returnsThis(), - status: sinon.stub().returnsThis() - }; - return this.fileStream = {};}); + describe('getFile', function() { + it('should pipe the stream', function(done) { + this.FileHandler.getFile.callsArgWith(3, null, this.fileStream) + this.fileStream.pipe = res => { + res.should.equal(this.res) + return done() + } + return this.controller.getFile(this.req, this.res) + }) - describe("getFile", function() { + it('should send a 200 if the cacheWarm param is true', function(done) { + this.req.query.cacheWarm = true + this.FileHandler.getFile.callsArgWith(3, null, this.fileStream) + this.res.send = statusCode => { + statusCode.should.equal(200) + return done() + } + return this.controller.getFile(this.req, this.res) + }) - it("should pipe the stream", 
function(done){ - this.FileHandler.getFile.callsArgWith(3, null, this.fileStream); - this.fileStream.pipe = res=> { - res.should.equal(this.res); - return done(); - }; - return this.controller.getFile(this.req, this.res); - }); + it('should send a 500 if there is a problem', function(done) { + this.FileHandler.getFile.callsArgWith(3, 'error') + this.res.send = code => { + code.should.equal(500) + return done() + } + return this.controller.getFile(this.req, this.res) + }) - it("should send a 200 if the cacheWarm param is true", function(done){ - this.req.query.cacheWarm = true; - this.FileHandler.getFile.callsArgWith(3, null, this.fileStream); - this.res.send = statusCode=> { - statusCode.should.equal(200); - return done(); - }; - return this.controller.getFile(this.req, this.res); - }); + return describe("with a 'Range' header set", function() { + beforeEach(function() { + return (this.req.headers.range = 'bytes=0-8') + }) - it("should send a 500 if there is a problem", function(done){ - this.FileHandler.getFile.callsArgWith(3, "error"); - this.res.send = code=> { - code.should.equal(500); - return done(); - }; - return this.controller.getFile(this.req, this.res); - }); + return it("should pass 'start' and 'end' options to FileHandler", function(done) { + this.FileHandler.getFile.callsArgWith(3, null, this.fileStream) + this.fileStream.pipe = res => { + expect(this.FileHandler.getFile.lastCall.args[2].start).to.equal(0) + expect(this.FileHandler.getFile.lastCall.args[2].end).to.equal(8) + return done() + } + return this.controller.getFile(this.req, this.res) + }) + }) + }) - return describe("with a 'Range' header set", function() { + describe('getFileHead', function() { + it('should return the file size in a Content-Length header', function(done) { + const expectedFileSize = 84921 + this.FileHandler.getFileSize.yields( + new Error('FileHandler.getFileSize: unexpected arguments') + ) + this.FileHandler.getFileSize + .withArgs(this.bucket, this.key) + .yields(null, expectedFileSize) - beforeEach(function() { - return this.req.headers.range = 'bytes=0-8'; - }); + this.res.end = () => { + expect(this.res.status.lastCall.args[0]).to.equal(200) + expect( + this.res.set.calledWith('Content-Length', expectedFileSize) + ).to.equal(true) + return done() + } - return it("should pass 'start' and 'end' options to FileHandler", function(done) { - this.FileHandler.getFile.callsArgWith(3, null, this.fileStream); - this.fileStream.pipe = res=> { - expect(this.FileHandler.getFile.lastCall.args[2].start).to.equal(0); - expect(this.FileHandler.getFile.lastCall.args[2].end).to.equal(8); - return done(); - }; - return this.controller.getFile(this.req, this.res); - }); - }); - }); + return this.controller.getFileHead(this.req, this.res) + }) - describe("getFileHead", function() { - it("should return the file size in a Content-Length header", function(done) { - const expectedFileSize = 84921; - this.FileHandler.getFileSize.yields( - new Error("FileHandler.getFileSize: unexpected arguments") - ); - this.FileHandler.getFileSize.withArgs(this.bucket, this.key).yields(null, expectedFileSize); + it('should return a 404 is the file is not found', function(done) { + this.FileHandler.getFileSize.yields(new this.Errors.NotFoundError()) - this.res.end = () => { - expect(this.res.status.lastCall.args[0]).to.equal(200); - expect(this.res.set.calledWith("Content-Length", expectedFileSize)).to.equal(true); - return done(); - }; + this.res.end = () => { + expect(this.res.status.lastCall.args[0]).to.equal(404) + return done() + 
} - return this.controller.getFileHead(this.req, this.res); - }); + return this.controller.getFileHead(this.req, this.res) + }) - it("should return a 404 is the file is not found", function(done) { - this.FileHandler.getFileSize.yields(new this.Errors.NotFoundError()); + return it('should return a 500 on internal errors', function(done) { + this.FileHandler.getFileSize.yields(new Error()) - this.res.end = () => { - expect(this.res.status.lastCall.args[0]).to.equal(404); - return done(); - }; + this.res.end = () => { + expect(this.res.status.lastCall.args[0]).to.equal(500) + return done() + } - return this.controller.getFileHead(this.req, this.res); - }); + return this.controller.getFileHead(this.req, this.res) + }) + }) - return it("should return a 500 on internal errors", function(done) { - this.FileHandler.getFileSize.yields(new Error()); + describe('insertFile', () => + it('should send bucket name key and res to PersistorManager', function(done) { + this.FileHandler.insertFile.callsArgWith(3) + this.res.send = () => { + this.FileHandler.insertFile + .calledWith(this.bucket, this.key, this.req) + .should.equal(true) + return done() + } + return this.controller.insertFile(this.req, this.res) + })) - this.res.end = () => { - expect(this.res.status.lastCall.args[0]).to.equal(500); - return done(); - }; + describe('copyFile', function() { + beforeEach(function() { + this.oldFile_id = 'old_file_id' + this.oldProject_id = 'old_project_id' + return (this.req.body = { + source: { + project_id: this.oldProject_id, + file_id: this.oldFile_id + } + }) + }) - return this.controller.getFileHead(this.req, this.res); - }); - }); + it('should send bucket name and both keys to PersistorManager', function(done) { + this.PersistorManager.copyFile.callsArgWith(3) + this.res.send = code => { + code.should.equal(200) + this.PersistorManager.copyFile + .calledWith( + this.bucket, + `${this.oldProject_id}/${this.oldFile_id}`, + this.key + ) + .should.equal(true) + return done() + } + return this.controller.copyFile(this.req, this.res) + }) - describe("insertFile", () => it("should send bucket name key and res to PersistorManager", function(done){ - this.FileHandler.insertFile.callsArgWith(3); - this.res.send = () => { - this.FileHandler.insertFile.calledWith(this.bucket, this.key, this.req).should.equal(true); - return done(); - }; - return this.controller.insertFile(this.req, this.res); - })); + it('should send a 404 if the original file was not found', function(done) { + this.PersistorManager.copyFile.callsArgWith( + 3, + new this.Errors.NotFoundError() + ) + this.res.send = code => { + code.should.equal(404) + return done() + } + return this.controller.copyFile(this.req, this.res) + }) - describe("copyFile", function() { - beforeEach(function() { - this.oldFile_id = "old_file_id"; - this.oldProject_id = "old_project_id"; - return this.req.body = { - source: { - project_id: this.oldProject_id, - file_id: this.oldFile_id - } - }; - }); + return it('should send a 500 if there was an error', function(done) { + this.PersistorManager.copyFile.callsArgWith(3, 'error') + this.res.send = code => { + code.should.equal(500) + return done() + } + return this.controller.copyFile(this.req, this.res) + }) + }) - it("should send bucket name and both keys to PersistorManager", function(done){ - this.PersistorManager.copyFile.callsArgWith(3); - this.res.send = code=> { - code.should.equal(200); - this.PersistorManager.copyFile.calledWith(this.bucket, `${this.oldProject_id}/${this.oldFile_id}`, this.key).should.equal(true); - 
return done(); - }; - return this.controller.copyFile(this.req, this.res); - }); + describe('delete file', function() { + it('should tell the file handler', function(done) { + this.FileHandler.deleteFile.callsArgWith(2) + this.res.send = code => { + code.should.equal(204) + this.FileHandler.deleteFile + .calledWith(this.bucket, this.key) + .should.equal(true) + return done() + } + return this.controller.deleteFile(this.req, this.res) + }) - it("should send a 404 if the original file was not found", function(done) { - this.PersistorManager.copyFile.callsArgWith(3, new this.Errors.NotFoundError()); - this.res.send = code=> { - code.should.equal(404); - return done(); - }; - return this.controller.copyFile(this.req, this.res); - }); + return it('should send a 500 if there was an error', function(done) { + this.FileHandler.deleteFile.callsArgWith(2, 'error') + this.res.send = function(code) { + code.should.equal(500) + return done() + } + return this.controller.deleteFile(this.req, this.res) + }) + }) - return it("should send a 500 if there was an error", function(done){ - this.PersistorManager.copyFile.callsArgWith(3, "error"); - this.res.send = code=> { - code.should.equal(500); - return done(); - }; - return this.controller.copyFile(this.req, this.res); - }); - }); + describe('_get_range', function() { + it('should parse a valid Range header', function(done) { + const result = this.controller._get_range('bytes=0-200') + expect(result).to.not.equal(null) + expect(result.start).to.equal(0) + expect(result.end).to.equal(200) + return done() + }) - describe("delete file", function() { + it('should return null for an invalid Range header', function(done) { + const result = this.controller._get_range('wat') + expect(result).to.equal(null) + return done() + }) - it("should tell the file handler", function(done){ - this.FileHandler.deleteFile.callsArgWith(2); - this.res.send = code=> { - code.should.equal(204); - this.FileHandler.deleteFile.calledWith(this.bucket, this.key).should.equal(true); - return done(); - }; - return this.controller.deleteFile(this.req, this.res); - }); + return it("should return null for any type other than 'bytes'", function(done) { + const result = this.controller._get_range('carrots=0-200') + expect(result).to.equal(null) + return done() + }) + }) - return it("should send a 500 if there was an error", function(done){ - this.FileHandler.deleteFile.callsArgWith(2, "error"); - this.res.send = function(code){ - code.should.equal(500); - return done(); - }; - return this.controller.deleteFile(this.req, this.res); - }); - }); + return describe('directorySize', function() { + it('should return total directory size bytes', function(done) { + this.FileHandler.getDirectorySize.callsArgWith(2, null, 1024) + return this.controller.directorySize(this.req, { + json: result => { + expect(result['total bytes']).to.equal(1024) + return done() + } + }) + }) - describe("_get_range", function() { - - it("should parse a valid Range header", function(done) { - const result = this.controller._get_range('bytes=0-200'); - expect(result).to.not.equal(null); - expect(result.start).to.equal(0); - expect(result.end).to.equal(200); - return done(); - }); - - it("should return null for an invalid Range header", function(done) { - const result = this.controller._get_range('wat'); - expect(result).to.equal(null); - return done(); - }); - - return it("should return null for any type other than 'bytes'", function(done) { - const result = this.controller._get_range('carrots=0-200'); - 
expect(result).to.equal(null); - return done(); - }); - }); - - return describe("directorySize", function() { - - it("should return total directory size bytes", function(done) { - this.FileHandler.getDirectorySize.callsArgWith(2, null, 1024); - return this.controller.directorySize(this.req, { json:result=> { - expect(result['total bytes']).to.equal(1024); - return done(); - } - } - ); - }); - - return it("should send a 500 if there was an error", function(done){ - this.FileHandler.getDirectorySize.callsArgWith(2, "error"); - this.res.send = function(code){ - code.should.equal(500); - return done(); - }; - return this.controller.directorySize(this.req, this.res); - }); - }); -}); + return it('should send a 500 if there was an error', function(done) { + this.FileHandler.getDirectorySize.callsArgWith(2, 'error') + this.res.send = function(code) { + code.should.equal(500) + return done() + } + return this.controller.directorySize(this.req, this.res) + }) + }) +}) diff --git a/services/filestore/test/unit/js/FileConverterTests.js b/services/filestore/test/unit/js/FileConverterTests.js index 96cb0ee53a..1b548d592e 100644 --- a/services/filestore/test/unit/js/FileConverterTests.js +++ b/services/filestore/test/unit/js/FileConverterTests.js @@ -10,110 +10,110 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const { - assert -} = require("chai"); -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../app/js/FileConverter.js"; -const SandboxedModule = require('sandboxed-module'); +const { assert } = require('chai') +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../app/js/FileConverter.js' +const SandboxedModule = require('sandboxed-module') -describe("FileConverter", function() { +describe('FileConverter', function() { + beforeEach(function() { + this.safe_exec = sinon.stub() + this.converter = SandboxedModule.require(modulePath, { + requires: { + './SafeExec': this.safe_exec, + 'logger-sharelatex': { + log() {}, + err() {} + }, + 'metrics-sharelatex': { + inc() {}, + Timer() { + return { done() {} } + } + }, + 'settings-sharelatex': (this.Settings = { + commands: { + convertCommandPrefix: [] + } + }) + } + }) - beforeEach(function() { + this.sourcePath = '/this/path/here.eps' + this.format = 'png' + return (this.error = 'Error') + }) - this.safe_exec = sinon.stub(); - this.converter = SandboxedModule.require(modulePath, { requires: { - "./SafeExec": this.safe_exec, - "logger-sharelatex": { - log() {}, - err() {} - }, - "metrics-sharelatex": { - inc() {}, - Timer() { - return {done() {}}; - } - }, - "settings-sharelatex": (this.Settings = { - commands: { - convertCommandPrefix: [] - } - }) - } - }); + describe('convert', function() { + it('should convert the source to the requested format', function(done) { + this.safe_exec.callsArgWith(2) + return this.converter.convert(this.sourcePath, this.format, err => { + const args = this.safe_exec.args[0][0] + args.indexOf(`${this.sourcePath}[0]`).should.not.equal(-1) + args.indexOf(`${this.sourcePath}.${this.format}`).should.not.equal(-1) + return done() + }) + }) - this.sourcePath = "/this/path/here.eps"; - this.format = "png"; - return this.error = "Error"; - }); + it('should return the dest path', function(done) { + this.safe_exec.callsArgWith(2) 
+ return this.converter.convert( + this.sourcePath, + this.format, + (err, destPath) => { + destPath.should.equal(`${this.sourcePath}.${this.format}`) + return done() + } + ) + }) - describe("convert", function() { + it('should return the error from convert', function(done) { + this.safe_exec.callsArgWith(2, this.error) + return this.converter.convert(this.sourcePath, this.format, err => { + err.should.equal(this.error) + return done() + }) + }) - it("should convert the source to the requested format", function(done){ - this.safe_exec.callsArgWith(2); - return this.converter.convert(this.sourcePath, this.format, err=> { - const args = this.safe_exec.args[0][0]; - args.indexOf(`${this.sourcePath}[0]`).should.not.equal(-1); - args.indexOf(`${this.sourcePath}.${this.format}`).should.not.equal(-1); - return done(); - }); - }); + it('should not accapt an non aproved format', function(done) { + this.safe_exec.callsArgWith(2) + return this.converter.convert(this.sourcePath, 'ahhhhh', err => { + expect(err).to.exist + return done() + }) + }) - it("should return the dest path", function(done){ - this.safe_exec.callsArgWith(2); - return this.converter.convert(this.sourcePath, this.format, (err, destPath)=> { - destPath.should.equal(`${this.sourcePath}.${this.format}`); - return done(); - }); - }); + return it('should prefix the command with Settings.commands.convertCommandPrefix', function(done) { + this.safe_exec.callsArgWith(2) + this.Settings.commands.convertCommandPrefix = ['nice'] + return this.converter.convert(this.sourcePath, this.format, err => { + const command = this.safe_exec.args[0][0] + command[0].should.equal('nice') + return done() + }) + }) + }) - it("should return the error from convert", function(done){ - this.safe_exec.callsArgWith(2, this.error); - return this.converter.convert(this.sourcePath, this.format, err=> { - err.should.equal(this.error); - return done(); - }); - }); + describe('thumbnail', () => + it('should call converter resize with args', function(done) { + this.safe_exec.callsArgWith(2) + return this.converter.thumbnail(this.sourcePath, err => { + const args = this.safe_exec.args[0][0] + args.indexOf(`${this.sourcePath}[0]`).should.not.equal(-1) + return done() + }) + })) - it("should not accapt an non aproved format", function(done){ - this.safe_exec.callsArgWith(2); - return this.converter.convert(this.sourcePath, "ahhhhh", err=> { - expect(err).to.exist; - return done(); - }); - }); - - return it("should prefix the command with Settings.commands.convertCommandPrefix", function(done) { - this.safe_exec.callsArgWith(2); - this.Settings.commands.convertCommandPrefix = ["nice"]; - return this.converter.convert(this.sourcePath, this.format, err=> { - const command = this.safe_exec.args[0][0]; - command[0].should.equal("nice"); - return done(); - }); - }); - }); - - describe("thumbnail", () => it("should call converter resize with args", function(done){ - this.safe_exec.callsArgWith(2); - return this.converter.thumbnail(this.sourcePath, err=> { - const args = this.safe_exec.args[0][0]; - args.indexOf(`${this.sourcePath}[0]`).should.not.equal(-1); - return done(); - }); - })); - - return describe("preview", () => it("should call converter resize with args", function(done){ - this.safe_exec.callsArgWith(2); - return this.converter.preview(this.sourcePath, err=> { - const args = this.safe_exec.args[0][0]; - args.indexOf(`${this.sourcePath}[0]`).should.not.equal(-1); - return done(); - }); - })); -}); + return describe('preview', () => + it('should call converter resize 
with args', function(done) { + this.safe_exec.callsArgWith(2) + return this.converter.preview(this.sourcePath, err => { + const args = this.safe_exec.args[0][0] + args.indexOf(`${this.sourcePath}[0]`).should.not.equal(-1) + return done() + }) + })) +}) diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index 13c60f08da..e641ffdd16 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -10,273 +10,359 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const { - assert -} = require("chai"); -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../app/js/FileHandler.js"; -const SandboxedModule = require('sandboxed-module'); +const { assert } = require('chai') +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../app/js/FileHandler.js' +const SandboxedModule = require('sandboxed-module') -describe("FileHandler", function() { +describe('FileHandler', function() { + beforeEach(function() { + this.settings = { + s3: { + buckets: { + user_files: 'user_files' + } + } + } + this.PersistorManager = { + getFileStream: sinon.stub(), + checkIfFileExists: sinon.stub(), + deleteFile: sinon.stub(), + deleteDirectory: sinon.stub(), + sendStream: sinon.stub(), + insertFile: sinon.stub(), + directorySize: sinon.stub() + } + this.LocalFileWriter = { + writeStream: sinon.stub(), + getStream: sinon.stub(), + deleteFile: sinon.stub() + } + this.FileConverter = { + convert: sinon.stub(), + thumbnail: sinon.stub(), + preview: sinon.stub() + } + this.keyBuilder = { + addCachingToKey: sinon.stub(), + getConvertedFolderKey: sinon.stub() + } + this.ImageOptimiser = { compressPng: sinon.stub() } + this.handler = SandboxedModule.require(modulePath, { + requires: { + 'settings-sharelatex': this.settings, + './PersistorManager': this.PersistorManager, + './LocalFileWriter': this.LocalFileWriter, + './FileConverter': this.FileConverter, + './KeyBuilder': this.keyBuilder, + './ImageOptimiser': this.ImageOptimiser, + 'logger-sharelatex': { + log() {}, + err() {} + } + } + }) + this.bucket = 'my_bucket' + this.key = 'key/here' + this.stubbedPath = '/var/somewhere/path' + this.format = 'png' + return (this.formattedStubbedPath = `${this.stubbedPath}.${this.format}`) + }) - beforeEach(function() { - this.settings = { - s3: { - buckets: { - user_files:"user_files" - } - } - }; - this.PersistorManager = { - getFileStream: sinon.stub(), - checkIfFileExists: sinon.stub(), - deleteFile: sinon.stub(), - deleteDirectory: sinon.stub(), - sendStream: sinon.stub(), - insertFile: sinon.stub(), - directorySize: sinon.stub() - }; - this.LocalFileWriter = { - writeStream: sinon.stub(), - getStream: sinon.stub(), - deleteFile: sinon.stub() - }; - this.FileConverter = { - convert: sinon.stub(), - thumbnail: sinon.stub(), - preview: sinon.stub() - }; - this.keyBuilder = { - addCachingToKey: sinon.stub(), - getConvertedFolderKey: sinon.stub() - }; - this.ImageOptimiser = - {compressPng: sinon.stub()}; - this.handler = SandboxedModule.require(modulePath, { requires: { - "settings-sharelatex": this.settings, - "./PersistorManager":this.PersistorManager, - "./LocalFileWriter":this.LocalFileWriter, - 
"./FileConverter":this.FileConverter, - "./KeyBuilder": this.keyBuilder, - "./ImageOptimiser":this.ImageOptimiser, - "logger-sharelatex": { - log() {}, - err() {} - } - } - } - ); - this.bucket = "my_bucket"; - this.key = "key/here"; - this.stubbedPath = "/var/somewhere/path"; - this.format = "png"; - return this.formattedStubbedPath = `${this.stubbedPath}.${this.format}`; - }); + describe('insertFile', function() { + beforeEach(function() { + this.stream = {} + this.PersistorManager.deleteDirectory.callsArgWith(2) + return this.PersistorManager.sendStream.callsArgWith(3) + }) - describe("insertFile", function() { - beforeEach(function() { - this.stream = {}; - this.PersistorManager.deleteDirectory.callsArgWith(2); - return this.PersistorManager.sendStream.callsArgWith(3); - }); + it('should send file to the filestore', function(done) { + return this.handler.insertFile(this.bucket, this.key, this.stream, () => { + this.PersistorManager.sendStream + .calledWith(this.bucket, this.key, this.stream) + .should.equal(true) + return done() + }) + }) - it("should send file to the filestore", function(done){ - return this.handler.insertFile(this.bucket, this.key, this.stream, () => { - this.PersistorManager.sendStream.calledWith(this.bucket, this.key, this.stream).should.equal(true); - return done(); - }); - }); + return it('should delete the convetedKey folder', function(done) { + this.keyBuilder.getConvertedFolderKey.returns(this.stubbedConvetedKey) + return this.handler.insertFile(this.bucket, this.key, this.stream, () => { + this.PersistorManager.deleteDirectory + .calledWith(this.bucket, this.stubbedConvetedKey) + .should.equal(true) + return done() + }) + }) + }) - return it("should delete the convetedKey folder", function(done){ - this.keyBuilder.getConvertedFolderKey.returns(this.stubbedConvetedKey); - return this.handler.insertFile(this.bucket, this.key, this.stream, () => { - this.PersistorManager.deleteDirectory.calledWith(this.bucket, this.stubbedConvetedKey).should.equal(true); - return done(); - }); - }); - }); + describe('deleteFile', function() { + beforeEach(function() { + this.keyBuilder.getConvertedFolderKey.returns(this.stubbedConvetedKey) + this.PersistorManager.deleteFile.callsArgWith(2) + return this.PersistorManager.deleteDirectory.callsArgWith(2) + }) - describe("deleteFile", function() { - beforeEach(function() { - this.keyBuilder.getConvertedFolderKey.returns(this.stubbedConvetedKey); - this.PersistorManager.deleteFile.callsArgWith(2); - return this.PersistorManager.deleteDirectory.callsArgWith(2); - }); + it('should tell the filestore manager to delete the file', function(done) { + return this.handler.deleteFile(this.bucket, this.key, () => { + this.PersistorManager.deleteFile + .calledWith(this.bucket, this.key) + .should.equal(true) + return done() + }) + }) - it("should tell the filestore manager to delete the file", function(done){ - return this.handler.deleteFile(this.bucket, this.key, () => { - this.PersistorManager.deleteFile.calledWith(this.bucket, this.key).should.equal(true); - return done(); - }); - }); + return it('should tell the filestore manager to delete the cached foler', function(done) { + return this.handler.deleteFile(this.bucket, this.key, () => { + this.PersistorManager.deleteDirectory + .calledWith(this.bucket, this.stubbedConvetedKey) + .should.equal(true) + return done() + }) + }) + }) - return it("should tell the filestore manager to delete the cached foler", function(done){ - return this.handler.deleteFile(this.bucket, this.key, () => { - 
this.PersistorManager.deleteDirectory.calledWith(this.bucket, this.stubbedConvetedKey).should.equal(true); - return done(); - }); - }); - }); + describe('getFile', function() { + beforeEach(function() { + this.handler._getStandardFile = sinon.stub().callsArgWith(3) + return (this.handler._getConvertedFile = sinon.stub().callsArgWith(3)) + }) - describe("getFile", function() { - beforeEach(function() { - this.handler._getStandardFile = sinon.stub().callsArgWith(3); - return this.handler._getConvertedFile = sinon.stub().callsArgWith(3); - }); + it('should call _getStandardFile if no format or style are defined', function(done) { + return this.handler.getFile(this.bucket, this.key, null, () => { + this.handler._getStandardFile.called.should.equal(true) + this.handler._getConvertedFile.called.should.equal(false) + return done() + }) + }) - it("should call _getStandardFile if no format or style are defined", function(done){ + it('should pass options to _getStandardFile', function(done) { + const options = { start: 0, end: 8 } + return this.handler.getFile(this.bucket, this.key, options, () => { + expect(this.handler._getStandardFile.lastCall.args[2].start).to.equal(0) + expect(this.handler._getStandardFile.lastCall.args[2].end).to.equal(8) + return done() + }) + }) - return this.handler.getFile(this.bucket, this.key, null, () => { - this.handler._getStandardFile.called.should.equal(true); - this.handler._getConvertedFile.called.should.equal(false); - return done(); - }); - }); + return it('should call _getConvertedFile if a format is defined', function(done) { + return this.handler.getFile( + this.bucket, + this.key, + { format: 'png' }, + () => { + this.handler._getStandardFile.called.should.equal(false) + this.handler._getConvertedFile.called.should.equal(true) + return done() + } + ) + }) + }) - it("should pass options to _getStandardFile", function(done) { - const options = {start: 0, end: 8}; - return this.handler.getFile(this.bucket, this.key, options, () => { - expect(this.handler._getStandardFile.lastCall.args[2].start).to.equal(0); - expect(this.handler._getStandardFile.lastCall.args[2].end).to.equal(8); - return done(); - }); - }); + describe('_getStandardFile', function() { + beforeEach(function() { + this.fileStream = { on() {} } + return this.PersistorManager.getFileStream.callsArgWith( + 3, + 'err', + this.fileStream + ) + }) - return it("should call _getConvertedFile if a format is defined", function(done){ - return this.handler.getFile(this.bucket, this.key, {format:"png"}, () => { - this.handler._getStandardFile.called.should.equal(false); - this.handler._getConvertedFile.called.should.equal(true); - return done(); - }); - }); - }); + it('should get the stream', function(done) { + return this.handler.getFile(this.bucket, this.key, null, () => { + this.PersistorManager.getFileStream + .calledWith(this.bucket, this.key) + .should.equal(true) + return done() + }) + }) - describe("_getStandardFile", function() { + it('should return the stream and error', function(done) { + return this.handler.getFile( + this.bucket, + this.key, + null, + (err, stream) => { + err.should.equal('err') + stream.should.equal(this.fileStream) + return done() + } + ) + }) - beforeEach(function() { - this.fileStream = {on() {}}; - return this.PersistorManager.getFileStream.callsArgWith(3, "err", this.fileStream); - }); + return it('should pass options to PersistorManager', function(done) { + return this.handler.getFile( + this.bucket, + this.key, + { start: 0, end: 8 }, + () => { + expect( + 
this.PersistorManager.getFileStream.lastCall.args[2].start + ).to.equal(0) + expect( + this.PersistorManager.getFileStream.lastCall.args[2].end + ).to.equal(8) + return done() + } + ) + }) + }) - it("should get the stream", function(done){ - return this.handler.getFile(this.bucket, this.key, null, () => { - this.PersistorManager.getFileStream.calledWith(this.bucket, this.key).should.equal(true); - return done(); - }); - }); + describe('_getConvertedFile', function() { + it('should getFileStream if it does exists', function(done) { + this.PersistorManager.checkIfFileExists.callsArgWith(2, null, true) + this.PersistorManager.getFileStream.callsArgWith(3) + return this.handler._getConvertedFile(this.bucket, this.key, {}, () => { + this.PersistorManager.getFileStream + .calledWith(this.bucket) + .should.equal(true) + return done() + }) + }) - it("should return the stream and error", function(done){ - return this.handler.getFile(this.bucket, this.key, null, (err, stream)=> { - err.should.equal("err"); - stream.should.equal(this.fileStream); - return done(); - }); - }); + return it('should call _getConvertedFileAndCache if it does exists', function(done) { + this.PersistorManager.checkIfFileExists.callsArgWith(2, null, false) + this.handler._getConvertedFileAndCache = sinon.stub().callsArgWith(4) + return this.handler._getConvertedFile(this.bucket, this.key, {}, () => { + this.handler._getConvertedFileAndCache + .calledWith(this.bucket, this.key) + .should.equal(true) + return done() + }) + }) + }) - return it("should pass options to PersistorManager", function(done) { - return this.handler.getFile(this.bucket, this.key, {start: 0, end: 8}, () => { - expect(this.PersistorManager.getFileStream.lastCall.args[2].start).to.equal(0); - expect(this.PersistorManager.getFileStream.lastCall.args[2].end).to.equal(8); - return done(); - }); - }); - }); + describe('_getConvertedFileAndCache', () => + it('should _convertFile ', function(done) { + this.stubbedStream = { something: 'here' } + this.localStream = { + on() {} + } + this.PersistorManager.sendFile = sinon.stub().callsArgWith(3) + this.LocalFileWriter.getStream = sinon + .stub() + .callsArgWith(1, null, this.localStream) + this.convetedKey = this.key + 'converted' + this.handler._convertFile = sinon + .stub() + .callsArgWith(3, null, this.stubbedPath) + this.ImageOptimiser.compressPng = sinon.stub().callsArgWith(1) + return this.handler._getConvertedFileAndCache( + this.bucket, + this.key, + this.convetedKey, + {}, + (err, fsStream) => { + this.handler._convertFile.called.should.equal(true) + this.PersistorManager.sendFile + .calledWith(this.bucket, this.convetedKey, this.stubbedPath) + .should.equal(true) + this.ImageOptimiser.compressPng + .calledWith(this.stubbedPath) + .should.equal(true) + this.LocalFileWriter.getStream + .calledWith(this.stubbedPath) + .should.equal(true) + fsStream.should.equal(this.localStream) + return done() + } + ) + })) + describe('_convertFile', function() { + beforeEach(function() { + this.FileConverter.convert.callsArgWith( + 2, + null, + this.formattedStubbedPath + ) + this.FileConverter.thumbnail.callsArgWith( + 1, + null, + this.formattedStubbedPath + ) + this.FileConverter.preview.callsArgWith( + 1, + null, + this.formattedStubbedPath + ) + this.handler._writeS3FileToDisk = sinon + .stub() + .callsArgWith(3, null, this.stubbedPath) + return this.LocalFileWriter.deleteFile.callsArgWith(1) + }) - describe("_getConvertedFile", function() { + it('should call thumbnail on the writer path if style was thumbnail was 
specified', function(done) { + return this.handler._convertFile( + this.bucket, + this.key, + { style: 'thumbnail' }, + (err, path) => { + path.should.equal(this.formattedStubbedPath) + this.FileConverter.thumbnail + .calledWith(this.stubbedPath) + .should.equal(true) + this.LocalFileWriter.deleteFile + .calledWith(this.stubbedPath) + .should.equal(true) + return done() + } + ) + }) - it("should getFileStream if it does exists", function(done){ - this.PersistorManager.checkIfFileExists.callsArgWith(2, null, true); - this.PersistorManager.getFileStream.callsArgWith(3); - return this.handler._getConvertedFile(this.bucket, this.key, {}, () => { - this.PersistorManager.getFileStream.calledWith(this.bucket).should.equal(true); - return done(); - }); - }); + it('should call preview on the writer path if style was preview was specified', function(done) { + return this.handler._convertFile( + this.bucket, + this.key, + { style: 'preview' }, + (err, path) => { + path.should.equal(this.formattedStubbedPath) + this.FileConverter.preview + .calledWith(this.stubbedPath) + .should.equal(true) + this.LocalFileWriter.deleteFile + .calledWith(this.stubbedPath) + .should.equal(true) + return done() + } + ) + }) - return it("should call _getConvertedFileAndCache if it does exists", function(done){ - this.PersistorManager.checkIfFileExists.callsArgWith(2, null, false); - this.handler._getConvertedFileAndCache = sinon.stub().callsArgWith(4); - return this.handler._getConvertedFile(this.bucket, this.key, {}, () => { - this.handler._getConvertedFileAndCache.calledWith(this.bucket, this.key).should.equal(true); - return done(); - }); - }); - }); + return it('should call convert on the writer path if a format was specified', function(done) { + return this.handler._convertFile( + this.bucket, + this.key, + { format: this.format }, + (err, path) => { + path.should.equal(this.formattedStubbedPath) + this.FileConverter.convert + .calledWith(this.stubbedPath, this.format) + .should.equal(true) + this.LocalFileWriter.deleteFile + .calledWith(this.stubbedPath) + .should.equal(true) + return done() + } + ) + }) + }) - describe("_getConvertedFileAndCache", () => it("should _convertFile ", function(done){ - this.stubbedStream = {"something":"here"}; - this.localStream = { - on() {} - }; - this.PersistorManager.sendFile = sinon.stub().callsArgWith(3); - this.LocalFileWriter.getStream = sinon.stub().callsArgWith(1, null, this.localStream); - this.convetedKey = this.key+"converted"; - this.handler._convertFile = sinon.stub().callsArgWith(3, null, this.stubbedPath); - this.ImageOptimiser.compressPng = sinon.stub().callsArgWith(1); - return this.handler._getConvertedFileAndCache(this.bucket, this.key, this.convetedKey, {}, (err, fsStream)=> { - this.handler._convertFile.called.should.equal(true); - this.PersistorManager.sendFile.calledWith(this.bucket, this.convetedKey, this.stubbedPath).should.equal(true); - this.ImageOptimiser.compressPng.calledWith(this.stubbedPath).should.equal(true); - this.LocalFileWriter.getStream.calledWith(this.stubbedPath).should.equal(true); - fsStream.should.equal(this.localStream); - return done(); - }); - })); + return describe('getDirectorySize', function() { + beforeEach(function() { + return this.PersistorManager.directorySize.callsArgWith(2) + }) - describe("_convertFile", function() { - beforeEach(function() { - this.FileConverter.convert.callsArgWith(2, null, this.formattedStubbedPath); - this.FileConverter.thumbnail.callsArgWith(1, null, this.formattedStubbedPath); - 
this.FileConverter.preview.callsArgWith(1, null, this.formattedStubbedPath); - this.handler._writeS3FileToDisk = sinon.stub().callsArgWith(3, null, this.stubbedPath); - return this.LocalFileWriter.deleteFile.callsArgWith(1); - }); - - it("should call thumbnail on the writer path if style was thumbnail was specified", function(done){ - return this.handler._convertFile(this.bucket, this.key, {style:"thumbnail"}, (err, path)=> { - path.should.equal(this.formattedStubbedPath); - this.FileConverter.thumbnail.calledWith(this.stubbedPath).should.equal(true); - this.LocalFileWriter.deleteFile.calledWith(this.stubbedPath).should.equal(true); - return done(); - }); - }); - - it("should call preview on the writer path if style was preview was specified", function(done){ - return this.handler._convertFile(this.bucket, this.key, {style:"preview"}, (err, path)=> { - path.should.equal(this.formattedStubbedPath); - this.FileConverter.preview.calledWith(this.stubbedPath).should.equal(true); - this.LocalFileWriter.deleteFile.calledWith(this.stubbedPath).should.equal(true); - return done(); - }); - }); - - return it("should call convert on the writer path if a format was specified", function(done){ - return this.handler._convertFile(this.bucket, this.key, {format:this.format}, (err, path)=> { - path.should.equal(this.formattedStubbedPath); - this.FileConverter.convert.calledWith(this.stubbedPath, this.format).should.equal(true); - this.LocalFileWriter.deleteFile.calledWith(this.stubbedPath).should.equal(true); - return done(); - }); - }); - }); - - return describe("getDirectorySize", function() { - - beforeEach(function() { - return this.PersistorManager.directorySize.callsArgWith(2); - }); - - return it("should call the filestore manager to get directory size", function(done){ - return this.handler.getDirectorySize(this.bucket, this.key, () => { - this.PersistorManager.directorySize.calledWith(this.bucket, this.key).should.equal(true); - return done(); - }); - }); - }); -}); + return it('should call the filestore manager to get directory size', function(done) { + return this.handler.getDirectorySize(this.bucket, this.key, () => { + this.PersistorManager.directorySize + .calledWith(this.bucket, this.key) + .should.equal(true) + return done() + }) + }) + }) +}) diff --git a/services/filestore/test/unit/js/ImageOptimiserTests.js b/services/filestore/test/unit/js/ImageOptimiserTests.js index 4e6cb858f4..6fdb09f89d 100644 --- a/services/filestore/test/unit/js/ImageOptimiserTests.js +++ b/services/filestore/test/unit/js/ImageOptimiserTests.js @@ -10,82 +10,72 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const { - assert -} = require("chai"); -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../app/js/ImageOptimiser.js"; -const SandboxedModule = require('sandboxed-module'); +const { assert } = require('chai') +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../app/js/ImageOptimiser.js' +const SandboxedModule = require('sandboxed-module') -describe("ImageOptimiser", function() { +describe('ImageOptimiser', function() { + beforeEach(function() { + this.child_process = { exec: sinon.stub() } + this.settings = { enableConversions: true } + this.optimiser = SandboxedModule.require(modulePath, { + requires: { 
+ child_process: this.child_process, + 'logger-sharelatex': { + log() {}, + err() {}, + warn() {} + }, + 'settings-sharelatex': this.settings + } + }) - beforeEach(function() { - this.child_process = - {exec : sinon.stub()}; - this.settings = - {enableConversions:true}; - this.optimiser = SandboxedModule.require(modulePath, { requires: { - 'child_process': this.child_process, - "logger-sharelatex": { - log() {}, - err() {}, - warn() {} - }, - "settings-sharelatex": this.settings - } - } - ); - + this.sourcePath = '/this/path/here.eps' + return (this.error = 'Error') + }) - this.sourcePath = "/this/path/here.eps"; - return this.error = "Error"; - }); + describe('compressPng', function() { + it('convert the file', function(done) { + this.child_process.exec.callsArgWith(2) + return this.optimiser.compressPng(this.sourcePath, err => { + const args = this.child_process.exec.args[0][0] + args.should.equal(`optipng ${this.sourcePath}`) + return done() + }) + }) - describe("compressPng", function() { - + return it('should return the error', function(done) { + this.child_process.exec.callsArgWith(2, this.error) + return this.optimiser.compressPng(this.sourcePath, err => { + err.should.equal(this.error) + return done() + }) + }) + }) - it("convert the file", function(done){ - this.child_process.exec.callsArgWith(2); - return this.optimiser.compressPng(this.sourcePath, err=> { - const args = this.child_process.exec.args[0][0]; - args.should.equal(`optipng ${this.sourcePath}`); - return done(); - }); - }); + describe('when enableConversions is disabled', () => + it('should produce an error', function(done) { + this.settings.enableConversions = false + this.child_process.exec.callsArgWith(2) + return this.optimiser.compressPng(this.sourcePath, err => { + this.child_process.exec.called.should.equal(false) + expect(err).to.exist + return done() + }) + })) - - return it("should return the error", function(done){ - this.child_process.exec.callsArgWith(2, this.error); - return this.optimiser.compressPng(this.sourcePath, err=> { - err.should.equal(this.error); - return done(); - }); - }); - }); - - describe('when enableConversions is disabled', () => it('should produce an error', function(done) { - this.settings.enableConversions = false; - this.child_process.exec.callsArgWith(2); - return this.optimiser.compressPng(this.sourcePath, err=> { - this.child_process.exec.called.should.equal(false); - expect(err).to.exist; - return done(); - }); - })); - - - return describe('when optimiser is sigkilled', () => it('should not produce an error', function(done) { - this.error = new Error('woops'); - this.error.signal = 'SIGKILL'; - this.child_process.exec.callsArgWith(2, this.error); - return this.optimiser.compressPng(this.sourcePath, err=> { - expect(err).to.equal(null); - return done(); - }); - })); -}); + return describe('when optimiser is sigkilled', () => + it('should not produce an error', function(done) { + this.error = new Error('woops') + this.error.signal = 'SIGKILL' + this.child_process.exec.callsArgWith(2, this.error) + return this.optimiser.compressPng(this.sourcePath, err => { + expect(err).to.equal(null) + return done() + }) + })) +}) diff --git a/services/filestore/test/unit/js/KeybuilderTests.js b/services/filestore/test/unit/js/KeybuilderTests.js index 1e99899be7..09a0ea8717 100644 --- a/services/filestore/test/unit/js/KeybuilderTests.js +++ b/services/filestore/test/unit/js/KeybuilderTests.js @@ -10,56 +10,49 @@ * Full docs: 
https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const { - assert -} = require("chai"); -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../app/js/KeyBuilder.js"; -const SandboxedModule = require('sandboxed-module'); +const { assert } = require('chai') +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../app/js/KeyBuilder.js' +const SandboxedModule = require('sandboxed-module') -describe("LocalFileWriter", function() { +describe('LocalFileWriter', function() { + beforeEach(function() { + this.keyBuilder = SandboxedModule.require(modulePath, { + requires: { + 'logger-sharelatex': { + log() {}, + err() {} + } + } + }) + return (this.key = '123/456') + }) - beforeEach(function() { + return describe('cachedKey', function() { + it('should add the fomat on', function() { + const opts = { format: 'png' } + const newKey = this.keyBuilder.addCachingToKey(this.key, opts) + return newKey.should.equal(`${this.key}-converted-cache/format-png`) + }) - this.keyBuilder = SandboxedModule.require(modulePath, { requires: { - "logger-sharelatex": { - log() {}, - err() {} - } - } - } - ); - return this.key = "123/456"; - }); - - return describe("cachedKey", function() { + it('should add the style on', function() { + const opts = { style: 'thumbnail' } + const newKey = this.keyBuilder.addCachingToKey(this.key, opts) + return newKey.should.equal(`${this.key}-converted-cache/style-thumbnail`) + }) - it("should add the fomat on", function() { - const opts = - {format: "png"}; - const newKey = this.keyBuilder.addCachingToKey(this.key, opts); - return newKey.should.equal(`${this.key}-converted-cache/format-png`); - }); - - it("should add the style on", function() { - const opts = - {style: "thumbnail"}; - const newKey = this.keyBuilder.addCachingToKey(this.key, opts); - return newKey.should.equal(`${this.key}-converted-cache/style-thumbnail`); - }); - - return it("should add format on first", function() { - const opts = { - style: "thumbnail", - format: "png" - }; - const newKey = this.keyBuilder.addCachingToKey(this.key, opts); - return newKey.should.equal(`${this.key}-converted-cache/format-png-style-thumbnail`); - }); - }); -}); + return it('should add format on first', function() { + const opts = { + style: 'thumbnail', + format: 'png' + } + const newKey = this.keyBuilder.addCachingToKey(this.key, opts) + return newKey.should.equal( + `${this.key}-converted-cache/format-png-style-thumbnail` + ) + }) + }) +}) diff --git a/services/filestore/test/unit/js/LocalFileWriterTests.js b/services/filestore/test/unit/js/LocalFileWriterTests.js index 1bf131a3c4..e7ea993ab9 100644 --- a/services/filestore/test/unit/js/LocalFileWriterTests.js +++ b/services/filestore/test/unit/js/LocalFileWriterTests.js @@ -11,118 +11,112 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const { - assert -} = require("chai"); -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../app/js/LocalFileWriter.js"; -const SandboxedModule = require('sandboxed-module'); +const { assert } = require('chai') +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../app/js/LocalFileWriter.js' +const 
SandboxedModule = require('sandboxed-module') -describe("LocalFileWriter", function() { +describe('LocalFileWriter', function() { + beforeEach(function() { + this.writeStream = { + on(type, cb) { + if (type === 'finish') { + return cb() + } + } + } + this.readStream = { on() {} } + this.fs = { + createWriteStream: sinon.stub().returns(this.writeStream), + createReadStream: sinon.stub().returns(this.readStream), + unlink: sinon.stub() + } + this.settings = { + path: { + uploadFolder: 'somewhere' + } + } + this.writer = SandboxedModule.require(modulePath, { + requires: { + fs: this.fs, + 'logger-sharelatex': { + log() {}, + err() {} + }, + 'settings-sharelatex': this.settings, + 'metrics-sharelatex': { + inc() {}, + Timer() { + return { done() {} } + } + } + } + }) - beforeEach(function() { + return (this.stubbedFsPath = 'something/uploads/eio2k1j3') + }) - this.writeStream = { - on(type, cb){ - if (type === "finish") { - return cb(); - } - } - }; - this.readStream = - {on() {}}; - this.fs = { - createWriteStream : sinon.stub().returns(this.writeStream), - createReadStream: sinon.stub().returns(this.readStream), - unlink: sinon.stub() - }; - this.settings = { - path: { - uploadFolder:"somewhere" - } - }; - this.writer = SandboxedModule.require(modulePath, { requires: { - "fs": this.fs, - "logger-sharelatex": { - log() {}, - err() {} - }, - "settings-sharelatex":this.settings, - "metrics-sharelatex": { - inc() {}, - Timer() { - return {done() {}}; - } - } - } - } - ); + describe('writeStrem', function() { + beforeEach(function() { + return (this.writer._getPath = sinon.stub().returns(this.stubbedFsPath)) + }) - return this.stubbedFsPath = "something/uploads/eio2k1j3"; - }); + it('write the stream to ./uploads', function(done) { + const stream = { + pipe: dest => { + dest.should.equal(this.writeStream) + return done() + }, + on() {} + } + return this.writer.writeStream(stream, null, () => {}) + }) - describe("writeStrem", function() { - beforeEach(function() { - return this.writer._getPath = sinon.stub().returns(this.stubbedFsPath); - }); + return it('should send the path in the callback', function(done) { + const stream = { + pipe: dest => {}, + on(type, cb) { + if (type === 'end') { + return cb() + } + } + } + return this.writer.writeStream(stream, null, (err, fsPath) => { + fsPath.should.equal(this.stubbedFsPath) + return done() + }) + }) + }) - it("write the stream to ./uploads", function(done){ - const stream = { - pipe: dest=> { - dest.should.equal(this.writeStream); - return done(); - }, - on() {} - }; - return this.writer.writeStream(stream, null, ()=> {}); - }); + describe('getStream', function() { + it('should read the stream from the file ', function(done) { + return this.writer.getStream(this.stubbedFsPath, (err, stream) => { + this.fs.createReadStream + .calledWith(this.stubbedFsPath) + .should.equal(true) + return done() + }) + }) - return it("should send the path in the callback", function(done){ - const stream = { - pipe: dest=> {}, - on(type, cb){ - if (type === "end") { - return cb(); - } - } - }; - return this.writer.writeStream(stream, null, (err, fsPath)=> { - fsPath.should.equal(this.stubbedFsPath); - return done(); - }); - }); - }); - - describe("getStream", function() { - - it("should read the stream from the file ", function(done){ - return this.writer.getStream(this.stubbedFsPath, (err, stream)=> { - this.fs.createReadStream.calledWith(this.stubbedFsPath).should.equal(true); - return done(); - }); - }); - - return it("should send the stream in the callback", 
function(done){ - return this.writer.getStream(this.stubbedFsPath, (err, readStream)=> { - readStream.should.equal(this.readStream); - return done(); - }); - }); - }); - - return describe("delete file", () => it("should unlink the file", function(done){ - const error = "my error"; - this.fs.unlink.callsArgWith(1, error); - return this.writer.deleteFile(this.stubbedFsPath, err=> { - this.fs.unlink.calledWith(this.stubbedFsPath).should.equal(true); - err.should.equal(error); - return done(); - }); - })); -}); + return it('should send the stream in the callback', function(done) { + return this.writer.getStream(this.stubbedFsPath, (err, readStream) => { + readStream.should.equal(this.readStream) + return done() + }) + }) + }) + return describe('delete file', () => + it('should unlink the file', function(done) { + const error = 'my error' + this.fs.unlink.callsArgWith(1, error) + return this.writer.deleteFile(this.stubbedFsPath, err => { + this.fs.unlink.calledWith(this.stubbedFsPath).should.equal(true) + err.should.equal(error) + return done() + }) + })) +}) diff --git a/services/filestore/test/unit/js/PersistorManagerTests.js b/services/filestore/test/unit/js/PersistorManagerTests.js index 620d6f0083..ff49c05ce9 100644 --- a/services/filestore/test/unit/js/PersistorManagerTests.js +++ b/services/filestore/test/unit/js/PersistorManagerTests.js @@ -9,129 +9,129 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const logger = require("logger-sharelatex"); -const { - assert -} = require("chai"); -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../app/js/PersistorManager.js"; -const SandboxedModule = require('sandboxed-module'); +const logger = require('logger-sharelatex') +const { assert } = require('chai') +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../app/js/PersistorManager.js' +const SandboxedModule = require('sandboxed-module') +describe('PersistorManagerTests', function() { + beforeEach(function() { + return (this.S3PersistorManager = { + getFileStream: sinon.stub(), + checkIfFileExists: sinon.stub(), + deleteFile: sinon.stub(), + deleteDirectory: sinon.stub(), + sendStream: sinon.stub(), + insertFile: sinon.stub() + }) + }) -describe("PersistorManagerTests", function() { - - beforeEach(function() { - return this.S3PersistorManager = { - getFileStream: sinon.stub(), - checkIfFileExists: sinon.stub(), - deleteFile: sinon.stub(), - deleteDirectory: sinon.stub(), - sendStream: sinon.stub(), - insertFile: sinon.stub() - }; - }); - - describe("test s3 mixin", function() { - beforeEach(function() { - this.settings = { - filestore: { - backend: "s3" - } - }; - this.requires = { - "./S3PersistorManager": this.S3PersistorManager, - "settings-sharelatex": this.settings, - "logger-sharelatex": { - log() {}, - err() {} - } - }; - return this.PersistorManager = SandboxedModule.require(modulePath, {requires: this.requires}); - }); - - it("should load getFileStream", function(done) { - this.PersistorManager.should.respondTo("getFileStream"); - this.PersistorManager.getFileStream(); - this.S3PersistorManager.getFileStream.calledOnce.should.equal(true); - return done(); - }); - - it("should load checkIfFileExists", function(done) { - this.PersistorManager.checkIfFileExists(); - 
this.S3PersistorManager.checkIfFileExists.calledOnce.should.equal(true); - return done(); - }); - - it("should load deleteFile", function(done) { - this.PersistorManager.deleteFile(); - this.S3PersistorManager.deleteFile.calledOnce.should.equal(true); - return done(); - }); - - it("should load deleteDirectory", function(done) { - this.PersistorManager.deleteDirectory(); - this.S3PersistorManager.deleteDirectory.calledOnce.should.equal(true); - return done(); - }); - - it("should load sendStream", function(done) { - this.PersistorManager.sendStream(); - this.S3PersistorManager.sendStream.calledOnce.should.equal(true); - return done(); - }); - - return it("should load insertFile", function(done) { - this.PersistorManager.insertFile(); - this.S3PersistorManager.insertFile.calledOnce.should.equal(true); - return done(); - }); - }); - - describe("test unspecified mixins", () => it("should load s3 when no wrapper specified", function(done) { - this.settings = {filestore:{}}; - this.requires = { - "./S3PersistorManager": this.S3PersistorManager, - "settings-sharelatex": this.settings, - "logger-sharelatex": { - log() {}, - err() {} - } - }; - this.PersistorManager = SandboxedModule.require(modulePath, {requires: this.requires}); - this.PersistorManager.should.respondTo("getFileStream"); - this.PersistorManager.getFileStream(); - this.S3PersistorManager.getFileStream.calledOnce.should.equal(true); - return done(); - })); - - return describe("test invalid mixins", () => it("should not load an invalid wrapper", function(done) { - this.settings = { - filestore: { - backend:"magic" - } - }; - this.requires = { - "./S3PersistorManager": this.S3PersistorManager, - "settings-sharelatex": this.settings, - "logger-sharelatex": { - log() {}, - err() {} - } - }; - this.fsWrapper=null; - try { - this.PersistorManager=SandboxedModule.require(modulePath, {requires: this.requires}); - } catch (error) { - assert.equal("Unknown filestore backend: magic",error.message); + describe('test s3 mixin', function() { + beforeEach(function() { + this.settings = { + filestore: { + backend: 's3' } - assert.isNull(this.fsWrapper); - return done(); - })); -}); + } + this.requires = { + './S3PersistorManager': this.S3PersistorManager, + 'settings-sharelatex': this.settings, + 'logger-sharelatex': { + log() {}, + err() {} + } + } + return (this.PersistorManager = SandboxedModule.require(modulePath, { + requires: this.requires + })) + }) + it('should load getFileStream', function(done) { + this.PersistorManager.should.respondTo('getFileStream') + this.PersistorManager.getFileStream() + this.S3PersistorManager.getFileStream.calledOnce.should.equal(true) + return done() + }) + it('should load checkIfFileExists', function(done) { + this.PersistorManager.checkIfFileExists() + this.S3PersistorManager.checkIfFileExists.calledOnce.should.equal(true) + return done() + }) + + it('should load deleteFile', function(done) { + this.PersistorManager.deleteFile() + this.S3PersistorManager.deleteFile.calledOnce.should.equal(true) + return done() + }) + + it('should load deleteDirectory', function(done) { + this.PersistorManager.deleteDirectory() + this.S3PersistorManager.deleteDirectory.calledOnce.should.equal(true) + return done() + }) + + it('should load sendStream', function(done) { + this.PersistorManager.sendStream() + this.S3PersistorManager.sendStream.calledOnce.should.equal(true) + return done() + }) + + return it('should load insertFile', function(done) { + this.PersistorManager.insertFile() + 
this.S3PersistorManager.insertFile.calledOnce.should.equal(true) + return done() + }) + }) + + describe('test unspecified mixins', () => + it('should load s3 when no wrapper specified', function(done) { + this.settings = { filestore: {} } + this.requires = { + './S3PersistorManager': this.S3PersistorManager, + 'settings-sharelatex': this.settings, + 'logger-sharelatex': { + log() {}, + err() {} + } + } + this.PersistorManager = SandboxedModule.require(modulePath, { + requires: this.requires + }) + this.PersistorManager.should.respondTo('getFileStream') + this.PersistorManager.getFileStream() + this.S3PersistorManager.getFileStream.calledOnce.should.equal(true) + return done() + })) + + return describe('test invalid mixins', () => + it('should not load an invalid wrapper', function(done) { + this.settings = { + filestore: { + backend: 'magic' + } + } + this.requires = { + './S3PersistorManager': this.S3PersistorManager, + 'settings-sharelatex': this.settings, + 'logger-sharelatex': { + log() {}, + err() {} + } + } + this.fsWrapper = null + try { + this.PersistorManager = SandboxedModule.require(modulePath, { + requires: this.requires + }) + } catch (error) { + assert.equal('Unknown filestore backend: magic', error.message) + } + assert.isNull(this.fsWrapper) + return done() + })) +}) diff --git a/services/filestore/test/unit/js/S3PersistorManagerTests.js b/services/filestore/test/unit/js/S3PersistorManagerTests.js index a3a7d16825..c0a03696a8 100644 --- a/services/filestore/test/unit/js/S3PersistorManagerTests.js +++ b/services/filestore/test/unit/js/S3PersistorManagerTests.js @@ -11,444 +11,609 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const { - assert -} = require("chai"); -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../app/js/S3PersistorManager.js"; -const SandboxedModule = require('sandboxed-module'); +const { assert } = require('chai') +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../app/js/S3PersistorManager.js' +const SandboxedModule = require('sandboxed-module') -describe("S3PersistorManagerTests", function() { +describe('S3PersistorManagerTests', function() { + beforeEach(function() { + this.settings = { + filestore: { + backend: 's3', + s3: { + secret: 'secret', + key: 'this_key' + }, + stores: { + user_files: 'sl_user_files' + } + } + } + this.knoxClient = { + putFile: sinon.stub(), + copyFile: sinon.stub(), + list: sinon.stub(), + deleteMultiple: sinon.stub(), + get: sinon.stub() + } + this.knox = { createClient: sinon.stub().returns(this.knoxClient) } + this.s3EventHandlers = {} + this.s3Request = { + on: sinon.stub().callsFake((event, callback) => { + return (this.s3EventHandlers[event] = callback) + }), + send: sinon.stub() + } + this.s3Response = { + httpResponse: { + createUnbufferedStream: sinon.stub() + } + } + this.s3Client = { + copyObject: sinon.stub(), + headObject: sinon.stub(), + getObject: sinon.stub().returns(this.s3Request) + } + this.awsS3 = sinon.stub().returns(this.s3Client) + this.LocalFileWriter = { + writeStream: sinon.stub(), + deleteFile: sinon.stub() + } + this.request = sinon.stub() + this.requires = { + knox: this.knox, + 'aws-sdk/clients/s3': this.awsS3, + 'settings-sharelatex': this.settings, + './LocalFileWriter': this.LocalFileWriter, + 
'logger-sharelatex': { + log() {}, + err() {} + }, + request: this.request, + './Errors': (this.Errors = { NotFoundError: sinon.stub() }) + } + this.key = 'my/key' + this.bucketName = 'my-bucket' + this.error = 'my errror' + return (this.S3PersistorManager = SandboxedModule.require(modulePath, { + requires: this.requires + })) + }) - beforeEach(function() { - this.settings = { - filestore: { - backend: "s3", - s3: { - secret: "secret", - key: "this_key" - }, - stores: { - user_files:"sl_user_files" - } - } - }; - this.knoxClient = { - putFile:sinon.stub(), - copyFile:sinon.stub(), - list: sinon.stub(), - deleteMultiple: sinon.stub(), - get: sinon.stub() - }; - this.knox = - {createClient: sinon.stub().returns(this.knoxClient)}; - this.s3EventHandlers = {}; - this.s3Request = { - on: sinon.stub().callsFake((event, callback) => { - return this.s3EventHandlers[event] = callback; - }), - send: sinon.stub() - }; - this.s3Response = { - httpResponse: { - createUnbufferedStream: sinon.stub() - } - }; - this.s3Client = { - copyObject: sinon.stub(), - headObject: sinon.stub(), - getObject: sinon.stub().returns(this.s3Request) - }; - this.awsS3 = sinon.stub().returns(this.s3Client); - this.LocalFileWriter = { - writeStream: sinon.stub(), - deleteFile: sinon.stub() - }; - this.request = sinon.stub(); - this.requires = { - "knox": this.knox, - "aws-sdk/clients/s3": this.awsS3, - "settings-sharelatex": this.settings, - "./LocalFileWriter":this.LocalFileWriter, - "logger-sharelatex": { - log() {}, - err() {} - }, - "request": this.request, - "./Errors": (this.Errors = - {NotFoundError: sinon.stub()}) - }; - this.key = "my/key"; - this.bucketName = "my-bucket"; - this.error = "my errror"; - return this.S3PersistorManager = SandboxedModule.require(modulePath, {requires: this.requires}); - }); + describe('getFileStream', function() { + describe('success', function() { + beforeEach(function() { + this.expectedStream = { expectedStream: true } + this.expectedStream.on = sinon.stub() + this.s3Request.send.callsFake(() => { + return this.s3EventHandlers.httpHeaders( + 200, + {}, + this.s3Response, + 'OK' + ) + }) + return this.s3Response.httpResponse.createUnbufferedStream.returns( + this.expectedStream + ) + }) - describe("getFileStream", function() { - describe("success", function() { - beforeEach(function() { - this.expectedStream = { expectedStream: true }; - this.expectedStream.on = sinon.stub(); - this.s3Request.send.callsFake(() => { - return this.s3EventHandlers.httpHeaders(200, {}, this.s3Response, "OK"); - }); - return this.s3Response.httpResponse.createUnbufferedStream.returns(this.expectedStream); - }); + it('returns a stream', function(done) { + return this.S3PersistorManager.getFileStream( + this.bucketName, + this.key, + {}, + (err, stream) => { + if (err != null) { + return done(err) + } + expect(stream).to.equal(this.expectedStream) + return done() + } + ) + }) - it("returns a stream", function(done) { - return this.S3PersistorManager.getFileStream(this.bucketName, this.key, {}, (err, stream) => { - if (err != null) { - return done(err); - } - expect(stream).to.equal(this.expectedStream); - return done(); - }); - }); + it('sets the AWS client up with credentials from settings', function(done) { + return this.S3PersistorManager.getFileStream( + this.bucketName, + this.key, + {}, + (err, stream) => { + if (err != null) { + return done(err) + } + expect(this.awsS3.lastCall.args).to.deep.equal([ + { + credentials: { + accessKeyId: this.settings.filestore.s3.key, + secretAccessKey: 
this.settings.filestore.s3.secret + } + } + ]) + return done() + } + ) + }) - it("sets the AWS client up with credentials from settings", function(done) { - return this.S3PersistorManager.getFileStream(this.bucketName, this.key, {}, (err, stream) => { - if (err != null) { - return done(err); - } - expect(this.awsS3.lastCall.args).to.deep.equal([{ - credentials: { - accessKeyId: this.settings.filestore.s3.key, - secretAccessKey: this.settings.filestore.s3.secret - } - }]); - return done(); - }); - }); + it('fetches the right key from the right bucket', function(done) { + return this.S3PersistorManager.getFileStream( + this.bucketName, + this.key, + {}, + (err, stream) => { + if (err != null) { + return done(err) + } + expect(this.s3Client.getObject.lastCall.args).to.deep.equal([ + { + Bucket: this.bucketName, + Key: this.key + } + ]) + return done() + } + ) + }) - it("fetches the right key from the right bucket", function(done) { - return this.S3PersistorManager.getFileStream(this.bucketName, this.key, {}, (err, stream) => { - if (err != null) { - return done(err); - } - expect(this.s3Client.getObject.lastCall.args).to.deep.equal([{ - Bucket: this.bucketName, - Key: this.key - }]); - return done(); - }); - }); + it('accepts alternative credentials', function(done) { + const accessKeyId = 'that_key' + const secret = 'that_secret' + const opts = { + credentials: { + auth_key: accessKeyId, + auth_secret: secret + } + } + return this.S3PersistorManager.getFileStream( + this.bucketName, + this.key, + opts, + (err, stream) => { + if (err != null) { + return done(err) + } + expect(this.awsS3.lastCall.args).to.deep.equal([ + { + credentials: { + accessKeyId, + secretAccessKey: secret + } + } + ]) + expect(stream).to.equal(this.expectedStream) + return done() + } + ) + }) - it("accepts alternative credentials", function(done) { - const accessKeyId = "that_key"; - const secret = "that_secret"; - const opts = { - credentials: { - auth_key: accessKeyId, - auth_secret: secret - } - }; - return this.S3PersistorManager.getFileStream(this.bucketName, this.key, opts, (err, stream) => { - if (err != null) { - return done(err); - } - expect(this.awsS3.lastCall.args).to.deep.equal([{ - credentials: { - accessKeyId, - secretAccessKey: secret - } - }]); - expect(stream).to.equal(this.expectedStream); - return done(); - }); - }); + return it('accepts byte range', function(done) { + const start = 0 + const end = 8 + const opts = { start, end } + return this.S3PersistorManager.getFileStream( + this.bucketName, + this.key, + opts, + (err, stream) => { + if (err != null) { + return done(err) + } + expect(this.s3Client.getObject.lastCall.args).to.deep.equal([ + { + Bucket: this.bucketName, + Key: this.key, + Range: `bytes=${start}-${end}` + } + ]) + expect(stream).to.equal(this.expectedStream) + return done() + } + ) + }) + }) - return it("accepts byte range", function(done) { - const start = 0; - const end = 8; - const opts = { start, end }; - return this.S3PersistorManager.getFileStream(this.bucketName, this.key, opts, (err, stream) => { - if (err != null) { - return done(err); - } - expect(this.s3Client.getObject.lastCall.args).to.deep.equal([{ - Bucket: this.bucketName, - Key: this.key, - Range: `bytes=${start}-${end}` - }]); - expect(stream).to.equal(this.expectedStream); - return done(); - }); - }); - }); + return describe('errors', function() { + describe("when the file doesn't exist", function() { + beforeEach(function() { + return this.s3Request.send.callsFake(() => { + return 
this.s3EventHandlers.httpHeaders( + 404, + {}, + this.s3Response, + 'Not found' + ) + }) + }) - return describe("errors", function() { - describe("when the file doesn't exist", function() { - beforeEach(function() { - return this.s3Request.send.callsFake(() => { - return this.s3EventHandlers.httpHeaders(404, {}, this.s3Response, "Not found"); - }); - }); + return it('returns a NotFoundError that indicates the bucket and key', function(done) { + return this.S3PersistorManager.getFileStream( + this.bucketName, + this.key, + {}, + (err, stream) => { + expect(err).to.be.instanceof(this.Errors.NotFoundError) + const errMsg = this.Errors.NotFoundError.lastCall.args[0] + expect(errMsg).to.match(new RegExp(`.*${this.bucketName}.*`)) + expect(errMsg).to.match(new RegExp(`.*${this.key}.*`)) + return done() + } + ) + }) + }) - return it("returns a NotFoundError that indicates the bucket and key", function(done) { - return this.S3PersistorManager.getFileStream(this.bucketName, this.key, {}, (err, stream) => { - expect(err).to.be.instanceof(this.Errors.NotFoundError); - const errMsg = this.Errors.NotFoundError.lastCall.args[0]; - expect(errMsg).to.match(new RegExp(`.*${this.bucketName}.*`)); - expect(errMsg).to.match(new RegExp(`.*${this.key}.*`)); - return done(); - }); - }); - }); + describe('when S3 encounters an unkown error', function() { + beforeEach(function() { + return this.s3Request.send.callsFake(() => { + return this.s3EventHandlers.httpHeaders( + 500, + {}, + this.s3Response, + 'Internal server error' + ) + }) + }) - describe("when S3 encounters an unkown error", function() { - beforeEach(function() { - return this.s3Request.send.callsFake(() => { - return this.s3EventHandlers.httpHeaders(500, {}, this.s3Response, "Internal server error"); - }); - }); + return it('returns an error', function(done) { + return this.S3PersistorManager.getFileStream( + this.bucketName, + this.key, + {}, + (err, stream) => { + expect(err).to.be.instanceof(Error) + return done() + } + ) + }) + }) - return it("returns an error", function(done) { - return this.S3PersistorManager.getFileStream(this.bucketName, this.key, {}, (err, stream) => { - expect(err).to.be.instanceof(Error); - return done(); - }); - }); - }); + return describe('when the S3 request errors out before receiving HTTP headers', function() { + beforeEach(function() { + return this.s3Request.send.callsFake(() => { + return this.s3EventHandlers.error(new Error('connection failed')) + }) + }) - return describe("when the S3 request errors out before receiving HTTP headers", function() { - beforeEach(function() { - return this.s3Request.send.callsFake(() => { - return this.s3EventHandlers.error(new Error("connection failed")); - }); - }); + return it('returns an error', function(done) { + return this.S3PersistorManager.getFileStream( + this.bucketName, + this.key, + {}, + (err, stream) => { + expect(err).to.be.instanceof(Error) + return done() + } + ) + }) + }) + }) + }) - return it("returns an error", function(done) { - return this.S3PersistorManager.getFileStream(this.bucketName, this.key, {}, (err, stream) => { - expect(err).to.be.instanceof(Error); - return done(); - }); - }); - }); - }); - }); + describe('getFileSize', function() { + it('should obtain the file size from S3', function(done) { + const expectedFileSize = 123 + this.s3Client.headObject.yields( + new Error('s3Client.headObject got unexpected arguments') + ) + this.s3Client.headObject + .withArgs({ + Bucket: this.bucketName, + Key: this.key + }) + .yields(null, { ContentLength: 
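The stubs these tests wire up (getObject returning a request object, an httpHeaders listener, createUnbufferedStream on the response) mirror the aws-sdk v2 request/event API. A minimal sketch of a getFileStream built on that API — inferred from the test doubles above, not copied from the real S3PersistorManager.js, with the project's NotFoundError class simplified to a plain Error and byte-range support omitted — could look like:

const S3 = require('aws-sdk/clients/s3')

function getFileStreamSketch(s3Settings, bucket, key, callback) {
  // credentials shape matches what the tests assert is passed to the client
  const s3 = new S3({
    credentials: {
      accessKeyId: s3Settings.key,
      secretAccessKey: s3Settings.secret
    }
  })
  const request = s3.getObject({ Bucket: bucket, Key: key })

  let finished = false
  function finish(err, stream) {
    if (finished) return
    finished = true
    callback(err, stream)
  }

  request.on('httpHeaders', (statusCode, headers, response) => {
    if (statusCode === 404) {
      return finish(new Error(`not found: ${bucket}/${key}`))
    }
    if (statusCode >= 400) {
      return finish(new Error(`S3 error ${statusCode} for ${bucket}/${key}`))
    }
    // hand back the raw response stream, as the success tests expect
    finish(null, response.httpResponse.createUnbufferedStream())
  })
  request.on('error', err => finish(err)) // e.g. connection failure before headers
  request.send()
}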
expectedFileSize }) - describe("getFileSize", function() { - it("should obtain the file size from S3", function(done) { - const expectedFileSize = 123; - this.s3Client.headObject.yields(new Error( - "s3Client.headObject got unexpected arguments" - )); - this.s3Client.headObject.withArgs({ - Bucket: this.bucketName, - Key: this.key - }).yields(null, { ContentLength: expectedFileSize }); + return this.S3PersistorManager.getFileSize( + this.bucketName, + this.key, + (err, fileSize) => { + if (err != null) { + return done(err) + } + expect(fileSize).to.equal(expectedFileSize) + return done() + } + ) + }) - return this.S3PersistorManager.getFileSize(this.bucketName, this.key, (err, fileSize) => { - if (err != null) { - return done(err); - } - expect(fileSize).to.equal(expectedFileSize); - return done(); - }); - }); + ;[403, 404].forEach(statusCode => + it(`should throw NotFoundError when S3 responds with ${statusCode}`, function(done) { + const error = new Error() + error.statusCode = statusCode + this.s3Client.headObject.yields(error) - [403, 404].forEach(statusCode => it(`should throw NotFoundError when S3 responds with ${statusCode}`, function(done) { - const error = new Error(); - error.statusCode = statusCode; - this.s3Client.headObject.yields(error); + return this.S3PersistorManager.getFileSize( + this.bucketName, + this.key, + (err, fileSize) => { + expect(err).to.be.an.instanceof(this.Errors.NotFoundError) + return done() + } + ) + }) + ) - return this.S3PersistorManager.getFileSize(this.bucketName, this.key, (err, fileSize) => { - expect(err).to.be.an.instanceof(this.Errors.NotFoundError); - return done(); - }); - })); + return it('should rethrow any other error', function(done) { + const error = new Error() + this.s3Client.headObject.yields(error) + this.s3Client.headObject.yields(error) - return it("should rethrow any other error", function(done) { - const error = new Error(); - this.s3Client.headObject.yields(error); - this.s3Client.headObject.yields(error); + return this.S3PersistorManager.getFileSize( + this.bucketName, + this.key, + (err, fileSize) => { + expect(err).to.equal(error) + return done() + } + ) + }) + }) - return this.S3PersistorManager.getFileSize(this.bucketName, this.key, (err, fileSize) => { - expect(err).to.equal(error); - return done(); - }); - }); - }); + describe('sendFile', function() { + beforeEach(function() { + return this.knoxClient.putFile.returns({ on() {} }) + }) - describe("sendFile", function() { + it('should put file with knox', function(done) { + this.LocalFileWriter.deleteFile.callsArgWith(1) + this.knoxClient.putFile.callsArgWith(2, this.error) + return this.S3PersistorManager.sendFile( + this.bucketName, + this.key, + this.fsPath, + err => { + this.knoxClient.putFile + .calledWith(this.fsPath, this.key) + .should.equal(true) + err.should.equal(this.error) + return done() + } + ) + }) - beforeEach(function() { - return this.knoxClient.putFile.returns({on() {}}); - }); + return it('should delete the file and pass the error with it', function(done) { + this.LocalFileWriter.deleteFile.callsArgWith(1) + this.knoxClient.putFile.callsArgWith(2, this.error) + return this.S3PersistorManager.sendFile( + this.bucketName, + this.key, + this.fsPath, + err => { + this.knoxClient.putFile + .calledWith(this.fsPath, this.key) + .should.equal(true) + err.should.equal(this.error) + return done() + } + ) + }) + }) - it("should put file with knox", function(done){ - this.LocalFileWriter.deleteFile.callsArgWith(1); - this.knoxClient.putFile.callsArgWith(2, 
this.error); - return this.S3PersistorManager.sendFile(this.bucketName, this.key, this.fsPath, err=> { - this.knoxClient.putFile.calledWith(this.fsPath, this.key).should.equal(true); - err.should.equal(this.error); - return done(); - }); - }); + describe('sendStream', function() { + beforeEach(function() { + this.fsPath = 'to/some/where' + this.origin = { on() {} } + return (this.S3PersistorManager.sendFile = sinon.stub().callsArgWith(3)) + }) - return it("should delete the file and pass the error with it", function(done){ - this.LocalFileWriter.deleteFile.callsArgWith(1); - this.knoxClient.putFile.callsArgWith(2, this.error); - return this.S3PersistorManager.sendFile(this.bucketName, this.key, this.fsPath, err=> { - this.knoxClient.putFile.calledWith(this.fsPath, this.key).should.equal(true); - err.should.equal(this.error); - return done(); - }); - }); - }); + it('should send stream to LocalFileWriter', function(done) { + this.LocalFileWriter.deleteFile.callsArgWith(1) + this.LocalFileWriter.writeStream.callsArgWith(2, null, this.fsPath) + return this.S3PersistorManager.sendStream( + this.bucketName, + this.key, + this.origin, + () => { + this.LocalFileWriter.writeStream + .calledWith(this.origin) + .should.equal(true) + return done() + } + ) + }) - describe("sendStream", function() { - beforeEach(function() { - this.fsPath = "to/some/where"; - this.origin = - {on() {}}; - return this.S3PersistorManager.sendFile = sinon.stub().callsArgWith(3); - }); + it('should return the error from LocalFileWriter', function(done) { + this.LocalFileWriter.deleteFile.callsArgWith(1) + this.LocalFileWriter.writeStream.callsArgWith(2, this.error) + return this.S3PersistorManager.sendStream( + this.bucketName, + this.key, + this.origin, + err => { + err.should.equal(this.error) + return done() + } + ) + }) - it("should send stream to LocalFileWriter", function(done){ - this.LocalFileWriter.deleteFile.callsArgWith(1); - this.LocalFileWriter.writeStream.callsArgWith(2, null, this.fsPath); - return this.S3PersistorManager.sendStream(this.bucketName, this.key, this.origin, () => { - this.LocalFileWriter.writeStream.calledWith(this.origin).should.equal(true); - return done(); - }); - }); + return it('should send the file to the filestore', function(done) { + this.LocalFileWriter.deleteFile.callsArgWith(1) + this.LocalFileWriter.writeStream.callsArgWith(2) + return this.S3PersistorManager.sendStream( + this.bucketName, + this.key, + this.origin, + err => { + this.S3PersistorManager.sendFile.called.should.equal(true) + return done() + } + ) + }) + }) - it("should return the error from LocalFileWriter", function(done){ - this.LocalFileWriter.deleteFile.callsArgWith(1); - this.LocalFileWriter.writeStream.callsArgWith(2, this.error); - return this.S3PersistorManager.sendStream(this.bucketName, this.key, this.origin, err=> { - err.should.equal(this.error); - return done(); - }); - }); + describe('copyFile', function() { + beforeEach(function() { + this.sourceKey = 'my/key' + return (this.destKey = 'my/dest/key') + }) - return it("should send the file to the filestore", function(done){ - this.LocalFileWriter.deleteFile.callsArgWith(1); - this.LocalFileWriter.writeStream.callsArgWith(2); - return this.S3PersistorManager.sendStream(this.bucketName, this.key, this.origin, err=> { - this.S3PersistorManager.sendFile.called.should.equal(true); - return done(); - }); - }); - }); + it('should use AWS SDK to copy file', function(done) { + this.s3Client.copyObject.callsArgWith(1, this.error) + return 
this.S3PersistorManager.copyFile( + this.bucketName, + this.sourceKey, + this.destKey, + err => { + err.should.equal(this.error) + this.s3Client.copyObject + .calledWith({ + Bucket: this.bucketName, + Key: this.destKey, + CopySource: this.bucketName + '/' + this.key + }) + .should.equal(true) + return done() + } + ) + }) - describe("copyFile", function() { - beforeEach(function() { - this.sourceKey = "my/key"; - return this.destKey = "my/dest/key"; - }); + return it('should return a NotFoundError object if the original file does not exist', function(done) { + const NoSuchKeyError = { code: 'NoSuchKey' } + this.s3Client.copyObject.callsArgWith(1, NoSuchKeyError) + return this.S3PersistorManager.copyFile( + this.bucketName, + this.sourceKey, + this.destKey, + err => { + expect(err instanceof this.Errors.NotFoundError).to.equal(true) + return done() + } + ) + }) + }) - it("should use AWS SDK to copy file", function(done){ - this.s3Client.copyObject.callsArgWith(1, this.error); - return this.S3PersistorManager.copyFile(this.bucketName, this.sourceKey, this.destKey, err=> { - err.should.equal(this.error); - this.s3Client.copyObject.calledWith({Bucket: this.bucketName, Key: this.destKey, CopySource: this.bucketName + '/' + this.key}).should.equal(true); - return done(); - }); - }); + describe('deleteDirectory', () => + it('should list the contents passing them onto multi delete', function(done) { + const data = { Contents: [{ Key: '1234' }, { Key: '456' }] } + this.knoxClient.list.callsArgWith(1, null, data) + this.knoxClient.deleteMultiple.callsArgWith(1) + return this.S3PersistorManager.deleteDirectory( + this.bucketName, + this.key, + err => { + this.knoxClient.deleteMultiple + .calledWith(['1234', '456']) + .should.equal(true) + return done() + } + ) + })) - return it("should return a NotFoundError object if the original file does not exist", function(done){ - const NoSuchKeyError = {code: "NoSuchKey"}; - this.s3Client.copyObject.callsArgWith(1, NoSuchKeyError); - return this.S3PersistorManager.copyFile(this.bucketName, this.sourceKey, this.destKey, err=> { - expect(err instanceof this.Errors.NotFoundError).to.equal(true); - return done(); - }); - }); - }); + describe('deleteFile', function() { + it('should use correct options', function(done) { + this.request.callsArgWith(1) - describe("deleteDirectory", () => it("should list the contents passing them onto multi delete", function(done){ - const data = - {Contents: [{Key:"1234"}, {Key: "456"}]}; - this.knoxClient.list.callsArgWith(1, null, data); - this.knoxClient.deleteMultiple.callsArgWith(1); - return this.S3PersistorManager.deleteDirectory(this.bucketName, this.key, err=> { - this.knoxClient.deleteMultiple.calledWith(["1234","456"]).should.equal(true); - return done(); - }); - })); + return this.S3PersistorManager.deleteFile( + this.bucketName, + this.key, + err => { + const opts = this.request.args[0][0] + assert.deepEqual(opts.aws, { + key: this.settings.filestore.s3.key, + secret: this.settings.filestore.s3.secret, + bucket: this.bucketName + }) + opts.method.should.equal('delete') + opts.timeout.should.equal(30 * 1000) + opts.uri.should.equal( + `https://${this.bucketName}.s3.amazonaws.com/${this.key}` + ) + return done() + } + ) + }) - describe("deleteFile", function() { + return it('should return the error', function(done) { + this.request.callsArgWith(1, this.error) - it("should use correct options", function(done){ - this.request.callsArgWith(1); + return this.S3PersistorManager.deleteFile( + this.bucketName, + this.key, + err => 
{ + err.should.equal(this.error) + return done() + } + ) + }) + }) - return this.S3PersistorManager.deleteFile(this.bucketName, this.key, err=> { - const opts = this.request.args[0][0]; - assert.deepEqual(opts.aws, {key:this.settings.filestore.s3.key, secret:this.settings.filestore.s3.secret, bucket:this.bucketName}); - opts.method.should.equal("delete"); - opts.timeout.should.equal((30*1000)); - opts.uri.should.equal(`https://${this.bucketName}.s3.amazonaws.com/${this.key}`); - return done(); - }); - }); + describe('checkIfFileExists', function() { + it('should use correct options', function(done) { + this.request.callsArgWith(1, null, { statusCode: 200 }) - return it("should return the error", function(done){ - this.request.callsArgWith(1, this.error); + return this.S3PersistorManager.checkIfFileExists( + this.bucketName, + this.key, + err => { + const opts = this.request.args[0][0] + assert.deepEqual(opts.aws, { + key: this.settings.filestore.s3.key, + secret: this.settings.filestore.s3.secret, + bucket: this.bucketName + }) + opts.method.should.equal('head') + opts.timeout.should.equal(30 * 1000) + opts.uri.should.equal( + `https://${this.bucketName}.s3.amazonaws.com/${this.key}` + ) + return done() + } + ) + }) - return this.S3PersistorManager.deleteFile(this.bucketName, this.key, err=> { - err.should.equal(this.error); - return done(); - }); - }); - }); + it('should return true for a 200', function(done) { + this.request.callsArgWith(1, null, { statusCode: 200 }) - describe("checkIfFileExists", function() { + return this.S3PersistorManager.checkIfFileExists( + this.bucketName, + this.key, + (err, exists) => { + exists.should.equal(true) + return done() + } + ) + }) - it("should use correct options", function(done){ - this.request.callsArgWith(1, null, {statusCode:200}); + it('should return false for a non 200', function(done) { + this.request.callsArgWith(1, null, { statusCode: 404 }) - return this.S3PersistorManager.checkIfFileExists(this.bucketName, this.key, err=> { - const opts = this.request.args[0][0]; - assert.deepEqual(opts.aws, {key:this.settings.filestore.s3.key, secret:this.settings.filestore.s3.secret, bucket:this.bucketName}); - opts.method.should.equal("head"); - opts.timeout.should.equal((30*1000)); - opts.uri.should.equal(`https://${this.bucketName}.s3.amazonaws.com/${this.key}`); - return done(); - }); - }); + return this.S3PersistorManager.checkIfFileExists( + this.bucketName, + this.key, + (err, exists) => { + exists.should.equal(false) + return done() + } + ) + }) - it("should return true for a 200", function(done){ - this.request.callsArgWith(1, null, {statusCode:200}); + return it('should return the error', function(done) { + this.request.callsArgWith(1, this.error, {}) - return this.S3PersistorManager.checkIfFileExists(this.bucketName, this.key, (err, exists)=> { - exists.should.equal(true); - return done(); - }); - }); + return this.S3PersistorManager.checkIfFileExists( + this.bucketName, + this.key, + err => { + err.should.equal(this.error) + return done() + } + ) + }) + }) - it("should return false for a non 200", function(done){ - this.request.callsArgWith(1, null, {statusCode:404}); - - return this.S3PersistorManager.checkIfFileExists(this.bucketName, this.key, (err, exists)=> { - exists.should.equal(false); - return done(); - }); - }); - - return it("should return the error", function(done){ - this.request.callsArgWith(1, this.error, {}); - - return this.S3PersistorManager.checkIfFileExists(this.bucketName, this.key, err=> { - 
err.should.equal(this.error); - return done(); - }); - }); - }); - - return describe("directorySize", () => it("should sum directory files size", function(done) { - const data = - {Contents: [ {Size: 1024}, {Size: 2048} ]}; - this.knoxClient.list.callsArgWith(1, null, data); - return this.S3PersistorManager.directorySize(this.bucketName, this.key, (err, totalSize)=> { - totalSize.should.equal(3072); - return done(); - }); - })); -}); + return describe('directorySize', () => + it('should sum directory files size', function(done) { + const data = { Contents: [{ Size: 1024 }, { Size: 2048 }] } + this.knoxClient.list.callsArgWith(1, null, data) + return this.S3PersistorManager.directorySize( + this.bucketName, + this.key, + (err, totalSize) => { + totalSize.should.equal(3072) + return done() + } + ) + })) +}) diff --git a/services/filestore/test/unit/js/SafeExecTests.js b/services/filestore/test/unit/js/SafeExecTests.js index f9a0e51ca8..2b629947f5 100644 --- a/services/filestore/test/unit/js/SafeExecTests.js +++ b/services/filestore/test/unit/js/SafeExecTests.js @@ -9,73 +9,87 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const { - assert -} = require("chai"); -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../app/js/SafeExec.js"; -const SandboxedModule = require('sandboxed-module'); +const { assert } = require('chai') +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../app/js/SafeExec.js' +const SandboxedModule = require('sandboxed-module') -describe("SafeExec", function() { +describe('SafeExec', function() { + beforeEach(function() { + this.settings = { enableConversions: true } + this.safe_exec = SandboxedModule.require(modulePath, { + requires: { + 'logger-sharelatex': { + log() {}, + err() {} + }, + 'settings-sharelatex': this.settings + } + }) + return (this.options = { timeout: 10 * 1000, killSignal: 'SIGTERM' }) + }) - beforeEach(function() { - this.settings = - {enableConversions:true}; - this.safe_exec = SandboxedModule.require(modulePath, { requires: { - "logger-sharelatex": { - log() {}, - err() {} - }, - "settings-sharelatex": this.settings - } - } - ); - return this.options = {timeout: 10*1000, killSignal: "SIGTERM" };}); + return describe('safe_exec', function() { + it('should execute a valid command', function(done) { + return this.safe_exec( + ['/bin/echo', 'hello'], + this.options, + (err, stdout, stderr) => { + stdout.should.equal('hello\n') + should.not.exist(err) + return done() + } + ) + }) - return describe("safe_exec", function() { + it('should error when conversions are disabled', function(done) { + this.settings.enableConversions = false + return this.safe_exec( + ['/bin/echo', 'hello'], + this.options, + (err, stdout, stderr) => { + expect(err).to.exist + return done() + } + ) + }) - it("should execute a valid command", function(done) { - return this.safe_exec(["/bin/echo", "hello"], this.options, (err, stdout, stderr) => { - stdout.should.equal("hello\n"); - should.not.exist(err); - return done(); - }); - }); + it('should execute a command with non-zero exit status', function(done) { + return this.safe_exec( + ['/usr/bin/env', 'false'], + this.options, + (err, stdout, stderr) => { + stdout.should.equal('') + stderr.should.equal('') + 
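The SafeExec tests in this hunk pin down a small contract: the command arrives as an argv array, the options carry a timeout and killSignal, stdout/stderr come back as strings, a non-zero exit surfaces as an Error with message "exit status N", and a timed-out process surfaces the bare signal name as the error. A minimal sketch that satisfies those assertions — not the project's actual SafeExec.js, and omitting the enableConversions guard the first tests exercise — might be:

const { spawn } = require('child_process')

function safeExecSketch([command, ...args], opts, callback) {
  const child = spawn(command, args)
  let stdout = ''
  let stderr = ''
  let finished = false

  // enforce the timeout by killing the child with the configured signal
  const timer = setTimeout(() => child.kill(opts.killSignal), opts.timeout)

  function finish(err) {
    if (finished) return
    finished = true
    clearTimeout(timer)
    callback(err, stdout, stderr)
  }

  child.stdout.on('data', chunk => (stdout += chunk))
  child.stderr.on('data', chunk => (stderr += chunk))
  child.on('error', err => finish(err)) // e.g. err.code === 'ENOENT' for a missing binary
  child.on('close', (code, signal) => {
    if (signal) return finish(signal) // killed by the timeout -> err === 'SIGTERM'
    if (code !== 0) return finish(new Error(`exit status ${code}`))
    finish(null)
  })
}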
err.message.should.equal('exit status 1') + return done() + } + ) + }) - it("should error when conversions are disabled", function(done) { - this.settings.enableConversions = false; - return this.safe_exec(["/bin/echo", "hello"], this.options, (err, stdout, stderr) => { - expect(err).to.exist; - return done(); - }); - }); + it('should handle an invalid command', function(done) { + return this.safe_exec( + ['/bin/foobar'], + this.options, + (err, stdout, stderr) => { + err.code.should.equal('ENOENT') + return done() + } + ) + }) - it("should execute a command with non-zero exit status", function(done) { - return this.safe_exec(["/usr/bin/env", "false"], this.options, (err, stdout, stderr) => { - stdout.should.equal(""); - stderr.should.equal(""); - err.message.should.equal("exit status 1"); - return done(); - }); - }); - - it("should handle an invalid command", function(done) { - return this.safe_exec(["/bin/foobar"], this.options, (err, stdout, stderr) => { - err.code.should.equal("ENOENT"); - return done(); - }); - }); - - return it("should handle a command that runs too long", function(done) { - return this.safe_exec(["/bin/sleep", "10"], {timeout: 500, killSignal: "SIGTERM"}, (err, stdout, stderr) => { - err.should.equal("SIGTERM"); - return done(); - }); - }); - }); -}); + return it('should handle a command that runs too long', function(done) { + return this.safe_exec( + ['/bin/sleep', '10'], + { timeout: 500, killSignal: 'SIGTERM' }, + (err, stdout, stderr) => { + err.should.equal('SIGTERM') + return done() + } + ) + }) + }) +}) diff --git a/services/filestore/test/unit/js/SettingsTests.js b/services/filestore/test/unit/js/SettingsTests.js index 98d17723a9..472c6d1179 100644 --- a/services/filestore/test/unit/js/SettingsTests.js +++ b/services/filestore/test/unit/js/SettingsTests.js @@ -9,27 +9,25 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const { - assert -} = require("chai"); -const sinon = require('sinon'); -const chai = require('chai'); -const should = chai.should(); -const { - expect -} = chai; -const modulePath = "../../../app/js/BucketController.js"; +const { assert } = require('chai') +const sinon = require('sinon') +const chai = require('chai') +const should = chai.should() +const { expect } = chai +const modulePath = '../../../app/js/BucketController.js' -describe("Settings", () => describe("s3", () => it("should use JSONified env var if present", function(done){ - const s3_settings = { +describe('Settings', () => + describe('s3', () => + it('should use JSONified env var if present', function(done) { + const s3_settings = { bucket1: { - auth_key: 'bucket1_key', - auth_secret: 'bucket1_secret' + auth_key: 'bucket1_key', + auth_secret: 'bucket1_secret' } - }; - process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3_settings); + } + process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3_settings) - const settings = require("settings-sharelatex"); - expect(settings.filestore.s3BucketCreds).to.deep.equal(s3_settings); - return done(); -}))); + const settings = require('settings-sharelatex') + expect(settings.filestore.s3BucketCreds).to.deep.equal(s3_settings) + return done() + }))) From 495424a94b0df234d8c9c6efcfc5326e239b0501 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 11:28:24 +0000 Subject: [PATCH 346/555] Fix metrics includes in SandboxedModule --- services/filestore/test/unit/js/FileConverterTests.js | 6 ++---- 
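The SettingsTests hunk above only checks that a JSON blob in S3_BUCKET_CREDENTIALS surfaces, parsed, as settings.filestore.s3BucketCreds; the parsing itself lives in the settings configuration. A hedged illustration of that step (the bucket name and key values are illustrative only):

const s3BucketCreds = process.env.S3_BUCKET_CREDENTIALS
  ? JSON.parse(process.env.S3_BUCKET_CREDENTIALS)
  : undefined
// e.g. S3_BUCKET_CREDENTIALS='{"bucket1":{"auth_key":"...","auth_secret":"..."}}'
// which the test expects to come back as settings.filestore.s3BucketCreds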
services/filestore/test/unit/js/LocalFileWriterTests.js | 6 ++---- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/services/filestore/test/unit/js/FileConverterTests.js b/services/filestore/test/unit/js/FileConverterTests.js index 1b548d592e..89faacd1d1 100644 --- a/services/filestore/test/unit/js/FileConverterTests.js +++ b/services/filestore/test/unit/js/FileConverterTests.js @@ -29,10 +29,8 @@ describe('FileConverter', function() { err() {} }, 'metrics-sharelatex': { - inc() {}, - Timer() { - return { done() {} } - } + inc: sinon.stub(), + Timer: sinon.stub().returns({ done: sinon.stub() }) }, 'settings-sharelatex': (this.Settings = { commands: { diff --git a/services/filestore/test/unit/js/LocalFileWriterTests.js b/services/filestore/test/unit/js/LocalFileWriterTests.js index e7ea993ab9..04cc2fb049 100644 --- a/services/filestore/test/unit/js/LocalFileWriterTests.js +++ b/services/filestore/test/unit/js/LocalFileWriterTests.js @@ -48,10 +48,8 @@ describe('LocalFileWriter', function() { }, 'settings-sharelatex': this.settings, 'metrics-sharelatex': { - inc() {}, - Timer() { - return { done() {} } - } + inc: sinon.stub(), + Timer: sinon.stub().returns({ done: sinon.stub() }) } } }) From 6974fd640d496a2d5ebc58d69fb1849b0a1436e6 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 11:29:25 +0000 Subject: [PATCH 347/555] Remove unusued cluster.coffee --- services/filestore/cluster.coffee | 9 --------- 1 file changed, 9 deletions(-) delete mode 100644 services/filestore/cluster.coffee diff --git a/services/filestore/cluster.coffee b/services/filestore/cluster.coffee deleted file mode 100644 index a0ca60a619..0000000000 --- a/services/filestore/cluster.coffee +++ /dev/null @@ -1,9 +0,0 @@ -recluster = require "recluster" # https://github.com/doxout/recluster -path = require "path" - -cluster = recluster path.join(__dirname, 'app.js'), { - workers: 2, - backoff: 0, - readyWhen: "listening" -} -cluster.run() From 35d050a49ca8a10c5f0f06f417d1acc31f821fbc Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 11:32:46 +0000 Subject: [PATCH 348/555] Prettier fixes --- services/filestore/test/unit/js/S3PersistorManagerTests.js | 1 - 1 file changed, 1 deletion(-) diff --git a/services/filestore/test/unit/js/S3PersistorManagerTests.js b/services/filestore/test/unit/js/S3PersistorManagerTests.js index c0a03696a8..777c9c5a32 100644 --- a/services/filestore/test/unit/js/S3PersistorManagerTests.js +++ b/services/filestore/test/unit/js/S3PersistorManagerTests.js @@ -317,7 +317,6 @@ describe('S3PersistorManagerTests', function() { } ) }) - ;[403, 404].forEach(statusCode => it(`should throw NotFoundError when S3 responds with ${statusCode}`, function(done) { const error = new Error() From 230baef8fa3a8d6ae3814cc8a5854bf2f598ca9a Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 11:32:58 +0000 Subject: [PATCH 349/555] Add linting tasks to package.json --- services/filestore/package.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/filestore/package.json b/services/filestore/package.json index 40aabd8279..24ec708b10 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -13,6 +13,9 @@ "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "start": "node $NODE_APP_OPTIONS app.js", "nodemon": "nodemon --config nodemon.json", + "lint": "eslint -f unix .", + "format": "prettier-eslint \"$(pwd)/**/*.js\" --list-different", + "format:fix": "prettier-eslint \"$(pwd)/**/*.js\" --write", 
"test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js", "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js" }, From 7870c8185ecdc0265eac89df7d45d876832367b5 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 11:58:22 +0000 Subject: [PATCH 350/555] Add linting to makefile --- services/filestore/.prettierignore | 1 + services/filestore/Makefile | 12 +++++++++++- 2 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 services/filestore/.prettierignore diff --git a/services/filestore/.prettierignore b/services/filestore/.prettierignore new file mode 100644 index 0000000000..3c3629e647 --- /dev/null +++ b/services/filestore/.prettierignore @@ -0,0 +1 @@ +node_modules diff --git a/services/filestore/Makefile b/services/filestore/Makefile index e83a0696e0..b5b188aa51 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -21,7 +21,17 @@ clean: rm -rf test/unit/js rm -rf test/acceptance/js -test: test_unit test_acceptance +format: + $(DOCKER_COMPOSE) run --rm test_unit npm run format + +format_fix: + $(DOCKER_COMPOSE) run --rm test_unit npm run format:fix + +lint: + $(DOCKER_COMPOSE) run --rm test_unit npm run lint + + +test: format lint test_unit test_acceptance test_unit: @[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit From 29f333d60cac7fe4cab99c0ba4fc763f47898190 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 12:14:06 +0000 Subject: [PATCH 351/555] Placate eslint --- services/filestore/app.js | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index cbe13e8076..c6c11e152a 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -4,6 +4,7 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ +/* eslint-disable node/no-deprecated-api */ const Metrics = require('metrics-sharelatex') Metrics.initialize('filestore') const express = require('express') @@ -11,7 +12,6 @@ const bodyParser = require('body-parser') let logger = require('logger-sharelatex') logger.initialize('filestore') const settings = require('settings-sharelatex') -const request = require('request') const fileController = require('./app/js/FileController') const bucketController = require('./app/js/BucketController') const keyBuilder = require('./app/js/KeyBuilder') @@ -170,10 +170,15 @@ app.get( app.get('/bucket/:bucket/key/*', bucketController.getFile) -app.get('/heapdump', (req, res) => +app.get('/heapdump', (req, res, next) => require('heapdump').writeSnapshot( '/tmp/' + Date.now() + '.filestore.heapsnapshot', - (err, filename) => res.send(filename) + (err, filename) => { + if (err) { + return next(err) + } + res.send(filename) + } ) ) @@ -219,9 +224,13 @@ const host = '0.0.0.0' if (!module.parent) { // Called directly - var server = app.listen(port, host, error => + var server = app.listen(port, host, error => { + if (error) { + logger.error('Error starting Filestore', error) + throw error + } logger.info(`Filestore starting up, listening on ${host}:${port}`) - ) + }) } module.exports = app @@ -232,10 +241,10 @@ process.on('SIGTERM', function() { }) if (global.gc != null) { - let oneMinute + const oneMinute = 60 * 1000 const gcTimer = setInterval(function() { global.gc() return logger.log(process.memoryUsage(), 'global.gc') - }, 
3 * (oneMinute = 60 * 1000)) + }, 3 * oneMinute) gcTimer.unref() } From b7e098208f3b8befc3d28b723c9a584e4f1df3f9 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Dec 2019 17:09:38 +0000 Subject: [PATCH 352/555] Update build files from build_scripts --- services/filestore/.eslintrc | 11 +- services/filestore/.nvmrc | 2 +- services/filestore/.prettierrc | 4 + services/filestore/Dockerfile | 11 +- services/filestore/Jenkinsfile | 8 + services/filestore/Makefile | 9 +- services/filestore/buildscript.txt | 16 +- .../filestore/config/settings.defaults.coffee | 6 +- services/filestore/docker-compose.ci.yml | 18 +- services/filestore/docker-compose.yml | 26 +- services/filestore/nodemon.json | 7 +- services/filestore/npm-shrinkwrap.json | 261 ++++++++---------- services/filestore/package.json | 12 +- 13 files changed, 177 insertions(+), 214 deletions(-) diff --git a/services/filestore/.eslintrc b/services/filestore/.eslintrc index f8776c68a0..e65cee6619 100644 --- a/services/filestore/.eslintrc +++ b/services/filestore/.eslintrc @@ -1,16 +1,23 @@ +// this file was auto-generated, do not edit it directly. +// instead run bin/update_build_scripts from +// https://github.com/sharelatex/sharelatex-dev-environment +// Version: 1.3 { "extends": [ "standard", "prettier", - "prettier/standard", + "prettier/standard" ], + "parserOptions": { + "ecmaVersion": 6 + }, "plugins": [ "mocha", "chai-expect", "chai-friendly" ], - "parser": "babel-eslint", "env": { + "node": true, "mocha": true }, "rules": { diff --git a/services/filestore/.nvmrc b/services/filestore/.nvmrc index e1e5d1369a..73bffb0393 100644 --- a/services/filestore/.nvmrc +++ b/services/filestore/.nvmrc @@ -1 +1 @@ -6.9.5 +10.17.0 diff --git a/services/filestore/.prettierrc b/services/filestore/.prettierrc index b2095be81e..ddf06f31b6 100644 --- a/services/filestore/.prettierrc +++ b/services/filestore/.prettierrc @@ -1,3 +1,7 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/sharelatex/sharelatex-dev-environment +# Version: 1.3 { "semi": false, "singleQuote": true diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index 5a18b41699..4cdf40264f 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -1,4 +1,9 @@ -FROM node:10.17.0-jessie as app +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/sharelatex/sharelatex-dev-environment +# Version: 1.3 + +FROM node:10.17.0 as app WORKDIR /app @@ -9,9 +14,9 @@ RUN npm install --quiet COPY . 
/app -RUN npm run compile:all -FROM node:10.17.0-jessie + +FROM node:10.17.0 COPY --from=app /app /app diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile index dd741ce239..fe20b5e06e 100644 --- a/services/filestore/Jenkinsfile +++ b/services/filestore/Jenkinsfile @@ -16,6 +16,7 @@ pipeline { } stages { + stage('Install') { steps { withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { @@ -36,6 +37,13 @@ pipeline { } } + stage('Linting') { + steps { + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make format' + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make lint' + } + } + stage('Unit Tests') { steps { sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit' diff --git a/services/filestore/Makefile b/services/filestore/Makefile index b5b188aa51..67bec94e5a 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.24 +# Version: 1.3 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -16,10 +16,6 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ clean: docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) - rm -f app.js - rm -rf app/js - rm -rf test/unit/js - rm -rf test/acceptance/js format: $(DOCKER_COMPOSE) run --rm test_unit npm run format @@ -30,7 +26,6 @@ format_fix: lint: $(DOCKER_COMPOSE) run --rm test_unit npm run lint - test: format lint test_unit test_acceptance test_unit: @@ -46,6 +41,7 @@ test_clean: test_acceptance_pre_run: @[ ! 
-f test/acceptance/js/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/js/scripts/pre-run + build: docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ --tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ @@ -58,4 +54,5 @@ publish: docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + .PHONY: clean test test_unit test_acceptance test_clean build publish diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index dc8c156383..773949832d 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -1,10 +1,10 @@ filestore ---language=coffeescript ---node-version=6.9.5 ---acceptance-creds=None ---dependencies=mongo,redis ---docker-repos=gcr.io/overleaf-ops ---build-target=docker ---script-version=1.1.24 ---env-pass-through= +--acceptance-creds= --public-repo=True +--language=es +--node-version=10.17.0 +--dependencies=mongo,redis,s3 +--script-version=1.3 +--docker-repos=gcr.io/overleaf-ops +--env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_BUCKET_NAME=fake_user_files,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake_template_files,AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake_public_files,AWS_S3_ENDPOINT=http://s3:9090,AWS_ACCESS_KEY_ID=fake,AWS_SECRET_ACCESS_KEY=fake +--env-pass-through= diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 29010e85ee..95256a33d6 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -11,11 +11,11 @@ settings = # Choices are # s3 - Amazon S3 # fs - local filesystem - if process.env['AWS_KEY']? or process.env['S3_BUCKET_CREDENTIALS']? + if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? backend: "s3" s3: - key: process.env['AWS_KEY'] - secret: process.env['AWS_SECRET'] + key: process.env['AWS_ACCESS_KEY_ID'] + secret: process.env['AWS_SECRET_ACCESS_KEY'] endpoint: process.env['AWS_S3_ENDPOINT'] stores: user_files: process.env['AWS_S3_USER_FILES_BUCKET_NAME'] diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 42c6ae37b5..d8342a9837 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.24 +# Version: 1.3 version: "2.1" @@ -22,28 +22,27 @@ services: REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres - ENABLE_CONVERSIONS: "true" MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test + ENABLE_CONVERSIONS: "true" USE_PROM_METRICS: "true" - AWS_KEY: fake - AWS_SECRET: fake AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files - AWS_S3_ENDPOINT: http://fakes3:9090 + AWS_S3_ENDPOINT: http://s3:9090 + AWS_ACCESS_KEY_ID: fake + AWS_SECRET_ACCESS_KEY: fake depends_on: mongo: condition: service_healthy redis: condition: service_healthy - fakes3: + s3: condition: service_healthy user: node command: npm run test:acceptance:_run - tar: build: . 
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER @@ -51,17 +50,14 @@ services: - ./:/tmp/build/ command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . user: root - redis: image: redis mongo: image: mongo:3.4 - - fakes3: + s3: image: adobe/s3mock environment: - initialBuckets=fake_user_files,fake_template_files,fake_public_files healthcheck: test: ["CMD", "curl", "-f", "http://localhost:9090"] - diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 65f18f4d78..1269e795a2 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.1.24 +# Version: 1.3 version: "2.1" @@ -28,47 +28,35 @@ services: MONGO_HOST: mongo POSTGRES_HOST: postgres MOCHA_GREP: ${MOCHA_GREP} - ENABLE_CONVERSIONS: "true" LOG_LEVEL: ERROR NODE_ENV: test + ENABLE_CONVERSIONS: "true" USE_PROM_METRICS: "true" - AWS_KEY: fake - AWS_SECRET: fake AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files - AWS_S3_ENDPOINT: http://fakes3:9090 + AWS_S3_ENDPOINT: http://s3:9090 + AWS_ACCESS_KEY_ID: fake + AWS_SECRET_ACCESS_KEY: fake user: node depends_on: mongo: condition: service_healthy redis: condition: service_healthy - fakes3: + s3: condition: service_healthy command: npm run test:acceptance - - - tar: - build: . - image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER - volumes: - - ./:/tmp/build/ - command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . - user: root - redis: image: redis mongo: image: mongo:3.4 - fakes3: + s3: image: adobe/s3mock environment: - initialBuckets=fake_user_files,fake_template_files,fake_public_files healthcheck: test: ["CMD", "curl", "-f", "http://localhost:9090"] - - diff --git a/services/filestore/nodemon.json b/services/filestore/nodemon.json index 98db38d71b..5826281b84 100644 --- a/services/filestore/nodemon.json +++ b/services/filestore/nodemon.json @@ -10,10 +10,9 @@ }, "watch": [ - "app/coffee/", - "app.coffee", + "app/js/", + "app.js", "config/" ], - "ext": "coffee" - + "ext": "js" } diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 64529d81ca..d636bbe990 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -646,18 +646,15 @@ } }, "ansi-escapes": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.0.tgz", - "integrity": "sha512-EiYhwo0v255HUL6eDyuLrXEkTi7WwVCLAw+SeOQ7M7qdun1z1pum4DEm/nuqIVbPvi9RPPc9k9LbyBv6H0DwVg==", - "dev": true, - "requires": { - "type-fest": "^0.8.1" - } + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", + "dev": true }, "ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", "dev": true }, "ansi-styles": { @@ -1036,12 +1033,12 @@ "dev": 
true }, "cli-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", "dev": true, "requires": { - "restore-cursor": "^3.1.0" + "restore-cursor": "^2.0.0" } }, "cli-width": { @@ -1372,12 +1369,6 @@ "shimmer": "^1.2.0" } }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - }, "encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", @@ -1459,48 +1450,47 @@ "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" }, "eslint": { - "version": "6.7.2", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.7.2.tgz", - "integrity": "sha512-qMlSWJaCSxDFr8fBPvJM9kJwbazrhNcBU3+DszDW1OlEwKBBRWsJc7NJFelvwQpanHCR14cOLD41x8Eqvo3Nng==", + "version": "5.16.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", + "integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", "dev": true, "requires": { "@babel/code-frame": "^7.0.0", - "ajv": "^6.10.0", + "ajv": "^6.9.1", "chalk": "^2.1.0", "cross-spawn": "^6.0.5", "debug": "^4.0.1", "doctrine": "^3.0.0", - "eslint-scope": "^5.0.0", - "eslint-utils": "^1.4.3", - "eslint-visitor-keys": "^1.1.0", - "espree": "^6.1.2", + "eslint-scope": "^4.0.3", + "eslint-utils": "^1.3.1", + "eslint-visitor-keys": "^1.0.0", + "espree": "^5.0.1", "esquery": "^1.0.1", "esutils": "^2.0.2", "file-entry-cache": "^5.0.1", "functional-red-black-tree": "^1.0.1", - "glob-parent": "^5.0.0", - "globals": "^12.1.0", + "glob": "^7.1.2", + "globals": "^11.7.0", "ignore": "^4.0.6", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", - "inquirer": "^7.0.0", - "is-glob": "^4.0.0", - "js-yaml": "^3.13.1", + "inquirer": "^6.2.2", + "js-yaml": "^3.13.0", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.3.0", - "lodash": "^4.17.14", + "lodash": "^4.17.11", "minimatch": "^3.0.4", "mkdirp": "^0.5.1", "natural-compare": "^1.4.0", - "optionator": "^0.8.3", + "optionator": "^0.8.2", + "path-is-inside": "^1.0.2", "progress": "^2.0.0", "regexpp": "^2.0.1", - "semver": "^6.1.2", - "strip-ansi": "^5.2.0", - "strip-json-comments": "^3.0.1", + "semver": "^5.5.1", + "strip-ansi": "^4.0.0", + "strip-json-comments": "^2.0.1", "table": "^5.2.3", - "text-table": "^0.2.0", - "v8-compile-cache": "^2.0.3" + "text-table": "^0.2.0" }, "dependencies": { "debug": { @@ -1512,11 +1502,19 @@ "ms": "^2.1.1" } }, - "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", - "dev": true + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } }, "ms": { "version": "2.1.2", @@ -1525,10 +1523,19 
@@ "dev": true }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", "dev": true + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } } } }, @@ -1691,9 +1698,9 @@ "dev": true }, "eslint-scope": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz", - "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", "dev": true, "requires": { "esrecurse": "^4.1.0", @@ -1716,22 +1723,14 @@ "dev": true }, "espree": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/espree/-/espree-6.1.2.tgz", - "integrity": "sha512-2iUPuuPP+yW1PZaMSDM9eyVf8D5P0Hi8h83YtZ5bPc/zHYjII5khoixIUTMO794NOY8F/ThF1Bo8ncZILarUTA==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", + "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", "dev": true, "requires": { - "acorn": "^7.1.0", - "acorn-jsx": "^5.1.0", - "eslint-visitor-keys": "^1.1.0" - }, - "dependencies": { - "acorn": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz", - "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==", - "dev": true - } + "acorn": "^6.0.7", + "acorn-jsx": "^5.0.0", + "eslint-visitor-keys": "^1.0.0" } }, "esprima": { @@ -1950,9 +1949,9 @@ "integrity": "sha1-PlzoKTQJz6pxd6cbnKhOGx5vJe8=" }, "figures": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-3.1.0.tgz", - "integrity": "sha512-ravh8VRXqHuMvZt/d8GblBeqDMkdJMBdv/2KntFH+ra5MXkO7nxNKpzQ3n6QD/2da1kH0aWmNISdvhM7gl2gVg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", "dev": true, "requires": { "escape-string-regexp": "^1.0.5" @@ -2190,23 +2189,11 @@ "path-is-absolute": "^1.0.0" } }, - "glob-parent": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz", - "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==", - "dev": true, - "requires": { - "is-glob": "^4.0.1" - } - }, "globals": { - "version": "12.3.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-12.3.0.tgz", - "integrity": "sha512-wAfjdLgFsPZsklLJvOBUBmzYE8/CwhEqSBEMRXA3qxIiNtyqvjYurAtIfDh6chlEPUfmTY3MnZh5Hfh4q0UlIw==", - "dev": true, - "requires": { - "type-fest": "^0.8.1" - } + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true }, "google-auth-library": 
{ "version": "3.1.2", @@ -2453,22 +2440,22 @@ "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" }, "inquirer": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.1.tgz", - "integrity": "sha512-V1FFQ3TIO15det8PijPLFR9M9baSlnRs9nL7zWu1MNVA2T9YVl9ZbrHJhYs7e9X8jeMZ3lr2JH/rdHFgNCBdYw==", + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", + "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", "dev": true, "requires": { - "ansi-escapes": "^4.2.1", + "ansi-escapes": "^3.2.0", "chalk": "^2.4.2", - "cli-cursor": "^3.1.0", + "cli-cursor": "^2.1.0", "cli-width": "^2.0.0", "external-editor": "^3.0.3", - "figures": "^3.0.0", - "lodash": "^4.17.15", - "mute-stream": "0.0.8", + "figures": "^2.0.0", + "lodash": "^4.17.12", + "mute-stream": "0.0.7", "run-async": "^2.2.0", - "rxjs": "^6.5.3", - "string-width": "^4.1.0", + "rxjs": "^6.4.0", + "string-width": "^2.1.0", "strip-ansi": "^5.1.0", "through": "^2.3.6" }, @@ -2514,27 +2501,12 @@ "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=", "dev": true }, - "is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "dev": true - }, "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", "dev": true }, - "is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", - "dev": true, - "requires": { - "is-extglob": "^2.1.1" - } - }, "is-promise": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", @@ -3103,9 +3075,9 @@ } }, "mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", "dev": true }, "minimatch": { @@ -3219,9 +3191,9 @@ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, "mute-stream": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", - "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", "dev": true }, "mv": { @@ -3462,12 +3434,12 @@ } }, "onetime": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", - "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": 
"sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", "dev": true, "requires": { - "mimic-fn": "^2.1.0" + "mimic-fn": "^1.0.0" } }, "optionator": { @@ -4582,12 +4554,12 @@ } }, "restore-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", - "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", "dev": true, "requires": { - "onetime": "^5.1.0", + "onetime": "^2.0.0", "signal-exit": "^3.0.2" } }, @@ -4928,23 +4900,22 @@ "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=" }, "string-width": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", "dev": true, "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" }, "dependencies": { "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", "dev": true, "requires": { - "ansi-regex": "^5.0.0" + "ansi-regex": "^3.0.0" } } } @@ -4998,9 +4969,9 @@ "dev": true }, "strip-json-comments": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz", - "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", "dev": true }, "supports-color": { @@ -5198,12 +5169,6 @@ "integrity": "sha1-dkb7XxiHHPu3dJ5pvTmmOI63RQw=", "dev": true }, - "type-fest": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", - "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", - "dev": true - }, "type-is": { "version": "1.6.16", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz", @@ -5263,12 +5228,6 @@ "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" }, - "v8-compile-cache": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", - "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==", - "dev": true - }, "validate-npm-package-license": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 24ec708b10..f9bba8a746 100644 --- a/services/filestore/package.json +++ 
b/services/filestore/package.json @@ -13,11 +13,11 @@ "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "start": "node $NODE_APP_OPTIONS app.js", "nodemon": "nodemon --config nodemon.json", - "lint": "eslint -f unix .", - "format": "prettier-eslint \"$(pwd)/**/*.js\" --list-different", - "format:fix": "prettier-eslint \"$(pwd)/**/*.js\" --write", - "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js", - "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js" + "lint": "node_modules/.bin/eslint .", + "format": "node_modules/.bin/prettier-eslint '**/*.js' --list-different", + "format:fix": "node_modules/.bin/prettier-eslint '**/*.js' --write", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" }, "dependencies": { "async": "~0.2.10", @@ -47,7 +47,7 @@ "babel-eslint": "^10.0.3", "bunyan": "^1.3.5", "chai": "4.2.0", - "eslint": "^6.7.2", + "eslint": "^5.16.0", "eslint-config-prettier": "^6.7.0", "eslint-config-standard": "^14.1.0", "eslint-plugin-chai-expect": "^2.1.0", From 0b3b71b857d2fb6a2b3483916f52e9fa92eda7c8 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 17 Dec 2019 09:57:51 +0000 Subject: [PATCH 353/555] Fall back to old AWS env vars if present --- services/filestore/config/settings.defaults.coffee | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 95256a33d6..206f932a76 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -1,5 +1,12 @@ Path = require "path" +# environment variables renamed for consistency +# use AWS_ACCESS_KEY_ID-style going forward +if process.env['AWS_KEY'] && !process.env['AWS_ACCESS_KEY_ID'] + process.env['AWS_ACCESS_KEY_ID'] = process.env['AWS_KEY'] +if process.env['AWS_SECRET'] && !process.env['AWS_SECRET_ACCESS_KEY'] + process.env['AWS_SECRET_ACCESS_KEY'] = process.env['AWS_SECRET'] + settings = internal: filestore: From 42f6d1003147d2df00e852b0b0424a5b41599279 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 17 Dec 2019 15:19:19 +0000 Subject: [PATCH 354/555] Remove mongo and redis from dependencies --- services/filestore/buildscript.txt | 14 +++++++------- services/filestore/docker-compose.ci.yml | 9 --------- services/filestore/docker-compose.yml | 10 ---------- 3 files changed, 7 insertions(+), 26 deletions(-) diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index 773949832d..7e00306b70 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -1,10 +1,10 @@ filestore ---acceptance-creds= ---public-repo=True ---language=es ---node-version=10.17.0 ---dependencies=mongo,redis,s3 ---script-version=1.3 ---docker-repos=gcr.io/overleaf-ops --env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_BUCKET_NAME=fake_user_files,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake_template_files,AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake_public_files,AWS_S3_ENDPOINT=http://s3:9090,AWS_ACCESS_KEY_ID=fake,AWS_SECRET_ACCESS_KEY=fake +--dependencies=s3 +--acceptance-creds= +--script-version=1.3 +--node-version=10.17.0 +--language=es +--docker-repos=gcr.io/overleaf-ops +--public-repo=True --env-pass-through= diff --git a/services/filestore/docker-compose.ci.yml 
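Patch 353 above keeps the legacy AWS_KEY / AWS_SECRET variables working by copying them into the canonical names at startup. For readers who do not follow CoffeeScript, the same fallback written as plain JavaScript (an illustrative translation, not part of the patch itself) looks like this:

// Prefer the canonical AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY names, but
// honour the legacy AWS_KEY / AWS_SECRET variables when only those are set.
if (process.env.AWS_KEY && !process.env.AWS_ACCESS_KEY_ID) {
  process.env.AWS_ACCESS_KEY_ID = process.env.AWS_KEY
}
if (process.env.AWS_SECRET && !process.env.AWS_SECRET_ACCESS_KEY) {
  process.env.AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET
}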
b/services/filestore/docker-compose.ci.yml index d8342a9837..ae9b43ee25 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -33,10 +33,6 @@ services: AWS_ACCESS_KEY_ID: fake AWS_SECRET_ACCESS_KEY: fake depends_on: - mongo: - condition: service_healthy - redis: - condition: service_healthy s3: condition: service_healthy user: node @@ -50,11 +46,6 @@ services: - ./:/tmp/build/ command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . user: root - redis: - image: redis - - mongo: - image: mongo:3.4 s3: image: adobe/s3mock environment: diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 1269e795a2..792d70800b 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -40,20 +40,10 @@ services: AWS_SECRET_ACCESS_KEY: fake user: node depends_on: - mongo: - condition: service_healthy - redis: - condition: service_healthy s3: condition: service_healthy command: npm run test:acceptance - redis: - image: redis - - mongo: - image: mongo:3.4 - s3: image: adobe/s3mock environment: From b94df9952ba88e56ac4847a18e11401b60071867 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 18 Dec 2019 09:34:25 +0000 Subject: [PATCH 355/555] Add OError --- services/filestore/npm-shrinkwrap.json | 5 +++++ services/filestore/package.json | 1 + 2 files changed, 6 insertions(+) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index d636bbe990..fb1ada1128 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -368,6 +368,11 @@ } } }, + "@overleaf/o-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-2.1.0.tgz", + "integrity": "sha512-Zd9sks9LrLw8ErHt/cXeWIkyxWAqNAvNGn7wIjLQJH6TTEEW835PWOhpch+hQwwWsTxWIx/JDj+IpZ3ouw925g==" + }, "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index f9bba8a746..47aa4d0d96 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -20,6 +20,7 @@ "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" }, "dependencies": { + "@overleaf/o-error": "^2.1.0", "async": "~0.2.10", "aws-sdk": "^2.1.39", "body-parser": "^1.2.0", From f19d82369d91f06cbc4f0f80b532fd0ea6cc408d Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 18 Dec 2019 09:52:18 +0000 Subject: [PATCH 356/555] Bump eslintrc up again to last-known-compatible version with eslit-prettier --- services/filestore/.eslintrc | 2 +- services/filestore/npm-shrinkwrap.json | 130 +++++++++++++++---------- services/filestore/package.json | 2 +- 3 files changed, 78 insertions(+), 56 deletions(-) diff --git a/services/filestore/.eslintrc b/services/filestore/.eslintrc index e65cee6619..6bd234a2dc 100644 --- a/services/filestore/.eslintrc +++ b/services/filestore/.eslintrc @@ -9,7 +9,7 @@ "prettier/standard" ], "parserOptions": { - "ecmaVersion": 6 + "ecmaVersion": 2017 }, "plugins": [ "mocha", diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index fb1ada1128..ef0f78fc15 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -1455,47 +1455,48 @@ "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" }, "eslint": { - "version": "5.16.0", - "resolved": 
"https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", - "integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.4.0.tgz", + "integrity": "sha512-WTVEzK3lSFoXUovDHEbkJqCVPEPwbhCq4trDktNI6ygs7aO41d4cDT0JFAT5MivzZeVLWlg7vHL+bgrQv/t3vA==", "dev": true, "requires": { "@babel/code-frame": "^7.0.0", - "ajv": "^6.9.1", + "ajv": "^6.10.0", "chalk": "^2.1.0", "cross-spawn": "^6.0.5", "debug": "^4.0.1", "doctrine": "^3.0.0", - "eslint-scope": "^4.0.3", - "eslint-utils": "^1.3.1", - "eslint-visitor-keys": "^1.0.0", - "espree": "^5.0.1", + "eslint-scope": "^5.0.0", + "eslint-utils": "^1.4.2", + "eslint-visitor-keys": "^1.1.0", + "espree": "^6.1.1", "esquery": "^1.0.1", "esutils": "^2.0.2", "file-entry-cache": "^5.0.1", "functional-red-black-tree": "^1.0.1", - "glob": "^7.1.2", + "glob-parent": "^5.0.0", "globals": "^11.7.0", "ignore": "^4.0.6", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", - "inquirer": "^6.2.2", - "js-yaml": "^3.13.0", + "inquirer": "^6.4.1", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.3.0", - "lodash": "^4.17.11", + "lodash": "^4.17.14", "minimatch": "^3.0.4", "mkdirp": "^0.5.1", "natural-compare": "^1.4.0", "optionator": "^0.8.2", - "path-is-inside": "^1.0.2", "progress": "^2.0.0", "regexpp": "^2.0.1", - "semver": "^5.5.1", - "strip-ansi": "^4.0.0", - "strip-json-comments": "^2.0.1", + "semver": "^6.1.2", + "strip-ansi": "^5.2.0", + "strip-json-comments": "^3.0.1", "table": "^5.2.3", - "text-table": "^0.2.0" + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" }, "dependencies": { "debug": { @@ -1507,19 +1508,11 @@ "ms": "^2.1.1" } }, - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true }, "ms": { "version": "2.1.2", @@ -1528,19 +1521,10 @@ "dev": true }, "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "dev": true - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } } } }, @@ -1703,9 +1687,9 @@ "dev": true }, "eslint-scope": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", - "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz", + 
"integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==", "dev": true, "requires": { "esrecurse": "^4.1.0", @@ -1728,14 +1712,22 @@ "dev": true }, "espree": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", - "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/espree/-/espree-6.1.2.tgz", + "integrity": "sha512-2iUPuuPP+yW1PZaMSDM9eyVf8D5P0Hi8h83YtZ5bPc/zHYjII5khoixIUTMO794NOY8F/ThF1Bo8ncZILarUTA==", "dev": true, "requires": { - "acorn": "^6.0.7", - "acorn-jsx": "^5.0.0", - "eslint-visitor-keys": "^1.0.0" + "acorn": "^7.1.0", + "acorn-jsx": "^5.1.0", + "eslint-visitor-keys": "^1.1.0" + }, + "dependencies": { + "acorn": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz", + "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==", + "dev": true + } } }, "esprima": { @@ -2194,6 +2186,15 @@ "path-is-absolute": "^1.0.0" } }, + "glob-parent": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz", + "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, "globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", @@ -2506,12 +2507,27 @@ "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=", "dev": true }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", "dev": true }, + "is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, "is-promise": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", @@ -4974,9 +4990,9 @@ "dev": true }, "strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz", + "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==", "dev": true }, "supports-color": { @@ -5233,6 +5249,12 @@ "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" }, + "v8-compile-cache": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", + "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==", + "dev": true + }, "validate-npm-package-license": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", diff --git 
a/services/filestore/package.json b/services/filestore/package.json index 47aa4d0d96..fd03757072 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -48,7 +48,7 @@ "babel-eslint": "^10.0.3", "bunyan": "^1.3.5", "chai": "4.2.0", - "eslint": "^5.16.0", + "eslint": "^6.4.0", "eslint-config-prettier": "^6.7.0", "eslint-config-standard": "^14.1.0", "eslint-plugin-chai-expect": "^2.1.0", From eacad771127857339b84cd7e9c7455bd8c3e68f9 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 18 Dec 2019 10:01:59 +0000 Subject: [PATCH 357/555] Cleanup SafeExec --- services/filestore/app/js/Errors.js | 52 +++++-- services/filestore/app/js/SafeExec.js | 90 ++++++------ .../filestore/test/unit/js/SafeExecTests.js | 134 ++++++++++-------- 3 files changed, 161 insertions(+), 115 deletions(-) diff --git a/services/filestore/app/js/Errors.js b/services/filestore/app/js/Errors.js index f3bc8e37eb..4231571cb3 100644 --- a/services/filestore/app/js/Errors.js +++ b/services/filestore/app/js/Errors.js @@ -1,16 +1,38 @@ -/* eslint-disable - no-proto, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -let Errors -var NotFoundError = function(message) { - const error = new Error(message) - error.name = 'NotFoundError' - error.__proto__ = NotFoundError.prototype - return error -} -NotFoundError.prototype.__proto__ = Error.prototype +const OError = require('@overleaf/o-error') -module.exports = Errors = { NotFoundError } +// Error class for legacy errors so they inherit OError while staying +// backward-compatible (can be instantiated with string as argument instead +// of object) +class BackwardCompatibleError extends OError { + constructor(messageOrOptions) { + let options + if (typeof messageOrOptions === 'string') { + options = { message: messageOrOptions } + } else if (!messageOrOptions) { + options = {} + } else { + options = messageOrOptions + } + super(options) + } +} + +class NotFoundError extends BackwardCompatibleError {} +class ConversionsDisabledError extends BackwardCompatibleError {} + +class FailedCommandError extends OError { + constructor(command, code, stdout, stderr) { + super({ + message: 'command failed with error exit code', + info: { + command, + code + } + }) + this.stdout = stdout + this.stderr = stderr + this.code = code + } +} + +module.exports = { NotFoundError, FailedCommandError, ConversionsDisabledError } diff --git a/services/filestore/app/js/SafeExec.js b/services/filestore/app/js/SafeExec.js index dbc1576a88..5f079fa474 100644 --- a/services/filestore/app/js/SafeExec.js +++ b/services/filestore/app/js/SafeExec.js @@ -1,21 +1,8 @@ -/* eslint-disable - camelcase, - handle-callback-err, - no-return-assign, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
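The rewritten Errors module in this patch derives every filestore error from @overleaf/o-error, so existing call sites can keep passing a plain message string while newer code gets structured info and cause chaining. A minimal usage sketch, relying only on the o-error 2.x behaviour the patches themselves use (the { message, info } constructor, withCause, and the class name on err.name); the values passed in are illustrative:

const { FailedCommandError, NotFoundError } = require('./app/js/Errors')

// FailedCommandError records the command, exit code and captured output
const cmdErr = new FailedCommandError(
  ['convert', 'in.png', 'out.png'], // illustrative command
  1,
  '',
  'convert: no such file'
)
// cmdErr.name === 'FailedCommandError', cmdErr.code === 1, cmdErr.stderr holds the output

// Legacy-style classes still accept a bare string, and a lower-level failure
// can be attached as the cause, as the FileHandler rewrite in the next patch does
const notFound = new NotFoundError('no file found for key').withCause(cmdErr)
console.log(notFound.name, notFound.message) // 'NotFoundError', 'no file found for key'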
-/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const _ = require('underscore') const logger = require('logger-sharelatex') -const child_process = require('child_process') +const childProcess = require('child_process') const Settings = require('settings-sharelatex') +const { ConversionsDisabledError, FailedCommandError } = require('./Errors') // execute a command in the same way as 'exec' but with a timeout that // kills all child processes @@ -23,36 +10,32 @@ const Settings = require('settings-sharelatex') // we spawn the command with 'detached:true' to make a new process // group, then we can kill everything in that process group. -module.exports = function(command, options, callback) { - if (callback == null) { - callback = function(err, stdout, stderr) {} - } +module.exports = safeExec +module.exports.promises = safeExecPromise + +// options are {timeout: number-of-milliseconds, killSignal: signal-name} +function safeExec(command, options, callback) { if (!Settings.enableConversions) { - const error = new Error('Image conversions are disabled') - return callback(error) + return callback( + new ConversionsDisabledError('image conversions are disabled') + ) } - // options are {timeout: number-of-milliseconds, killSignal: signal-name} - const [cmd, ...args] = Array.from(command) + const [cmd, ...args] = command - const child = child_process.spawn(cmd, args, { detached: true }) + const child = childProcess.spawn(cmd, args, { detached: true }) let stdout = '' let stderr = '' - const cleanup = _.once(function(err) { - if (killTimer != null) { - clearTimeout(killTimer) - } - return callback(err, stdout, stderr) - }) + let killTimer - if (options.timeout != null) { - var killTimer = setTimeout(function() { + if (options.timeout) { + killTimer = setTimeout(function() { try { // use negative process id to kill process group - return process.kill(-child.pid, options.killSignal || 'SIGTERM') + process.kill(-child.pid, options.killSignal || 'SIGTERM') } catch (error) { - return logger.log( + logger.log( { process: child.pid, kill_error: error }, 'error killing process' ) @@ -60,14 +43,41 @@ module.exports = function(command, options, callback) { }, options.timeout) } - child.on('close', function(code, signal) { - const err = code ? 
new Error(`exit status ${code}`) : signal - return cleanup(err) + const cleanup = _.once(function(err) { + if (killTimer) { + clearTimeout(killTimer) + } + callback(err, stdout, stderr) }) - child.on('error', err => cleanup(err)) + child.on('close', function(code, signal) { + if (code || signal) { + return cleanup( + new FailedCommandError(command, code || signal, stdout, stderr) + ) + } - child.stdout.on('data', chunk => (stdout += chunk)) + cleanup() + }) - return child.stderr.on('data', chunk => (stderr += chunk)) + child.on('error', err => { + cleanup(err) + }) + child.stdout.on('data', chunk => { + stdout += chunk + }) + child.stderr.on('data', chunk => { + stderr += chunk + }) +} + +function safeExecPromise(command, options) { + return new Promise((resolve, reject) => { + safeExec(command, options, (err, stdout, stderr) => { + if (err) { + reject(err) + } + resolve({ stdout, stderr }) + }) + }) } diff --git a/services/filestore/test/unit/js/SafeExecTests.js b/services/filestore/test/unit/js/SafeExecTests.js index 2b629947f5..077964ead7 100644 --- a/services/filestore/test/unit/js/SafeExecTests.js +++ b/services/filestore/test/unit/js/SafeExecTests.js @@ -1,95 +1,109 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const { assert } = require('chai') -const sinon = require('sinon') const chai = require('chai') const should = chai.should() const { expect } = chai -const modulePath = '../../../app/js/SafeExec.js' +const modulePath = '../../../app/js/SafeExec' const SandboxedModule = require('sandboxed-module') describe('SafeExec', function() { + let settings, options, safeExec + beforeEach(function() { - this.settings = { enableConversions: true } - this.safe_exec = SandboxedModule.require(modulePath, { + settings = { enableConversions: true } + options = { timeout: 10 * 1000, killSignal: 'SIGTERM' } + + safeExec = SandboxedModule.require(modulePath, { requires: { 'logger-sharelatex': { log() {}, err() {} }, - 'settings-sharelatex': this.settings + 'settings-sharelatex': settings } }) - return (this.options = { timeout: 10 * 1000, killSignal: 'SIGTERM' }) }) - return describe('safe_exec', function() { + describe('safeExec', function() { it('should execute a valid command', function(done) { - return this.safe_exec( - ['/bin/echo', 'hello'], - this.options, - (err, stdout, stderr) => { - stdout.should.equal('hello\n') - should.not.exist(err) - return done() - } - ) + safeExec(['/bin/echo', 'hello'], options, (err, stdout, stderr) => { + stdout.should.equal('hello\n') + stderr.should.equal('') + should.not.exist(err) + done() + }) }) it('should error when conversions are disabled', function(done) { - this.settings.enableConversions = false - return this.safe_exec( - ['/bin/echo', 'hello'], - this.options, - (err, stdout, stderr) => { - expect(err).to.exist - return done() - } - ) + settings.enableConversions = false + safeExec(['/bin/echo', 'hello'], options, err => { + expect(err).to.exist + done() + }) }) it('should execute a command with non-zero exit status', function(done) { - return this.safe_exec( - ['/usr/bin/env', 'false'], - this.options, - (err, stdout, stderr) => { - stdout.should.equal('') - stderr.should.equal('') - err.message.should.equal('exit status 1') - return 
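The cleaned-up SafeExec exposes a callback interface plus a promise wrapper on safeExec.promises, and the rewritten unit tests in this patch exercise both. A short usage sketch based only on what those tests demonstrate; the convert command line is an illustrative placeholder and the require path follows the unit tests:

const safeExec = require('./app/js/SafeExec')
const options = { timeout: 30 * 1000, killSignal: 'SIGTERM' }

// callback style: the whole detached process group is killed on timeout
safeExec(['convert', 'input.png', 'output.png'], options, (err, stdout, stderr) => {
  if (err && err.name === 'FailedCommandError') {
    console.log('command failed', err.code, err.stderr)
  }
})

// promise style, resolving with { stdout, stderr }
async function echo() {
  const { stdout } = await safeExec.promises(['/bin/echo', 'hello'], options)
  return stdout // 'hello\n'
}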
done() - } - ) + safeExec(['/usr/bin/env', 'false'], options, err => { + expect(err).to.exist + expect(err.name).to.equal('FailedCommandError') + expect(err.code).to.equal(1) + expect(err.stdout).to.equal('') + expect(err.stderr).to.equal('') + done() + }) }) it('should handle an invalid command', function(done) { - return this.safe_exec( - ['/bin/foobar'], - this.options, - (err, stdout, stderr) => { - err.code.should.equal('ENOENT') - return done() - } - ) + safeExec(['/bin/foobar'], options, err => { + err.code.should.equal('ENOENT') + done() + }) }) - return it('should handle a command that runs too long', function(done) { - return this.safe_exec( + it('should handle a command that runs too long', function(done) { + safeExec( ['/bin/sleep', '10'], { timeout: 500, killSignal: 'SIGTERM' }, - (err, stdout, stderr) => { - err.should.equal('SIGTERM') - return done() + err => { + expect(err).to.exist + expect(err.name).to.equal('FailedCommandError') + expect(err.code).to.equal('SIGTERM') + done() } ) }) }) + + describe('as a promise', function() { + beforeEach(function() { + safeExec = safeExec.promises + }) + + it('should execute a valid command', async function() { + const { stdout, stderr } = await safeExec(['/bin/echo', 'hello'], options) + + stdout.should.equal('hello\n') + stderr.should.equal('') + }) + + it('should throw a ConversionsDisabledError when appropriate', async function() { + settings.enableConversions = false + try { + await safeExec(['/bin/echo', 'hello'], options) + } catch (err) { + expect(err.name).to.equal('ConversionsDisabledError') + return + } + expect('method did not throw an error').not.to.exist + }) + + it('should throw a FailedCommandError when appropriate', async function() { + try { + await safeExec(['/usr/bin/env', 'false'], options) + } catch (err) { + expect(err.name).to.equal('FailedCommandError') + expect(err.code).to.equal(1) + return + } + expect('method did not throw an error').not.to.exist + }) + }) }) From 27aaff7843c6dd39e1623b00402939effa3f7afb Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 18 Dec 2019 15:40:30 +0000 Subject: [PATCH 358/555] Decaf cleanup for FileHandler and LocalFileWriter Simplified code and tests where possible --- services/filestore/app/js/Errors.js | 12 +- services/filestore/app/js/FileHandler.js | 417 +++++++-------- services/filestore/app/js/LocalFileWriter.js | 120 ++--- services/filestore/npm-shrinkwrap.json | 6 + services/filestore/package.json | 3 +- .../test/unit/js/FileHandlerTests.js | 476 +++++++----------- .../test/unit/js/LocalFileWriterTests.js | 139 ++--- 7 files changed, 474 insertions(+), 699 deletions(-) diff --git a/services/filestore/app/js/Errors.js b/services/filestore/app/js/Errors.js index 4231571cb3..57dbdbe522 100644 --- a/services/filestore/app/js/Errors.js +++ b/services/filestore/app/js/Errors.js @@ -18,7 +18,10 @@ class BackwardCompatibleError extends OError { } class NotFoundError extends BackwardCompatibleError {} +class WriteError extends BackwardCompatibleError {} +class ReadError extends BackwardCompatibleError {} class ConversionsDisabledError extends BackwardCompatibleError {} +class ConversionError extends BackwardCompatibleError {} class FailedCommandError extends OError { constructor(command, code, stdout, stderr) { @@ -35,4 +38,11 @@ class FailedCommandError extends OError { } } -module.exports = { NotFoundError, FailedCommandError, ConversionsDisabledError } +module.exports = { + NotFoundError, + FailedCommandError, + ConversionsDisabledError, + WriteError, + ReadError, + 
ConversionError +} diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index e63c813167..3e102b316b 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -1,18 +1,5 @@ -/* eslint-disable - camelcase, - no-self-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let FileHandler -const settings = require('settings-sharelatex') +const { promisify } = require('util') +const fs = require('fs') const PersistorManager = require('./PersistorManager') const LocalFileWriter = require('./LocalFileWriter') const logger = require('logger-sharelatex') @@ -20,216 +7,196 @@ const FileConverter = require('./FileConverter') const KeyBuilder = require('./KeyBuilder') const async = require('async') const ImageOptimiser = require('./ImageOptimiser') -const Errors = require('./Errors') +const { WriteError, ReadError, ConversionError } = require('./Errors') -module.exports = FileHandler = { - insertFile(bucket, key, stream, callback) { - const convertedKey = KeyBuilder.getConvertedFolderKey(key) - return PersistorManager.deleteDirectory(bucket, convertedKey, function( - error - ) { - if (error != null) { - return callback(error) - } - return PersistorManager.sendStream(bucket, key, stream, callback) - }) - }, - - deleteFile(bucket, key, callback) { - const convertedKey = KeyBuilder.getConvertedFolderKey(key) - return async.parallel( - [ - done => PersistorManager.deleteFile(bucket, key, done), - done => PersistorManager.deleteDirectory(bucket, convertedKey, done) - ], - callback - ) - }, - - getFile(bucket, key, opts, callback) { - // In this call, opts can contain credentials - if (opts == null) { - opts = {} - } - logger.log({ bucket, key, opts: this._scrubSecrets(opts) }, 'getting file') - if (opts.format == null && opts.style == null) { - return this._getStandardFile(bucket, key, opts, callback) - } else { - return this._getConvertedFile(bucket, key, opts, callback) - } - }, - - getFileSize(bucket, key, callback) { - return PersistorManager.getFileSize(bucket, key, callback) - }, - - _getStandardFile(bucket, key, opts, callback) { - return PersistorManager.getFileStream(bucket, key, opts, function( - err, - fileStream - ) { - if (err != null && !(err instanceof Errors.NotFoundError)) { - logger.err( - { bucket, key, opts: FileHandler._scrubSecrets(opts) }, - 'error getting fileStream' - ) - } - return callback(err, fileStream) - }) - }, - - _getConvertedFile(bucket, key, opts, callback) { - const convertedKey = KeyBuilder.addCachingToKey(key, opts) - return PersistorManager.checkIfFileExists( - bucket, - convertedKey, - (err, exists) => { - if (err != null) { - return callback(err) - } - if (exists) { - return PersistorManager.getFileStream( - bucket, - convertedKey, - opts, - callback - ) - } else { - return this._getConvertedFileAndCache( - bucket, - key, - convertedKey, - opts, - callback - ) - } - } - ) - }, - - _getConvertedFileAndCache(bucket, key, convertedKey, opts, callback) { - let convertedFsPath = '' - const originalFsPath = '' - return async.series( - [ - cb => { - return this._convertFile(bucket, key, opts, function( - err, - fileSystemPath, - originalFsPath - ) { - 
convertedFsPath = fileSystemPath - originalFsPath = originalFsPath - return cb(err) - }) - }, - cb => ImageOptimiser.compressPng(convertedFsPath, cb), - cb => - PersistorManager.sendFile(bucket, convertedKey, convertedFsPath, cb) - ], - function(err) { - if (err != null) { - LocalFileWriter.deleteFile(convertedFsPath, function() {}) - LocalFileWriter.deleteFile(originalFsPath, function() {}) - return callback(err) - } - // Send back the converted file from the local copy to avoid problems - // with the file not being present in S3 yet. As described in the - // documentation below, we have already made a 'HEAD' request in - // checkIfFileExists so we only have "eventual consistency" if we try - // to stream it from S3 here. This was a cause of many 403 errors. - // - // "Amazon S3 provides read-after-write consistency for PUTS of new - // objects in your S3 bucket in all regions with one caveat. The - // caveat is that if you make a HEAD or GET request to the key name - // (to find if the object exists) before creating the object, Amazon - // S3 provides eventual consistency for read-after-write."" - // https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel - return LocalFileWriter.getStream(convertedFsPath, function( - err, - readStream - ) { - if (err != null) { - return callback(err) - } - readStream.on('end', function() { - logger.log({ convertedFsPath }, 'deleting temporary file') - return LocalFileWriter.deleteFile(convertedFsPath, function() {}) - }) - return callback(null, readStream) - }) - } - ) - }, - - _convertFile(bucket, originalKey, opts, callback) { - return this._writeS3FileToDisk(bucket, originalKey, opts, function( - err, - originalFsPath - ) { - if (err != null) { - return callback(err) - } - const done = function(err, destPath) { - if (err != null) { - logger.err( - { err, bucket, originalKey, opts: FileHandler._scrubSecrets(opts) }, - 'error converting file' - ) - return callback(err) - } - LocalFileWriter.deleteFile(originalFsPath, function() {}) - return callback(err, destPath, originalFsPath) - } - - logger.log({ opts }, 'converting file depending on opts') - - if (opts.format != null) { - return FileConverter.convert(originalFsPath, opts.format, done) - } else if (opts.style === 'thumbnail') { - return FileConverter.thumbnail(originalFsPath, done) - } else if (opts.style === 'preview') { - return FileConverter.preview(originalFsPath, done) - } else { - return callback( - new Error( - `should have specified opts to convert file with ${JSON.stringify( - opts - )}` - ) - ) - } - }) - }, - - _writeS3FileToDisk(bucket, key, opts, callback) { - return PersistorManager.getFileStream(bucket, key, opts, function( - err, - fileStream - ) { - if (err != null) { - return callback(err) - } - return LocalFileWriter.writeStream(fileStream, key, callback) - }) - }, - - getDirectorySize(bucket, project_id, callback) { - logger.log({ bucket, project_id }, 'getting project size') - return PersistorManager.directorySize(bucket, project_id, function( - err, - size - ) { - if (err != null) { - logger.err({ bucket, project_id }, 'error getting size') - } - return callback(err, size) - }) - }, - - _scrubSecrets(opts) { - const safe = Object.assign({}, opts) - delete safe.credentials - return safe +module.exports = { + insertFile, + deleteFile, + getFile, + getFileSize, + getDirectorySize, + promises: { + getFile: promisify(getFile), + insertFile: promisify(insertFile), + deleteFile: promisify(deleteFile), + getFileSize: promisify(getFileSize), + 
getDirectorySize: promisify(getDirectorySize) } } + +function insertFile(bucket, key, stream, callback) { + const convertedKey = KeyBuilder.getConvertedFolderKey(key) + PersistorManager.deleteDirectory(bucket, convertedKey, function(error) { + if (error) { + return callback(new WriteError('error inserting file').withCause(error)) + } + PersistorManager.sendStream(bucket, key, stream, callback) + }) +} + +function deleteFile(bucket, key, callback) { + const convertedKey = KeyBuilder.getConvertedFolderKey(key) + async.parallel( + [ + done => PersistorManager.deleteFile(bucket, key, done), + done => PersistorManager.deleteDirectory(bucket, convertedKey, done) + ], + callback + ) +} + +function getFile(bucket, key, opts, callback) { + // In this call, opts can contain credentials + if (!opts) { + opts = {} + } + logger.log({ bucket, key, opts: _scrubSecrets(opts) }, 'getting file') + if (!opts.format && !opts.style) { + _getStandardFile(bucket, key, opts, callback) + } else { + _getConvertedFile(bucket, key, opts, callback) + } +} + +function getFileSize(bucket, key, callback) { + PersistorManager.getFileSize(bucket, key, callback) +} + +function getDirectorySize(bucket, projectId, callback) { + logger.log({ bucket, project_id: projectId }, 'getting project size') + PersistorManager.directorySize(bucket, projectId, function(err, size) { + if (err) { + logger.err({ bucket, project_id: projectId }, 'error getting size') + err = new ReadError('error getting project size').withCause(err) + } + return callback(err, size) + }) +} + +function _getStandardFile(bucket, key, opts, callback) { + PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream) { + if (err && err.name !== 'NotFoundError') { + logger.err( + { bucket, key, opts: _scrubSecrets(opts) }, + 'error getting fileStream' + ) + } + callback(err, fileStream) + }) +} + +function _getConvertedFile(bucket, key, opts, callback) { + const convertedKey = KeyBuilder.addCachingToKey(key, opts) + PersistorManager.checkIfFileExists(bucket, convertedKey, (err, exists) => { + if (err) { + return callback(err) + } + + if (exists) { + PersistorManager.getFileStream(bucket, convertedKey, opts, callback) + } else { + _getConvertedFileAndCache(bucket, key, convertedKey, opts, callback) + } + }) +} + +function _getConvertedFileAndCache(bucket, key, convertedKey, opts, callback) { + let convertedFsPath + + async.series( + [ + cb => { + _convertFile(bucket, key, opts, function(err, fileSystemPath) { + convertedFsPath = fileSystemPath + cb(err) + }) + }, + cb => ImageOptimiser.compressPng(convertedFsPath, cb), + cb => PersistorManager.sendFile(bucket, convertedKey, convertedFsPath, cb) + ], + function(err) { + if (err) { + LocalFileWriter.deleteFile(convertedFsPath, function() {}) + return callback( + new ConversionError('failed to convert file').withCause(err) + ) + } + // Send back the converted file from the local copy to avoid problems + // with the file not being present in S3 yet. As described in the + // documentation below, we have already made a 'HEAD' request in + // checkIfFileExists so we only have "eventual consistency" if we try + // to stream it from S3 here. This was a cause of many 403 errors. + // + // "Amazon S3 provides read-after-write consistency for PUTS of new + // objects in your S3 bucket in all regions with one caveat. 
The + // caveat is that if you make a HEAD or GET request to the key name + // (to find if the object exists) before creating the object, Amazon + // S3 provides eventual consistency for read-after-write."" + // https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel + const readStream = fs.createReadStream(convertedFsPath) + readStream.on('end', function() { + LocalFileWriter.deleteFile(convertedFsPath, function() {}) + }) + callback(null, readStream) + } + ) +} + +function _convertFile(bucket, originalKey, opts, callback) { + _writeFileToDisk(bucket, originalKey, opts, function(err, originalFsPath) { + if (err) { + return callback( + new ConversionError('unable to write file to disk').withCause(err) + ) + } + + const done = function(err, destPath) { + if (err) { + logger.err( + { err, bucket, originalKey, opts: _scrubSecrets(opts) }, + 'error converting file' + ) + return callback( + new ConversionError('error converting file').withCause(err) + ) + } + LocalFileWriter.deleteFile(originalFsPath, function() {}) + callback(err, destPath) + } + + logger.log({ opts }, 'converting file depending on opts') + + if (opts.format) { + FileConverter.convert(originalFsPath, opts.format, done) + } else if (opts.style === 'thumbnail') { + FileConverter.thumbnail(originalFsPath, done) + } else if (opts.style === 'preview') { + FileConverter.preview(originalFsPath, done) + } else { + callback( + new ConversionError( + `should have specified opts to convert file with ${JSON.stringify( + opts + )}` + ) + ) + } + }) +} + +function _writeFileToDisk(bucket, key, opts, callback) { + PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream) { + if (err) { + return callback( + new ReadError('unable to get read stream for file').withCause(err) + ) + } + LocalFileWriter.writeStream(fileStream, key, callback) + }) +} + +function _scrubSecrets(opts) { + const safe = Object.assign({}, opts) + delete safe.credentials + return safe +} diff --git a/services/filestore/app/js/LocalFileWriter.js b/services/filestore/app/js/LocalFileWriter.js index 8a541a35e9..44f3f9433a 100644 --- a/services/filestore/app/js/LocalFileWriter.js +++ b/services/filestore/app/js/LocalFileWriter.js @@ -1,91 +1,57 @@ -/* eslint-disable - handle-callback-err, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
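The decaffeinated FileHandler also exports a promisified interface next to the callback one. A brief sketch of how a caller might consume it; the bucket, key and destination are placeholders, and the require path assumes the service layout used throughout these patches:

const FileHandler = require('./app/js/FileHandler')

async function streamThumbnail(bucket, key, destination) {
  // with a style or format option the converted (and cached) copy is returned,
  // otherwise the original upload is streamed straight from the persistor
  const stream = await FileHandler.promises.getFile(bucket, key, { style: 'thumbnail' })
  stream.pipe(destination)
}

async function removeFile(bucket, key) {
  // removes both the original object and its converted-cache folder
  await FileHandler.promises.deleteFile(bucket, key)
}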
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const fs = require('fs') const uuid = require('node-uuid') const path = require('path') -const _ = require('underscore') +const Stream = require('stream') +const { callbackify, promisify } = require('util') const logger = require('logger-sharelatex') const metrics = require('metrics-sharelatex') const Settings = require('settings-sharelatex') -const Errors = require('./Errors') +const { WriteError } = require('./Errors') module.exports = { - writeStream(stream, key, callback) { - const timer = new metrics.Timer('writingFile') - callback = _.once(callback) - const fsPath = this._getPath(key) - logger.log({ fsPath }, 'writing file locally') - const writeStream = fs.createWriteStream(fsPath) - writeStream.on('finish', function() { - timer.done() - logger.log({ fsPath }, 'finished writing file locally') - return callback(null, fsPath) - }) - writeStream.on('error', function(err) { - logger.err( - { err, fsPath }, - 'problem writing file locally, with write stream' - ) - return callback(err) - }) - stream.on('error', function(err) { - logger.log( - { err, fsPath }, - 'problem writing file locally, with read stream' - ) - return callback(err) - }) - return stream.pipe(writeStream) + promises: { + writeStream, + deleteFile }, + writeStream: callbackify(writeStream), + deleteFile: callbackify(deleteFile) +} - getStream(fsPath, _callback) { - if (_callback == null) { - _callback = function(err, res) {} - } - const callback = _.once(_callback) - const timer = new metrics.Timer('readingFile') - logger.log({ fsPath }, 'reading file locally') - const readStream = fs.createReadStream(fsPath) - readStream.on('end', function() { - timer.done() - return logger.log({ fsPath }, 'finished reading file locally') - }) - readStream.on('error', function(err) { - logger.err( - { err, fsPath }, - 'problem reading file locally, with read stream' - ) - if (err.code === 'ENOENT') { - return callback(new Errors.NotFoundError(err.message), null) - } else { - return callback(err) - } - }) - return callback(null, readStream) - }, +const pipeline = promisify(Stream.pipeline) - deleteFile(fsPath, callback) { - if (fsPath == null || fsPath === '') { - return callback() - } - logger.log({ fsPath }, 'removing local temp file') - return fs.unlink(fsPath, callback) - }, +async function writeStream(stream, key) { + const timer = new metrics.Timer('writingFile') + const fsPath = _getPath(key) - _getPath(key) { - if (key == null) { - key = uuid.v1() - } - key = key.replace(/\//g, '-') - return path.join(Settings.path.uploadFolder, key) + logger.log({ fsPath }, 'writing file locally') + + const writeStream = fs.createWriteStream(fsPath) + try { + await pipeline(stream, writeStream) + timer.done() + logger.log({ fsPath }, 'finished writing file locally') + return fsPath + } catch (err) { + logger.err({ err, fsPath }, 'problem writing file locally') + throw new WriteError({ + message: 'problem writing file locally', + info: { err, fsPath } + }).withCause(err) } } + +async function deleteFile(fsPath) { + if (!fsPath) { + return + } + logger.log({ fsPath }, 'removing local temp file') + await promisify(fs.unlink)(fsPath) +} + +function _getPath(key) { + if (key == null) { + key = uuid.v1() + } + key = key.replace(/\//g, '-') + return path.join(Settings.path.uploadFolder, key) +} diff 
--git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index ef0f78fc15..44f5ec3263 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -4763,6 +4763,12 @@ "type-detect": "^4.0.8" } }, + "sinon-chai": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/sinon-chai/-/sinon-chai-3.3.0.tgz", + "integrity": "sha512-r2JhDY7gbbmh5z3Q62pNbrjxZdOAjpsqW/8yxAZRSqLZqowmfGZPGUZPFf3UX36NLis0cv8VEM5IJh9HgkSOAA==", + "dev": true + }, "slice-ansi": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index fd03757072..d39d1027be 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -62,6 +62,7 @@ "prettier-eslint": "^9.0.1", "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "2.0.3", - "sinon": "7.1.1" + "sinon": "7.1.1", + "sinon-chai": "^3.3.0" } } diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index e641ffdd16..671e5c41ea 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -1,367 +1,233 @@ -/* eslint-disable - handle-callback-err, - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const { assert } = require('chai') const sinon = require('sinon') const chai = require('chai') -const should = chai.should() const { expect } = chai const modulePath = '../../../app/js/FileHandler.js' const SandboxedModule = require('sandboxed-module') describe('FileHandler', function() { - beforeEach(function() { - this.settings = { - s3: { - buckets: { - user_files: 'user_files' - } + let PersistorManager, + LocalFileWriter, + FileConverter, + KeyBuilder, + ImageOptimiser, + FileHandler, + fs + const settings = { + s3: { + buckets: { + user_files: 'user_files' } } - this.PersistorManager = { - getFileStream: sinon.stub(), - checkIfFileExists: sinon.stub(), - deleteFile: sinon.stub(), - deleteDirectory: sinon.stub(), - sendStream: sinon.stub(), - insertFile: sinon.stub(), - directorySize: sinon.stub() + } + + const bucket = 'my_bucket' + const key = 'key/here' + const convertedFolderKey = 'convertedFolder' + const sourceStream = 'sourceStream' + const convertedKey = 'convertedKey' + const readStream = { + stream: 'readStream', + on: sinon.stub() + } + + beforeEach(function() { + PersistorManager = { + getFileStream: sinon.stub().yields(null, sourceStream), + checkIfFileExists: sinon.stub().yields(), + deleteFile: sinon.stub().yields(), + deleteDirectory: sinon.stub().yields(), + sendStream: sinon.stub().yields(), + insertFile: sinon.stub().yields(), + sendFile: sinon.stub().yields(), + directorySize: sinon.stub().yields() } - this.LocalFileWriter = { - writeStream: sinon.stub(), - getStream: sinon.stub(), - deleteFile: sinon.stub() + LocalFileWriter = { + writeStream: sinon.stub().yields(), + deleteFile: sinon.stub().yields() } - this.FileConverter = { - convert: sinon.stub(), - thumbnail: sinon.stub(), - preview: sinon.stub() + FileConverter = { + convert: sinon.stub().yields(), + thumbnail: sinon.stub().yields(), + preview: sinon.stub().yields() 
} - this.keyBuilder = { - addCachingToKey: sinon.stub(), - getConvertedFolderKey: sinon.stub() + KeyBuilder = { + addCachingToKey: sinon.stub().returns(convertedKey), + getConvertedFolderKey: sinon.stub().returns(convertedFolderKey) } - this.ImageOptimiser = { compressPng: sinon.stub() } - this.handler = SandboxedModule.require(modulePath, { + ImageOptimiser = { compressPng: sinon.stub().yields() } + fs = { + createReadStream: sinon.stub().returns(readStream) + } + + FileHandler = SandboxedModule.require(modulePath, { requires: { - 'settings-sharelatex': this.settings, - './PersistorManager': this.PersistorManager, - './LocalFileWriter': this.LocalFileWriter, - './FileConverter': this.FileConverter, - './KeyBuilder': this.keyBuilder, - './ImageOptimiser': this.ImageOptimiser, + 'settings-sharelatex': settings, + './PersistorManager': PersistorManager, + './LocalFileWriter': LocalFileWriter, + './FileConverter': FileConverter, + './KeyBuilder': KeyBuilder, + './ImageOptimiser': ImageOptimiser, + fs: fs, 'logger-sharelatex': { log() {}, err() {} } - } + }, + globals: { console } }) - this.bucket = 'my_bucket' - this.key = 'key/here' - this.stubbedPath = '/var/somewhere/path' - this.format = 'png' - return (this.formattedStubbedPath = `${this.stubbedPath}.${this.format}`) }) describe('insertFile', function() { - beforeEach(function() { - this.stream = {} - this.PersistorManager.deleteDirectory.callsArgWith(2) - return this.PersistorManager.sendStream.callsArgWith(3) - }) + const stream = 'stream' it('should send file to the filestore', function(done) { - return this.handler.insertFile(this.bucket, this.key, this.stream, () => { - this.PersistorManager.sendStream - .calledWith(this.bucket, this.key, this.stream) - .should.equal(true) - return done() + FileHandler.insertFile(bucket, key, stream, err => { + expect(err).not.to.exist + expect(PersistorManager.sendStream).to.have.been.calledWith( + bucket, + key, + stream + ) + done() }) }) - return it('should delete the convetedKey folder', function(done) { - this.keyBuilder.getConvertedFolderKey.returns(this.stubbedConvetedKey) - return this.handler.insertFile(this.bucket, this.key, this.stream, () => { - this.PersistorManager.deleteDirectory - .calledWith(this.bucket, this.stubbedConvetedKey) - .should.equal(true) - return done() + it('should delete the convertedKey folder', function(done) { + FileHandler.insertFile(bucket, key, stream, err => { + expect(err).not.to.exist + expect(PersistorManager.deleteDirectory).to.have.been.calledWith( + bucket, + convertedFolderKey + ) + done() }) }) }) describe('deleteFile', function() { - beforeEach(function() { - this.keyBuilder.getConvertedFolderKey.returns(this.stubbedConvetedKey) - this.PersistorManager.deleteFile.callsArgWith(2) - return this.PersistorManager.deleteDirectory.callsArgWith(2) - }) - it('should tell the filestore manager to delete the file', function(done) { - return this.handler.deleteFile(this.bucket, this.key, () => { - this.PersistorManager.deleteFile - .calledWith(this.bucket, this.key) - .should.equal(true) - return done() + FileHandler.deleteFile(bucket, key, err => { + expect(err).not.to.exist + expect(PersistorManager.deleteFile).to.have.been.calledWith(bucket, key) + done() }) }) - return it('should tell the filestore manager to delete the cached foler', function(done) { - return this.handler.deleteFile(this.bucket, this.key, () => { - this.PersistorManager.deleteDirectory - .calledWith(this.bucket, this.stubbedConvetedKey) - .should.equal(true) - return done() + it('should 
tell the filestore manager to delete the cached folder', function(done) { + FileHandler.deleteFile(bucket, key, err => { + expect(err).not.to.exist + expect(PersistorManager.deleteDirectory).to.have.been.calledWith( + bucket, + convertedFolderKey + ) + done() }) }) }) describe('getFile', function() { - beforeEach(function() { - this.handler._getStandardFile = sinon.stub().callsArgWith(3) - return (this.handler._getConvertedFile = sinon.stub().callsArgWith(3)) - }) - - it('should call _getStandardFile if no format or style are defined', function(done) { - return this.handler.getFile(this.bucket, this.key, null, () => { - this.handler._getStandardFile.called.should.equal(true) - this.handler._getConvertedFile.called.should.equal(false) - return done() + it('should return the source stream no format or style are defined', function(done) { + FileHandler.getFile(bucket, key, null, (err, stream) => { + expect(err).not.to.exist + expect(stream).to.equal(sourceStream) + done() }) }) - it('should pass options to _getStandardFile', function(done) { + it('should pass options through to PersistorManager', function(done) { const options = { start: 0, end: 8 } - return this.handler.getFile(this.bucket, this.key, options, () => { - expect(this.handler._getStandardFile.lastCall.args[2].start).to.equal(0) - expect(this.handler._getStandardFile.lastCall.args[2].end).to.equal(8) - return done() + FileHandler.getFile(bucket, key, options, err => { + expect(err).not.to.exist + expect(PersistorManager.getFileStream).to.have.been.calledWith( + bucket, + key, + options + ) + done() }) }) - return it('should call _getConvertedFile if a format is defined', function(done) { - return this.handler.getFile( - this.bucket, - this.key, - { format: 'png' }, - () => { - this.handler._getStandardFile.called.should.equal(false) - this.handler._getConvertedFile.called.should.equal(true) - return done() - } - ) - }) - }) + describe('when a format is defined', function() { + let result - describe('_getStandardFile', function() { - beforeEach(function() { - this.fileStream = { on() {} } - return this.PersistorManager.getFileStream.callsArgWith( - 3, - 'err', - this.fileStream - ) - }) + describe('when the file is not cached', function() { + beforeEach(function(done) { + FileHandler.getFile(bucket, key, { format: 'png' }, (err, stream) => { + result = { err, stream } + done() + }) + }) - it('should get the stream', function(done) { - return this.handler.getFile(this.bucket, this.key, null, () => { - this.PersistorManager.getFileStream - .calledWith(this.bucket, this.key) - .should.equal(true) - return done() + it('should convert the file', function() { + expect(FileConverter.convert).to.have.been.called + expect(ImageOptimiser.compressPng).to.have.been.called + }) + + it('should return the the converted stream', function() { + expect(result.err).not.to.exist + expect(result.stream).to.equal(readStream) + expect(PersistorManager.getFileStream).to.have.been.calledWith( + bucket, + key + ) + }) + }) + + describe('when the file is cached', function() { + beforeEach(function(done) { + PersistorManager.checkIfFileExists = sinon.stub().yields(null, true) + FileHandler.getFile(bucket, key, { format: 'png' }, (err, stream) => { + result = { err, stream } + done() + }) + }) + + it('should not convert the file', function() { + expect(FileConverter.convert).not.to.have.been.called + expect(ImageOptimiser.compressPng).not.to.have.been.called + }) + + it('should return the cached stream', function() { + expect(result.err).not.to.exist + 
expect(result.stream).to.equal(sourceStream) + expect(PersistorManager.getFileStream).to.have.been.calledWith( + bucket, + convertedKey + ) + }) }) }) - it('should return the stream and error', function(done) { - return this.handler.getFile( - this.bucket, - this.key, - null, - (err, stream) => { - err.should.equal('err') - stream.should.equal(this.fileStream) - return done() - } - ) - }) - - return it('should pass options to PersistorManager', function(done) { - return this.handler.getFile( - this.bucket, - this.key, - { start: 0, end: 8 }, - () => { - expect( - this.PersistorManager.getFileStream.lastCall.args[2].start - ).to.equal(0) - expect( - this.PersistorManager.getFileStream.lastCall.args[2].end - ).to.equal(8) - return done() - } - ) - }) - }) - - describe('_getConvertedFile', function() { - it('should getFileStream if it does exists', function(done) { - this.PersistorManager.checkIfFileExists.callsArgWith(2, null, true) - this.PersistorManager.getFileStream.callsArgWith(3) - return this.handler._getConvertedFile(this.bucket, this.key, {}, () => { - this.PersistorManager.getFileStream - .calledWith(this.bucket) - .should.equal(true) - return done() + describe('when a style is defined', function() { + it('generates a thumbnail when requested', function(done) { + FileHandler.getFile(bucket, key, { style: 'thumbnail' }, err => { + expect(err).not.to.exist + expect(FileConverter.thumbnail).to.have.been.called + expect(FileConverter.preview).not.to.have.been.called + done() + }) }) - }) - return it('should call _getConvertedFileAndCache if it does exists', function(done) { - this.PersistorManager.checkIfFileExists.callsArgWith(2, null, false) - this.handler._getConvertedFileAndCache = sinon.stub().callsArgWith(4) - return this.handler._getConvertedFile(this.bucket, this.key, {}, () => { - this.handler._getConvertedFileAndCache - .calledWith(this.bucket, this.key) - .should.equal(true) - return done() + it('generates a preview when requested', function(done) { + FileHandler.getFile(bucket, key, { style: 'preview' }, err => { + expect(err).not.to.exist + expect(FileConverter.thumbnail).not.to.have.been.called + expect(FileConverter.preview).to.have.been.called + done() + }) }) }) }) - describe('_getConvertedFileAndCache', () => - it('should _convertFile ', function(done) { - this.stubbedStream = { something: 'here' } - this.localStream = { - on() {} - } - this.PersistorManager.sendFile = sinon.stub().callsArgWith(3) - this.LocalFileWriter.getStream = sinon - .stub() - .callsArgWith(1, null, this.localStream) - this.convetedKey = this.key + 'converted' - this.handler._convertFile = sinon - .stub() - .callsArgWith(3, null, this.stubbedPath) - this.ImageOptimiser.compressPng = sinon.stub().callsArgWith(1) - return this.handler._getConvertedFileAndCache( - this.bucket, - this.key, - this.convetedKey, - {}, - (err, fsStream) => { - this.handler._convertFile.called.should.equal(true) - this.PersistorManager.sendFile - .calledWith(this.bucket, this.convetedKey, this.stubbedPath) - .should.equal(true) - this.ImageOptimiser.compressPng - .calledWith(this.stubbedPath) - .should.equal(true) - this.LocalFileWriter.getStream - .calledWith(this.stubbedPath) - .should.equal(true) - fsStream.should.equal(this.localStream) - return done() - } - ) - })) - - describe('_convertFile', function() { - beforeEach(function() { - this.FileConverter.convert.callsArgWith( - 2, - null, - this.formattedStubbedPath - ) - this.FileConverter.thumbnail.callsArgWith( - 1, - null, - this.formattedStubbedPath - ) - 
this.FileConverter.preview.callsArgWith( - 1, - null, - this.formattedStubbedPath - ) - this.handler._writeS3FileToDisk = sinon - .stub() - .callsArgWith(3, null, this.stubbedPath) - return this.LocalFileWriter.deleteFile.callsArgWith(1) - }) - - it('should call thumbnail on the writer path if style was thumbnail was specified', function(done) { - return this.handler._convertFile( - this.bucket, - this.key, - { style: 'thumbnail' }, - (err, path) => { - path.should.equal(this.formattedStubbedPath) - this.FileConverter.thumbnail - .calledWith(this.stubbedPath) - .should.equal(true) - this.LocalFileWriter.deleteFile - .calledWith(this.stubbedPath) - .should.equal(true) - return done() - } - ) - }) - - it('should call preview on the writer path if style was preview was specified', function(done) { - return this.handler._convertFile( - this.bucket, - this.key, - { style: 'preview' }, - (err, path) => { - path.should.equal(this.formattedStubbedPath) - this.FileConverter.preview - .calledWith(this.stubbedPath) - .should.equal(true) - this.LocalFileWriter.deleteFile - .calledWith(this.stubbedPath) - .should.equal(true) - return done() - } - ) - }) - - return it('should call convert on the writer path if a format was specified', function(done) { - return this.handler._convertFile( - this.bucket, - this.key, - { format: this.format }, - (err, path) => { - path.should.equal(this.formattedStubbedPath) - this.FileConverter.convert - .calledWith(this.stubbedPath, this.format) - .should.equal(true) - this.LocalFileWriter.deleteFile - .calledWith(this.stubbedPath) - .should.equal(true) - return done() - } - ) - }) - }) - - return describe('getDirectorySize', function() { - beforeEach(function() { - return this.PersistorManager.directorySize.callsArgWith(2) - }) - - return it('should call the filestore manager to get directory size', function(done) { - return this.handler.getDirectorySize(this.bucket, this.key, () => { - this.PersistorManager.directorySize - .calledWith(this.bucket, this.key) - .should.equal(true) - return done() + describe('getDirectorySize', function() { + it('should call the filestore manager to get directory size', function(done) { + FileHandler.getDirectorySize(bucket, key, err => { + expect(err).not.to.exist + expect(PersistorManager.directorySize).to.have.been.calledWith( + bucket, + key + ) + done() }) }) }) diff --git a/services/filestore/test/unit/js/LocalFileWriterTests.js b/services/filestore/test/unit/js/LocalFileWriterTests.js index 04cc2fb049..5d7008a91f 100644 --- a/services/filestore/test/unit/js/LocalFileWriterTests.js +++ b/services/filestore/test/unit/js/LocalFileWriterTests.js @@ -1,120 +1,79 @@ -/* eslint-disable - handle-callback-err, - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ - -const { assert } = require('chai') const sinon = require('sinon') const chai = require('chai') -const should = chai.should() const { expect } = chai const modulePath = '../../../app/js/LocalFileWriter.js' const SandboxedModule = require('sandboxed-module') +chai.use(require('sinon-chai')) describe('LocalFileWriter', function() { + const writeStream = 'writeStream' + const readStream = 'readStream' + const settings = { path: { uploadFolder: '/uploads' } } + const fsPath = '/uploads/wombat' + const filename = 'wombat' + let stream, fs, LocalFileWriter + beforeEach(function() { - this.writeStream = { - on(type, cb) { - if (type === 'finish') { - return cb() - } - } + fs = { + createWriteStream: sinon.stub().returns(writeStream), + unlink: sinon.stub().yields() } - this.readStream = { on() {} } - this.fs = { - createWriteStream: sinon.stub().returns(this.writeStream), - createReadStream: sinon.stub().returns(this.readStream), - unlink: sinon.stub() + stream = { + pipeline: sinon.stub().yields() } - this.settings = { - path: { - uploadFolder: 'somewhere' - } - } - this.writer = SandboxedModule.require(modulePath, { + + LocalFileWriter = SandboxedModule.require(modulePath, { requires: { - fs: this.fs, + fs, + stream, 'logger-sharelatex': { log() {}, err() {} }, - 'settings-sharelatex': this.settings, + 'settings-sharelatex': settings, 'metrics-sharelatex': { inc: sinon.stub(), Timer: sinon.stub().returns({ done: sinon.stub() }) } } }) - - return (this.stubbedFsPath = 'something/uploads/eio2k1j3') }) - describe('writeStrem', function() { - beforeEach(function() { - return (this.writer._getPath = sinon.stub().returns(this.stubbedFsPath)) - }) - - it('write the stream to ./uploads', function(done) { - const stream = { - pipe: dest => { - dest.should.equal(this.writeStream) - return done() - }, - on() {} - } - return this.writer.writeStream(stream, null, () => {}) - }) - - return it('should send the path in the callback', function(done) { - const stream = { - pipe: dest => {}, - on(type, cb) { - if (type === 'end') { - return cb() - } - } - } - return this.writer.writeStream(stream, null, (err, fsPath) => { - fsPath.should.equal(this.stubbedFsPath) - return done() + describe('writeStream', function() { + it('writes the stream to the upload folder', function(done) { + LocalFileWriter.writeStream(readStream, filename, (err, path) => { + expect(err).not.to.exist + expect(fs.createWriteStream).to.have.been.calledWith(fsPath) + expect(stream.pipeline).to.have.been.calledWith(readStream, writeStream) + expect(path).to.equal(fsPath) + done() }) }) }) - describe('getStream', function() { - it('should read the stream from the file ', function(done) { - return this.writer.getStream(this.stubbedFsPath, (err, stream) => { - this.fs.createReadStream - .calledWith(this.stubbedFsPath) - .should.equal(true) - return done() - }) - }) - - return it('should send the stream in the callback', function(done) { - return this.writer.getStream(this.stubbedFsPath, (err, readStream) => { - readStream.should.equal(this.readStream) - return done() - }) - }) - }) - - return describe('delete file', () => + describe('deleteFile', function() { it('should unlink the file', function(done) { - const error = 'my error' - this.fs.unlink.callsArgWith(1, error) - return this.writer.deleteFile(this.stubbedFsPath, err => { - 
this.fs.unlink.calledWith(this.stubbedFsPath).should.equal(true) - err.should.equal(error) - return done() + LocalFileWriter.deleteFile(fsPath, err => { + expect(err).not.to.exist + expect(fs.unlink).to.have.been.calledWith(fsPath) + done() }) - })) + }) + + it('should not do anything if called with an empty path', function(done) { + fs.unlink = sinon.stub().yields(new Error('failed to reticulate splines')) + LocalFileWriter.deleteFile(fsPath, err => { + expect(err).to.exist + done() + }) + }) + + it('should not call unlink with an empty path', function(done) { + LocalFileWriter.deleteFile('', err => { + expect(err).not.to.exist + expect(fs.unlink).not.to.have.been.called + done() + }) + }) + }) }) From a216be5fd33166ddbcbde54ff1f34cfc5ac9a141 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 23 Dec 2019 15:58:03 +0000 Subject: [PATCH 359/555] Improve error handling in FileHandler --- services/filestore/app/js/FileHandler.js | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index 3e102b316b..684a1d2896 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -7,7 +7,12 @@ const FileConverter = require('./FileConverter') const KeyBuilder = require('./KeyBuilder') const async = require('async') const ImageOptimiser = require('./ImageOptimiser') -const { WriteError, ReadError, ConversionError } = require('./Errors') +const { + WriteError, + ReadError, + ConversionError, + NotFoundError +} = require('./Errors') module.exports = { insertFile, @@ -66,16 +71,17 @@ function getDirectorySize(bucket, projectId, callback) { logger.log({ bucket, project_id: projectId }, 'getting project size') PersistorManager.directorySize(bucket, projectId, function(err, size) { if (err) { - logger.err({ bucket, project_id: projectId }, 'error getting size') - err = new ReadError('error getting project size').withCause(err) + return callback( + new ReadError('error getting project size').withCause(err) + ) } - return callback(err, size) + callback(null, size) }) } function _getStandardFile(bucket, key, opts, callback) { PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream) { - if (err && err.name !== 'NotFoundError') { + if (err && !(err instanceof NotFoundError)) { logger.err( { bucket, key, opts: _scrubSecrets(opts) }, 'error getting fileStream' From b6a1ea6a3025145bffda321d30bcefc9b214081a Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 19 Dec 2019 10:41:41 +0000 Subject: [PATCH 360/555] Cleanup and promisify FileConverter --- services/filestore/app/js/FileConverter.js | 231 ++++++++---------- .../test/unit/js/FileConverterTests.js | 154 ++++++------ 2 files changed, 179 insertions(+), 206 deletions(-) diff --git a/services/filestore/app/js/FileConverter.js b/services/filestore/app/js/FileConverter.js index 04b599728a..0cb5c709fc 100644 --- a/services/filestore/app/js/FileConverter.js +++ b/services/filestore/app/js/FileConverter.js @@ -1,133 +1,116 @@ -/* eslint-disable - camelcase, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const _ = require('underscore') const metrics = require('metrics-sharelatex') const logger = require('logger-sharelatex') -const safe_exec = require('./SafeExec') -const approvedFormats = ['png'] const Settings = require('settings-sharelatex') +const { callbackify } = require('util') -const fourtySeconds = 40 * 1000 +const safeExec = require('./SafeExec').promises +const { ConversionError } = require('./Errors') -const childProcessOpts = { - killSignal: 'SIGTERM', - timeout: fourtySeconds -} +const APPROVED_FORMATS = ['png'] +const FOURTY_SECONDS = 40 * 1000 +const KILL_SIGNAL = 'SIGTERM' module.exports = { - convert(sourcePath, requestedFormat, callback) { - logger.log({ sourcePath, requestedFormat }, 'converting file format') - const timer = new metrics.Timer('imageConvert') - const destPath = `${sourcePath}.${requestedFormat}` - sourcePath = `${sourcePath}[0]` - if (!_.include(approvedFormats, requestedFormat)) { - const err = new Error('invalid format requested') - return callback(err) - } - const width = '600x' - let command = [ - 'convert', - '-define', - `pdf:fit-page=${width}`, - '-flatten', - '-density', - '300', - sourcePath, - destPath - ] - command = Settings.commands.convertCommandPrefix.concat(command) - return safe_exec(command, childProcessOpts, function(err, stdout, stderr) { - timer.done() - if (err != null) { - logger.err( - { err, stderr, sourcePath, requestedFormat, destPath }, - 'something went wrong converting file' - ) - } else { - logger.log( - { sourcePath, requestedFormat, destPath }, - 'finished converting file' - ) - } - return callback(err, destPath) - }) - }, - - thumbnail(sourcePath, callback) { - const destPath = `${sourcePath}.png` - sourcePath = `${sourcePath}[0]` - const width = '260x' - let command = [ - 'convert', - '-flatten', - '-background', - 'white', - '-density', - '300', - '-define', - `pdf:fit-page=${width}`, - sourcePath, - '-resize', - width, - destPath - ] - logger.log({ sourcePath, destPath, command }, 'thumbnail convert file') - command = Settings.commands.convertCommandPrefix.concat(command) - return safe_exec(command, childProcessOpts, function(err, stdout, stderr) { - if (err != null) { - logger.err( - { err, stderr, sourcePath }, - 'something went wrong converting file to thumbnail' - ) - } else { - logger.log({ sourcePath, destPath }, 'finished thumbnailing file') - } - return callback(err, destPath) - }) - }, - - preview(sourcePath, callback) { - logger.log({ sourcePath }, 'preview convert file') - const destPath = `${sourcePath}.png` - sourcePath = `${sourcePath}[0]` - const width = '548x' - let command = [ - 'convert', - '-flatten', - '-background', - 'white', - '-density', - '300', - '-define', - `pdf:fit-page=${width}`, - sourcePath, - '-resize', - width, - destPath - ] - command = Settings.commands.convertCommandPrefix.concat(command) - return safe_exec(command, childProcessOpts, function(err, stdout, stderr) { - if (err != null) { - logger.err( - { err, stderr, sourcePath, destPath }, - 'something went wrong converting file to preview' - ) - } else { - logger.log( - { sourcePath, destPath }, - 'finished converting file to preview' - ) - } - return callback(err, destPath) - }) + convert: callbackify(convert), + thumbnail: callbackify(thumbnail), + preview: 
callbackify(preview), + promises: { + convert, + thumbnail, + preview } } + +async function convert(sourcePath, requestedFormat) { + const width = '600x' + return _convert(sourcePath, requestedFormat, [ + 'convert', + '-define', + `pdf:fit-page=${width}`, + '-flatten', + '-density', + '300', + `${sourcePath}[0]` + ]) +} + +async function thumbnail(sourcePath) { + const width = '260x' + return convert(sourcePath, 'png', [ + 'convert', + '-flatten', + '-background', + 'white', + '-density', + '300', + '-define', + `pdf:fit-page=${width}`, + `${sourcePath}[0]`, + '-resize', + width + ]) +} + +async function preview(sourcePath) { + const width = '548x' + return convert(sourcePath, 'png', [ + 'convert', + '-flatten', + '-background', + 'white', + '-density', + '300', + '-define', + `pdf:fit-page=${width}`, + `${sourcePath}[0]`, + '-resize', + width + ]) +} + +async function _convert(sourcePath, requestedFormat, command) { + logger.log({ sourcePath, requestedFormat }, 'converting file format') + + if (!APPROVED_FORMATS.includes(requestedFormat)) { + throw new ConversionError({ + message: 'invalid format requested', + info: { format: requestedFormat } + }) + } + + const timer = new metrics.Timer('imageConvert') + const destPath = `${sourcePath}.${requestedFormat}` + + command.push(destPath) + command = Settings.commands.convertCommandPrefix.concat(command) + + try { + await safeExec(command, { + killSignal: KILL_SIGNAL, + timeout: FOURTY_SECONDS + }) + } catch (err) { + logger.err( + { + err, + stderr: err.stderr, + command, + sourcePath, + requestedFormat, + destPath + }, + 'something went wrong converting file' + ) + throw new ConversionError({ + message: 'something went wrong converting file', + info: { stderr: err.stderr, sourcePath, requestedFormat, destPath } + }).withCause(err) + } + + timer.done() + logger.log( + { sourcePath, requestedFormat, destPath }, + 'finished converting file' + ) + return destPath +} diff --git a/services/filestore/test/unit/js/FileConverterTests.js b/services/filestore/test/unit/js/FileConverterTests.js index 89faacd1d1..7a7ba12ee2 100644 --- a/services/filestore/test/unit/js/FileConverterTests.js +++ b/services/filestore/test/unit/js/FileConverterTests.js @@ -1,29 +1,30 @@ -/* eslint-disable - handle-callback-err, - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/*
- * decaffeinate suggestions:
- * DS102: Remove unnecessary code created because of implicit returns
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
-
-const { assert } = require('chai')
 const sinon = require('sinon')
 const chai = require('chai')
-const should = chai.should()
 const { expect } = chai
-const modulePath = '../../../app/js/FileConverter.js'
 const SandboxedModule = require('sandboxed-module')

+const modulePath = '../../../app/js/FileConverter.js'
+
 describe('FileConverter', function() {
+  let SafeExec, FileConverter
+  const sourcePath = '/data/wombat.eps'
+  const destPath = '/tmp/dest.png'
+  const format = 'png'
+  const errorMessage = 'guru meditation error'
+  const Settings = {
+    commands: {
+      convertCommandPrefix: []
+    }
+  }
+
   beforeEach(function() {
-    this.safe_exec = sinon.stub()
-    this.converter = SandboxedModule.require(modulePath, {
+    SafeExec = {
+      promises: sinon.stub().resolves(destPath)
+    }
+
+    FileConverter = SandboxedModule.require(modulePath, {
       requires: {
-        './SafeExec': this.safe_exec,
+        './SafeExec': SafeExec,
         'logger-sharelatex': {
           log() {},
           err() {}
@@ -32,86 +33,75 @@ describe('FileConverter', function() {
           inc: sinon.stub(),
           Timer: sinon.stub().returns({ done: sinon.stub() })
         },
-        'settings-sharelatex': (this.Settings = {
-          commands: {
-            convertCommandPrefix: []
-          }
-        })
+        'settings-sharelatex': Settings
       }
     })
-
-    this.sourcePath = '/this/path/here.eps'
-    this.format = 'png'
-    return (this.error = 'Error')
   })

   describe('convert', function() {
-    it('should convert the source to the requested format', function(done) {
-      this.safe_exec.callsArgWith(2)
-      return this.converter.convert(this.sourcePath, this.format, err => {
-        const args = this.safe_exec.args[0][0]
-        args.indexOf(`${this.sourcePath}[0]`).should.not.equal(-1)
-        args.indexOf(`${this.sourcePath}.${this.format}`).should.not.equal(-1)
-        return done()
-      })
+    it('should convert the source to the requested format', async function() {
+      await FileConverter.promises.convert(sourcePath, format)
+      const args = SafeExec.promises.args[0][0]
+      expect(args).to.include(`${sourcePath}[0]`)
+      expect(args).to.include(`${sourcePath}.${format}`)
     })

-    it('should return the dest path', function(done) {
-      this.safe_exec.callsArgWith(2)
-      return this.converter.convert(
-        this.sourcePath,
-        this.format,
-        (err, destPath) => {
-          destPath.should.equal(`${this.sourcePath}.${this.format}`)
-          return done()
-        }
-      )
+    it('should return the dest path', async function() {
+      const destPath = await FileConverter.promises.convert(sourcePath, format)
+      destPath.should.equal(`${sourcePath}.${format}`)
     })

-    it('should return the error from convert', function(done) {
-      this.safe_exec.callsArgWith(2, this.error)
-      return this.converter.convert(this.sourcePath, this.format, err => {
-        err.should.equal(this.error)
-        return done()
-      })
+    it('should wrap the error from convert', async function() {
+      SafeExec.promises.rejects(errorMessage)
+      try {
+        await FileConverter.promises.convert(sourcePath, format)
+        expect('error should have been thrown').not.to.exist
+      } catch (err) {
+        expect(err.name).to.equal('ConversionError')
+        expect(err.cause.toString()).to.equal(errorMessage)
+      }
     })

-    it('should not accapt an non aproved format', function(done) {
-      this.safe_exec.callsArgWith(2)
-      return this.converter.convert(this.sourcePath, 'ahhhhh', err => {
-        expect(err).to.exist
-        return done()
-      })
+    it('should not accept a non-approved format', async function() {
+      try {
+        await 
FileConverter.promises.convert(sourcePath, 'potato') + expect('error should have been thrown').not.to.exist + } catch (err) { + expect(err.name).to.equal('ConversionError') + } }) - return it('should prefix the command with Settings.commands.convertCommandPrefix', function(done) { - this.safe_exec.callsArgWith(2) - this.Settings.commands.convertCommandPrefix = ['nice'] - return this.converter.convert(this.sourcePath, this.format, err => { - const command = this.safe_exec.args[0][0] - command[0].should.equal('nice') - return done() + it('should prefix the command with Settings.commands.convertCommandPrefix', async function() { + Settings.commands.convertCommandPrefix = ['nice'] + await FileConverter.promises.convert(sourcePath, format) + }) + + it('should convert the file when called as a callback', function(done) { + FileConverter.convert(sourcePath, format, (err, destPath) => { + expect(err).not.to.exist + destPath.should.equal(`${sourcePath}.${format}`) + + const args = SafeExec.promises.args[0][0] + expect(args).to.include(`${sourcePath}[0]`) + expect(args).to.include(`${sourcePath}.${format}`) + done() }) }) }) - describe('thumbnail', () => - it('should call converter resize with args', function(done) { - this.safe_exec.callsArgWith(2) - return this.converter.thumbnail(this.sourcePath, err => { - const args = this.safe_exec.args[0][0] - args.indexOf(`${this.sourcePath}[0]`).should.not.equal(-1) - return done() - }) - })) + describe('thumbnail', function() { + it('should call converter resize with args', async function() { + await FileConverter.promises.thumbnail(sourcePath) + const args = SafeExec.promises.args[0][0] + expect(args).to.include(`${sourcePath}[0]`) + }) + }) - return describe('preview', () => - it('should call converter resize with args', function(done) { - this.safe_exec.callsArgWith(2) - return this.converter.preview(this.sourcePath, err => { - const args = this.safe_exec.args[0][0] - args.indexOf(`${this.sourcePath}[0]`).should.not.equal(-1) - return done() - }) - })) + describe('preview', function() { + it('should call converter resize with args', async function() { + await FileConverter.promises.preview(sourcePath) + const args = SafeExec.promises.args[0][0] + expect(args).to.include(`${sourcePath}[0]`) + }) + }) }) From ac2d05ecb3d6da0bca8064b01b2bc0f46006af58 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 23 Dec 2019 16:03:01 +0000 Subject: [PATCH 361/555] Remove unnecessary logging from FileConverter --- services/filestore/app/js/FileConverter.js | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/services/filestore/app/js/FileConverter.js b/services/filestore/app/js/FileConverter.js index 0cb5c709fc..9338b289fb 100644 --- a/services/filestore/app/js/FileConverter.js +++ b/services/filestore/app/js/FileConverter.js @@ -90,17 +90,6 @@ async function _convert(sourcePath, requestedFormat, command) { timeout: FOURTY_SECONDS }) } catch (err) { - logger.err( - { - err, - stderr: err.stderr, - command, - sourcePath, - requestedFormat, - destPath - }, - 'something went wrong converting file' - ) throw new ConversionError({ message: 'something went wrong converting file', info: { stderr: err.stderr, sourcePath, requestedFormat, destPath } From 42adc59d018e906ddf662ea9c50ec970a325122a Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 19 Dec 2019 15:36:48 +0000 Subject: [PATCH 362/555] Clean up and promisify health-check controller and KeyBuilder --- services/filestore/app.js | 30 ++-- services/filestore/app/js/Errors.js | 4 +- 
.../filestore/app/js/HealthCheckController.js | 114 +++++++------- services/filestore/app/js/KeyBuilder.js | 141 +++++++++--------- 4 files changed, 145 insertions(+), 144 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index c6c11e152a..9e76107ea6 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -89,82 +89,82 @@ Metrics.injectMetricsRoute(app) app.head( '/project/:project_id/file/:file_id', - keyBuilder.userFileKey, + keyBuilder.userFileKeyMiddleware, fileController.getFileHead ) app.get( '/project/:project_id/file/:file_id', - keyBuilder.userFileKey, + keyBuilder.userFileKeyMiddleware, fileController.getFile ) app.post( '/project/:project_id/file/:file_id', - keyBuilder.userFileKey, + keyBuilder.userFileKeyMiddleware, fileController.insertFile ) app.put( '/project/:project_id/file/:file_id', - keyBuilder.userFileKey, + keyBuilder.userFileKeyMiddleware, bodyParser.json(), fileController.copyFile ) app.del( '/project/:project_id/file/:file_id', - keyBuilder.userFileKey, + keyBuilder.userFileKeyMiddleware, fileController.deleteFile ) app.head( '/template/:template_id/v/:version/:format', - keyBuilder.templateFileKey, + keyBuilder.templateFileKeyMiddleware, fileController.getFileHead ) app.get( '/template/:template_id/v/:version/:format', - keyBuilder.templateFileKey, + keyBuilder.templateFileKeyMiddleware, fileController.getFile ) app.get( '/template/:template_id/v/:version/:format/:sub_type', - keyBuilder.templateFileKey, + keyBuilder.templateFileKeyMiddleware, fileController.getFile ) app.post( '/template/:template_id/v/:version/:format', - keyBuilder.templateFileKey, + keyBuilder.templateFileKeyMiddleware, fileController.insertFile ) app.head( '/project/:project_id/public/:public_file_id', - keyBuilder.publicFileKey, + keyBuilder.publicFileKeyMiddleware, fileController.getFileHead ) app.get( '/project/:project_id/public/:public_file_id', - keyBuilder.publicFileKey, + keyBuilder.publicFileKeyMiddleware, fileController.getFile ) app.post( '/project/:project_id/public/:public_file_id', - keyBuilder.publicFileKey, + keyBuilder.publicFileKeyMiddleware, fileController.insertFile ) app.put( '/project/:project_id/public/:public_file_id', - keyBuilder.publicFileKey, + keyBuilder.publicFileKeyMiddleware, bodyParser.json(), fileController.copyFile ) app.del( '/project/:project_id/public/:public_file_id', - keyBuilder.publicFileKey, + keyBuilder.publicFileKeyMiddleware, fileController.deleteFile ) app.get( '/project/:project_id/size', - keyBuilder.publicProjectKey, + keyBuilder.publicProjectKeyMiddleware, fileController.directorySize ) diff --git a/services/filestore/app/js/Errors.js b/services/filestore/app/js/Errors.js index 57dbdbe522..65af6dc056 100644 --- a/services/filestore/app/js/Errors.js +++ b/services/filestore/app/js/Errors.js @@ -20,6 +20,7 @@ class BackwardCompatibleError extends OError { class NotFoundError extends BackwardCompatibleError {} class WriteError extends BackwardCompatibleError {} class ReadError extends BackwardCompatibleError {} +class HealthCheckError extends BackwardCompatibleError {} class ConversionsDisabledError extends BackwardCompatibleError {} class ConversionError extends BackwardCompatibleError {} @@ -44,5 +45,6 @@ module.exports = { ConversionsDisabledError, WriteError, ReadError, - ConversionError + ConversionError, + HealthCheckError } diff --git a/services/filestore/app/js/HealthCheckController.js b/services/filestore/app/js/HealthCheckController.js index eecadb00d9..8d6e35b783 100644 --- 
a/services/filestore/app/js/HealthCheckController.js +++ b/services/filestore/app/js/HealthCheckController.js @@ -1,80 +1,72 @@ -// TODO: This file was created by bulk-decaffeinate. -// Sanity-check the conversion and remove this comment. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const fs = require('fs-extra') const path = require('path') -const async = require('async') -const fileConverter = require('./FileConverter') -const keyBuilder = require('./KeyBuilder') -const fileController = require('./FileController') const logger = require('logger-sharelatex') -const settings = require('settings-sharelatex') +const Settings = require('settings-sharelatex') const streamBuffers = require('stream-buffers') -const _ = require('underscore') +const { promisify } = require('util') +const Stream = require('stream') -const checkCanStoreFiles = function(callback) { - callback = _.once(callback) - const req = { params: {}, query: {}, headers: {} } - req.params.project_id = settings.health_check.project_id - req.params.file_id = settings.health_check.file_id - const myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer({ +const pipeline = promisify(Stream.pipeline) +const fsCopy = promisify(fs.copy) +const fsUnlink = promisify(fs.unlink) + +const { HealthCheckError } = require('./Errors') +const FileConverter = require('./FileConverter').promises +const FileHandler = require('./FileHandler').promises + +async function checkCanGetFiles() { + if (!Settings.health_check) { + return + } + + const projectId = Settings.health_check.project_id + const fileId = Settings.health_check.file_id + const key = `${projectId}/${fileId}` + const bucket = Settings.filestore.stores.user_files + + const buffer = new streamBuffers.WritableStreamBuffer({ initialSize: 100 }) - const res = { - send(code) { - if (code !== 200) { - return callback(new Error(`non-200 code from getFile: ${code}`)) - } - } + + const sourceStream = await FileHandler.getFile(bucket, key, {}) + try { + await pipeline(sourceStream, buffer) + } catch (err) { + throw new HealthCheckError('failed to get health-check file').withCause(err) + } + + if (!buffer.size()) { + throw new HealthCheckError('no bytes written to download stream') } - myWritableStreamBuffer.send = res.send - return keyBuilder.userFileKey(req, res, function() { - fileController.getFile(req, myWritableStreamBuffer) - return myWritableStreamBuffer.on('close', function() { - if (myWritableStreamBuffer.size() > 0) { - return callback() - } else { - const err = 'no data in write stream buffer for health check' - logger.err({ err }, 'error performing health check') - return callback(err) - } - }) - }) } -const checkFileConvert = function(callback) { - if (!settings.enableConversions) { - return callback() +async function checkFileConvert() { + if (!Settings.enableConversions) { + return + } + + const imgPath = path.join(Settings.path.uploadFolder, '/tiny.pdf') + + let resultPath + try { + await fsCopy('./tiny.pdf', imgPath) + resultPath = await FileConverter.thumbnail(imgPath) + } finally { + if (resultPath) { + await fsUnlink(resultPath) + } + await fsUnlink(imgPath) } - const imgPath = path.join(settings.path.uploadFolder, '/tiny.pdf') - return async.waterfall( - [ - cb => fs.copy('./tiny.pdf', imgPath, cb), - cb => fileConverter.thumbnail(imgPath, cb), - (resultPath, cb) 
=> fs.unlink(resultPath, cb), - cb => fs.unlink(imgPath, cb) - ], - callback - ) } module.exports = { check(req, res) { logger.log({}, 'performing health check') - return async.parallel([checkFileConvert, checkCanStoreFiles], function( - err - ) { - if (err != null) { + Promise.all([checkCanGetFiles(), checkFileConvert()]) + .then(() => res.send(200)) + .catch(err => { logger.err({ err }, 'Health check: error running') - return res.send(500) - } else { - return res.send(200) - } - }) + res.send(500) + }) } } diff --git a/services/filestore/app/js/KeyBuilder.js b/services/filestore/app/js/KeyBuilder.js index 110900c991..8de7c0be2a 100644 --- a/services/filestore/app/js/KeyBuilder.js +++ b/services/filestore/app/js/KeyBuilder.js @@ -1,71 +1,78 @@ -/* eslint-disable - camelcase, - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const settings = require('settings-sharelatex') module.exports = { - getConvertedFolderKey(key) { - return (key = `${key}-converted-cache/`) - }, - - addCachingToKey(key, opts) { - key = this.getConvertedFolderKey(key) - if (opts.format != null && opts.style == null) { - key = `${key}format-${opts.format}` - } - if (opts.style != null && opts.format == null) { - key = `${key}style-${opts.style}` - } - if (opts.style != null && opts.format != null) { - key = `${key}format-${opts.format}-style-${opts.style}` - } - return key - }, - - userFileKey(req, res, next) { - const { project_id, file_id } = req.params - req.key = `${project_id}/${file_id}` - req.bucket = settings.filestore.stores.user_files - return next() - }, - - publicFileKey(req, res, next) { - const { project_id, public_file_id } = req.params - if (settings.filestore.stores.public_files == null) { - return res.status(501).send('public files not available') - } else { - req.key = `${project_id}/${public_file_id}` - req.bucket = settings.filestore.stores.public_files - return next() - } - }, - - templateFileKey(req, res, next) { - const { template_id, format, version, sub_type } = req.params - req.key = `${template_id}/v/${version}/${format}` - if (sub_type != null) { - req.key = `${req.key}/${sub_type}` - } - req.bucket = settings.filestore.stores.template_files - req.version = version - const opts = req.query - return next() - }, - - publicProjectKey(req, res, next) { - const { project_id } = req.params - req.project_id = project_id - req.bucket = settings.filestore.stores.user_files - return next() - } + getConvertedFolderKey, + addCachingToKey, + userFileKeyMiddleware, + publicFileKeyMiddleware, + publicProjectKeyMiddleware, + templateFileKeyMiddleware +} + +function getConvertedFolderKey(key) { + return `${key}-converted-cache/` +} + +function addCachingToKey(key, opts) { + key = this.getConvertedFolderKey(key) + + if (opts.format && !opts.style) { + key = `${key}format-${opts.format}` + } + if (opts.style && !opts.format) { + key = `${key}style-${opts.style}` + } + if (opts.style && opts.format) { + key = `${key}format-${opts.format}-style-${opts.style}` + } + + return key +} + +function userFileKeyMiddleware(req, res, next) { + const { project_id: projectId, file_id: fileId } = req.params + req.key = `${projectId}/${fileId}` + req.bucket = 
settings.filestore.stores.user_files + next() +} + +function publicFileKeyMiddleware(req, res, next) { + if (settings.filestore.stores.public_files == null) { + return res.status(501).send('public files not available') + } + + const { project_id: projectId, public_file_id: publicFileId } = req.params + req.key = `${projectId}/${publicFileId}` + req.bucket = settings.filestore.stores.public_files + + next() +} + +function templateFileKeyMiddleware(req, res, next) { + const { + template_id: templateId, + format, + version, + sub_type: subType + } = req.params + + req.key = `${templateId}/v/${version}/${format}` + + if (subType) { + req.key = `${req.key}/${subType}` + } + + req.bucket = settings.filestore.stores.template_files + req.version = version + + next() +} + +function publicProjectKeyMiddleware(req, res, next) { + const { project_id: projectId } = req.params + + req.project_id = projectId + req.bucket = settings.filestore.stores.user_files + + next() } From 006f84abebdd0117ae9d7fc9384d5449caacdeaa Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 19 Dec 2019 16:56:03 +0000 Subject: [PATCH 363/555] Decaf and promisify ImageOptimiser --- services/filestore/app/js/ImageOptimiser.js | 75 ++++++++--------- .../test/unit/js/ImageOptimiserTests.js | 84 +++++++------------ 2 files changed, 67 insertions(+), 92 deletions(-) diff --git a/services/filestore/app/js/ImageOptimiser.js b/services/filestore/app/js/ImageOptimiser.js index 99f1eb21f6..555e6334bd 100644 --- a/services/filestore/app/js/ImageOptimiser.js +++ b/services/filestore/app/js/ImageOptimiser.js @@ -1,44 +1,41 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const { exec } = require('child_process') const logger = require('logger-sharelatex') -const Settings = require('settings-sharelatex') +const metrics = require('metrics-sharelatex') +const { callbackify } = require('util') +const safeExec = require('./SafeExec').promises module.exports = { - compressPng(localPath, callback) { - const startTime = new Date() - logger.log({ localPath }, 'optimising png path') - const args = `optipng ${localPath}` - const opts = { - timeout: 30 * 1000, - killSignal: 'SIGKILL' - } - if (!Settings.enableConversions) { - const error = new Error('Image conversions are disabled') - return callback(error) - } - return exec(args, opts, function(err, stdout, stderr) { - if (err != null && err.signal === 'SIGKILL') { - logger.warn({ err, stderr, localPath }, 'optimiser timeout reached') - err = null - } else if (err != null) { - logger.err( - { err, stderr, localPath }, - 'something went wrong converting compressPng' - ) - } else { - logger.log({ localPath }, 'finished compressPng file') - } - return callback(err) - }) + compressPng: callbackify(compressPng), + promises: { + compressPng + } +} + +async function compressPng(localPath, callback) { + const timer = new metrics.Timer('compressPng') + logger.log({ localPath }, 'optimising png path') + + const args = ['optipng', localPath] + const opts = { + timeout: 30 * 1000, + killSignal: 'SIGKILL' + } + + try { + await safeExec(args, opts) + timer.done() + logger.log({ localPath }, 'finished compressing png') + } catch (err) { + if (err.code === 'SIGKILL') 
{ + logger.warn( + { err, stderr: err.stderr, localPath }, + 'optimiser timeout reached' + ) + } else { + logger.err( + { err, stderr: err.stderr, localPath }, + 'something went wrong compressing png' + ) + throw err + } } } diff --git a/services/filestore/test/unit/js/ImageOptimiserTests.js b/services/filestore/test/unit/js/ImageOptimiserTests.js index 6fdb09f89d..7940767fdf 100644 --- a/services/filestore/test/unit/js/ImageOptimiserTests.js +++ b/services/filestore/test/unit/js/ImageOptimiserTests.js @@ -1,81 +1,59 @@ -/* eslint-disable - handle-callback-err, - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const { assert } = require('chai') const sinon = require('sinon') const chai = require('chai') -const should = chai.should() const { expect } = chai const modulePath = '../../../app/js/ImageOptimiser.js' +const { FailedCommandError } = require('../../../app/js/Errors') const SandboxedModule = require('sandboxed-module') describe('ImageOptimiser', function() { + let ImageOptimiser, SafeExec + const sourcePath = '/wombat/potato.eps' + beforeEach(function() { - this.child_process = { exec: sinon.stub() } - this.settings = { enableConversions: true } - this.optimiser = SandboxedModule.require(modulePath, { + SafeExec = { + promises: sinon.stub().resolves() + } + ImageOptimiser = SandboxedModule.require(modulePath, { requires: { - child_process: this.child_process, + './SafeExec': SafeExec, 'logger-sharelatex': { log() {}, err() {}, warn() {} - }, - 'settings-sharelatex': this.settings + } } }) - - this.sourcePath = '/this/path/here.eps' - return (this.error = 'Error') }) describe('compressPng', function() { - it('convert the file', function(done) { - this.child_process.exec.callsArgWith(2) - return this.optimiser.compressPng(this.sourcePath, err => { - const args = this.child_process.exec.args[0][0] - args.should.equal(`optipng ${this.sourcePath}`) - return done() + it('should convert the file', function(done) { + ImageOptimiser.compressPng(sourcePath, err => { + expect(err).not.to.exist + expect(SafeExec.promises).to.have.been.calledWith([ + 'optipng', + sourcePath + ]) + done() }) }) - return it('should return the error', function(done) { - this.child_process.exec.callsArgWith(2, this.error) - return this.optimiser.compressPng(this.sourcePath, err => { - err.should.equal(this.error) - return done() + it('should return the error', function(done) { + SafeExec.promises.rejects('wombat herding failure') + ImageOptimiser.compressPng(sourcePath, err => { + expect(err.toString()).to.equal('wombat herding failure') + done() }) }) }) - describe('when enableConversions is disabled', () => - it('should produce an error', function(done) { - this.settings.enableConversions = false - this.child_process.exec.callsArgWith(2) - return this.optimiser.compressPng(this.sourcePath, err => { - this.child_process.exec.called.should.equal(false) - expect(err).to.exist - return done() - }) - })) - - return describe('when optimiser is sigkilled', () => + describe('when optimiser is sigkilled', function() { it('should not produce an error', function(done) { - this.error = new Error('woops') - this.error.signal = 'SIGKILL' - this.child_process.exec.callsArgWith(2, this.error) - return 
this.optimiser.compressPng(this.sourcePath, err => { - expect(err).to.equal(null) - return done() + const error = new FailedCommandError('', 'SIGKILL', '', '') + SafeExec.promises.rejects(error) + ImageOptimiser.compressPng(sourcePath, err => { + expect(err).not.to.exist + done() }) - })) + }) + }) }) From a8158d6c8c761ad5c4b616d7aa815fb7a315fbc1 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 23 Dec 2019 13:36:12 +0000 Subject: [PATCH 364/555] Cleanup and promisify acceptance tests. Run tests for all backends. --- services/filestore/npm-shrinkwrap.json | 158 +++++++-- services/filestore/package.json | 5 +- .../test/acceptance/js/FilestoreApp.js | 193 ++++++----- .../test/acceptance/js/FilestoreTests.js | 299 ++++++++++++++++ .../test/acceptance/js/SendingFileTest.js | 326 ------------------ 5 files changed, 521 insertions(+), 460 deletions(-) create mode 100644 services/filestore/test/acceptance/js/FilestoreTests.js delete mode 100644 services/filestore/test/acceptance/js/SendingFileTest.js diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 44f5ec3263..fa498f3f1b 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -1014,6 +1014,15 @@ "type-detect": "^4.0.5" } }, + "chai-as-promised": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/chai-as-promised/-/chai-as-promised-7.1.1.tgz", + "integrity": "sha512-azL6xMoi+uxu6z4rhWQ1jbdUhOMhis2PvscD/xjLqNMkv3BPPp2JyyuTHOrf9BOosGpNQ11v6BKv/g57RXbiaA==", + "dev": true, + "requires": { + "check-error": "^1.0.2" + } + }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -1284,6 +1293,12 @@ "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=", "dev": true }, + "disrequire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/disrequire/-/disrequire-1.1.0.tgz", + "integrity": "sha512-c3lya+wBcnfNipVE7XQC85J6Fty9XWsbNrUub8XT1Qk3mwO6f8tR7P6Ah3X09A3HTQ1biwjcwTLFkGlEejUzUw==", + "dev": true + }, "dlv": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", @@ -4446,53 +4461,115 @@ "dev": true }, "request": { - "version": "2.14.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.14.0.tgz", - "integrity": "sha1-DYrLsLFMGrguAAt9OB+oyA0afYg=", + "version": "2.88.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", + "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", "requires": { - "form-data": "~0.0.3", - "mime": "~1.2.7" + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.0", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.4.3", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" }, "dependencies": { - "form-data": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.0.7.tgz", - "integrity": "sha1-chEYKiaiZs45cQ3IvEqBtwQIWb4=", + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + }, + "combined-stream": { + "version": "1.0.8", + "resolved": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", "requires": { - "async": "~0.1.9", - "combined-stream": "~0.0.4", - "mime": "~1.2.2" - }, - "dependencies": { - "async": { - "version": "0.1.22", - "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz", - "integrity": "sha1-D8GqoIig4+8Ovi2IMbqw3PiEUGE=" - }, - "combined-stream": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.4.tgz", - "integrity": "sha1-LRpDNH2+lRWkonlnMuW4hHOECyI=", - "requires": { - "delayed-stream": "0.0.5" - }, - "dependencies": { - "delayed-stream": { - "version": "0.0.5", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", - "integrity": "sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8=" - } - } - } + "delayed-stream": "~1.0.0" } }, - "mime": { - "version": "1.2.9", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.9.tgz", - "integrity": "sha1-AJzUCGe9Nd5SGzuWbwTi+NTRPQk=" + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + }, + "forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + }, + "form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "oauth-sign": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" + }, + "safe-buffer": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", + "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==" + }, + "tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "requires": { + "safe-buffer": "^5.0.1" + } + }, + "uuid": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz", + "integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ==" } } }, + "request-promise-core": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.3.tgz", + "integrity": "sha512-QIs2+ArIGQVp5ZYbWD5ZLCY29D5CfWizP8eWnm8FoGD1TX61veauETVQbrV60662V0oFBkrDOuaBI8XgtuyYAQ==", + "requires": { + "lodash": "^4.17.15" + }, + "dependencies": { + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" + } + } + }, + "request-promise-native": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/request-promise-native/-/request-promise-native-1.0.8.tgz", + "integrity": "sha512-dapwLGqkHtwL5AEbfenuzjTYg35Jd6KPytsC2/TLkVMz8rm+tNt72MGUWT1RP/aYawMpN6HqbNGBQaRcBtjQMQ==", + 
"requires": { + "request-promise-core": "1.1.3", + "stealthy-require": "^1.1.1", + "tough-cookie": "^2.3.3" + } + }, "require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -4878,6 +4955,11 @@ "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" }, + "stealthy-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", + "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=" + }, "stream-browserify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.1.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index d39d1027be..9515c1850c 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -36,7 +36,8 @@ "pngcrush": "0.0.3", "range-parser": "^1.0.2", "recluster": "^0.3.7", - "request": "2.14.0", + "request": "^2.88.0", + "request-promise-native": "^1.0.8", "response": "0.14.0", "rimraf": "2.2.8", "settings-sharelatex": "^1.1.0", @@ -48,6 +49,8 @@ "babel-eslint": "^10.0.3", "bunyan": "^1.3.5", "chai": "4.2.0", + "chai-as-promised": "^7.1.1", + "disrequire": "^1.1.0", "eslint": "^6.4.0", "eslint-config-prettier": "^6.7.0", "eslint-config-standard": "^14.1.0", diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 3a5103d5f6..4035262cbc 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -1,109 +1,112 @@ -/* eslint-disable - handle-callback-err, - standard/no-callback-literal, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS103: Rewrite code to no longer use __guard__ - * DS205: Consider reworking code to avoid use of IIFEs - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const app = require('../../../app') -require('logger-sharelatex').logger.level('info') const logger = require('logger-sharelatex') const Settings = require('settings-sharelatex') +const fs = require('fs') +const Path = require('path') const request = require('request') +const { promisify } = require('util') +const disrequire = require('disrequire') const S3_TRIES = 30 -module.exports = { - running: false, - initing: false, - callbacks: [], - ensureRunning(callback) { - if (callback == null) { - callback = function(error) {} - } - if (this.running) { - return callback() - } else if (this.initing) { - return this.callbacks.push(callback) - } else { - this.initing = true - this.callbacks.push(callback) - return app.listen( - __guard__( - Settings.internal != null ? 
Settings.internal.filestore : undefined, - x => x.port - ), - 'localhost', - error => { - if (error != null) { - throw error - } - this.running = true - logger.log('filestore running in dev mode') +logger.logger.level('info') - return (() => { - const result = [] - for (callback of Array.from(this.callbacks)) { - result.push(callback()) - } - return result - })() +const fsReaddir = promisify(fs.readdir) + +class FilestoreApp { + constructor() { + this.running = false + this.initing = false + } + + async runServer() { + if (this.running) { + return + } + + if (this.initing) { + return this.waitForInit() + } + this.initing = true + + this.app = await FilestoreApp.requireApp() + + await new Promise((resolve, reject) => { + this.server = this.app.listen( + Settings.internal.filestore.port, + 'localhost', + err => { + if (err) { + return reject(err) + } + resolve() } ) - } - }, + }) - waitForS3(callback, tries) { - if ( - !(Settings.filestore.s3 != null - ? Settings.filestore.s3.endpoint - : undefined) - ) { - return callback() - } - if (!tries) { - tries = 1 - } - - return request.get( - `${Settings.filestore.s3.endpoint}/`, - (err, response) => { - console.log( - err, - response != null ? response.statusCode : undefined, - tries - ) - if ( - !err && - [200, 404].includes( - response != null ? response.statusCode : undefined - ) - ) { - return callback() - } - - if (tries === S3_TRIES) { - return callback('timed out waiting for S3') - } - - return setTimeout(() => { - return this.waitForS3(callback, tries + 1) - }, 1000) + if (Settings.filestore.backend === 's3') { + try { + await FilestoreApp.waitForS3() + } catch (err) { + await this.stop() + throw err } - ) + } + + this.initing = false + } + + async waitForInit() { + while (this.initing) { + await new Promise(resolve => setTimeout(resolve, 1000)) + } + } + + async stop() { + if (this.server) { + await new Promise(resolve => { + this.server.close(resolve) + }) + delete this.server + } + } + + static async waitForS3() { + let tries = 0 + if (!Settings.filestore.s3.endpoint) { + return + } + + let s3Available = false + + while (tries < S3_TRIES && !s3Available) { + try { + const response = await promisify(request.get)( + `${Settings.filestore.s3.endpoint}/` + ) + if ([200, 404].includes(response.statusCode)) { + s3Available = true + } + } catch (err) { + } finally { + tries++ + if (!s3Available) { + await new Promise(resolve => setTimeout(resolve, 1000)) + } + } + } + } + + static async requireApp() { + // unload the app, as we may be doing this on multiple runs with + // different settings, which affect startup in some cases + const files = await fsReaddir(Path.resolve(__dirname, '../../../app/js')) + files.forEach(file => { + disrequire(Path.resolve(__dirname, '../../../app/js', file)) + }) + disrequire(Path.resolve(__dirname, '../../../app')) + + return require('../../../app') } } -function __guard__(value, transform) { - return typeof value !== 'undefined' && value !== null - ? 
transform(value) - : undefined -} +module.exports = FilestoreApp diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js new file mode 100644 index 0000000000..3315569a24 --- /dev/null +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -0,0 +1,299 @@ +const chai = require('chai') +const { expect } = chai +const fs = require('fs') +const Settings = require('settings-sharelatex') +const Path = require('path') +const FilestoreApp = require('./FilestoreApp') +const rp = require('request-promise-native').defaults({ + resolveWithFullResponse: true +}) +const Stream = require('stream') +const request = require('request') +const { promisify } = require('util') +chai.use(require('chai-as-promised')) + +const fsWriteFile = promisify(fs.writeFile) +const fsStat = promisify(fs.stat) +const pipeline = promisify(Stream.pipeline) + +async function getMetric(filestoreUrl, metric) { + const res = await rp.get(`${filestoreUrl}/metrics`) + expect(res.statusCode).to.equal(200) + const metricRegex = new RegExp(`^${metric}{[^}]+} ([0-9]+)$`, 'm') + const found = metricRegex.exec(res.body) + return parseInt(found ? found[1] : 0) || 0 +} + +// store settings for multiple backends, so that we can test each one. +// fs will always be available - add others if they are configured +const BackendSettings = { + FSPersistor: { + backend: 'fs', + stores: { + user_files: Path.resolve(__dirname, '../../../user_files'), + public_files: Path.resolve(__dirname, '../../../public_files'), + template_files: Path.resolve(__dirname, '../../../template_files') + } + } +} + +if (process.env.AWS_ACCESS_KEY_ID) { + BackendSettings.S3Persistor = { + backend: 's3', + s3: { + key: process.env.AWS_ACCESS_KEY_ID, + secret: process.env.AWS_SECRET_ACCESS_KEY, + endpoint: process.env.AWS_S3_ENDPOINT + }, + stores: { + user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, + template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME, + public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME + } + } +} + +describe('Filestore', function() { + this.timeout(1000 * 10) + const filestoreUrl = `http://localhost:${Settings.internal.filestore.port}` + + // redefine the test suite for every available backend + Object.keys(BackendSettings).forEach(backend => { + describe(backend, function() { + let app, previousEgress, previousIngress + + before(async function() { + // create the app with the relevant filestore settings + Settings.filestore = BackendSettings[backend] + app = new FilestoreApp() + await app.runServer() + }) + + after(async function() { + return app.stop() + }) + + beforeEach(async function() { + // retrieve previous metrics from the app + if (Settings.filestore.backend === 's3') { + ;[previousEgress, previousIngress] = await Promise.all([ + getMetric(filestoreUrl, 's3_egress'), + getMetric(filestoreUrl, 's3_ingress') + ]) + } + }) + + it('should send a 200 for the status endpoint', async function() { + const response = await rp(`${filestoreUrl}/status`) + expect(response.statusCode).to.equal(200) + expect(response.body).to.contain('filestore') + expect(response.body).to.contain('up') + }) + + it('should send a 200 for the health-check endpoint', async function() { + const response = await rp(`${filestoreUrl}/health_check`) + expect(response.statusCode).to.equal(200) + expect(response.body).to.equal('OK') + }) + + describe('with a file on the server', function() { + let fileId, fileUrl + + const localFileReadPath = + 
'/tmp/filestore_acceptance_tests_file_read.txt' + const constantFileContent = [ + 'hello world', + `line 2 goes here ${Math.random()}`, + 'there are 3 lines in all' + ].join('\n') + + before(async function() { + await fsWriteFile(localFileReadPath, constantFileContent) + }) + + beforeEach(async function() { + fileId = Math.random() + fileUrl = `${filestoreUrl}/project/acceptance_tests/file/${fileId}` + + const writeStream = request.post(fileUrl) + const readStream = fs.createReadStream(localFileReadPath) + // consume the result to ensure the http request has been fully processed + const resultStream = fs.createWriteStream('/dev/null') + await pipeline(readStream, writeStream, resultStream) + }) + + it('should return 404 for a non-existant id', async function() { + const options = { uri: fileUrl + '___this_is_clearly_wrong___' } + await expect( + rp.get(options) + ).to.eventually.be.rejected.and.have.property('statusCode', 404) + }) + + it('should return the file size on a HEAD request', async function() { + const expectedLength = Buffer.byteLength(constantFileContent) + const res = await rp.head(fileUrl) + expect(res.statusCode).to.equal(200) + expect(res.headers['content-length']).to.equal( + expectedLength.toString() + ) + }) + + it('should be able get the file back', async function() { + const res = await rp.get(fileUrl) + expect(res.body).to.equal(constantFileContent) + }) + + it('should be able to get back the first 9 bytes of the file', async function() { + const options = { + uri: fileUrl, + headers: { + Range: 'bytes=0-8' + } + } + const res = await rp.get(options) + expect(res.body).to.equal('hello wor') + }) + + it('should be able to get back bytes 4 through 10 of the file', async function() { + const options = { + uri: fileUrl, + headers: { + Range: 'bytes=4-10' + } + } + const res = await rp.get(options) + expect(res.body).to.equal('o world') + }) + + it('should be able to delete the file', async function() { + const response = await rp.del(fileUrl) + expect(response.statusCode).to.equal(204) + await expect( + rp.get(fileUrl) + ).to.eventually.be.rejected.and.have.property('statusCode', 404) + }) + + it('should be able to copy files', async function() { + const newProjectID = 'acceptance_tests_copyied_project' + const newFileId = Math.random() + const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${newFileId}` + const opts = { + method: 'put', + uri: newFileUrl, + json: { + source: { + project_id: 'acceptance_tests', + file_id: fileId + } + } + } + let response = await rp(opts) + expect(response.statusCode).to.equal(200) + response = await rp.del(fileUrl) + expect(response.statusCode).to.equal(204) + response = await rp.get(newFileUrl) + expect(response.body).to.equal(constantFileContent) + }) + + if (backend === 'S3Persistor') { + it('should record an egress metric for the upload', async function() { + const metric = await getMetric(filestoreUrl, 's3_egress') + expect(metric - previousEgress).to.equal(constantFileContent.length) + }) + + it('should record an ingress metric when downloading the file', async function() { + await rp.get(fileUrl) + const metric = await getMetric(filestoreUrl, 's3_ingress') + expect(metric - previousIngress).to.equal( + constantFileContent.length + ) + }) + + it('should record an ingress metric for a partial download', async function() { + const options = { + uri: fileUrl, + headers: { + Range: 'bytes=0-8' + } + } + await rp.get(options) + const metric = await getMetric(filestoreUrl, 's3_ingress') + expect(metric - 
previousIngress).to.equal(9) + }) + } + }) + + describe('with a pdf file', function() { + let fileId, fileUrl, localFileSize + const localFileReadPath = Path.resolve( + __dirname, + '../../fixtures/test.pdf' + ) + + beforeEach(async function() { + fileId = Math.random() + fileUrl = `${filestoreUrl}/project/acceptance_tests/file/${fileId}` + const stat = await fsStat(localFileReadPath) + localFileSize = stat.size + const writeStream = request.post(fileUrl) + const endStream = fs.createWriteStream('/dev/null') + const readStream = fs.createReadStream(localFileReadPath) + await pipeline(readStream, writeStream, endStream) + }) + + it('should be able get the file back', async function() { + const response = await rp.get(fileUrl) + expect(response.body.substring(0, 8)).to.equal('%PDF-1.5') + }) + + if (backend === 'S3Persistor') { + it('should record an egress metric for the upload', async function() { + const metric = await getMetric(filestoreUrl, 's3_egress') + expect(metric - previousEgress).to.equal(localFileSize) + }) + } + + describe('getting the preview image', function() { + this.timeout(1000 * 20) + let previewFileUrl + + beforeEach(function() { + previewFileUrl = `${fileUrl}?style=preview` + }) + + it('should not time out', async function() { + const response = await rp.get(previewFileUrl) + expect(response.statusCode).to.equal(200) + }) + + it('should respond with image data', async function() { + // note: this test relies of the imagemagick conversion working + const response = await rp.get(previewFileUrl) + expect(response.body.length).to.be.greaterThan(400) + expect(response.body.substr(1, 3)).to.equal('PNG') + }) + }) + + describe('warming the cache', function() { + this.timeout(1000 * 20) + let previewFileUrl + + beforeEach(function() { + previewFileUrl = `${fileUrl}?style=preview&cacheWarm=true` + }) + + it('should not time out', async function() { + const response = await rp.get(previewFileUrl) + expect(response.statusCode).to.equal(200) + }) + + it("should respond with only an 'OK'", async function() { + // note: this test relies of the imagemagick conversion working + const response = await rp.get(previewFileUrl) + expect(response.body).to.equal('OK') + }) + }) + }) + }) + }) +}) diff --git a/services/filestore/test/acceptance/js/SendingFileTest.js b/services/filestore/test/acceptance/js/SendingFileTest.js deleted file mode 100644 index c20fa01c42..0000000000 --- a/services/filestore/test/acceptance/js/SendingFileTest.js +++ /dev/null @@ -1,326 +0,0 @@ -/* eslint-disable - handle-callback-err, - no-path-concat, - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS103: Rewrite code to no longer use __guard__ - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const { assert } = require('chai') -const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() -const { expect } = chai -const modulePath = '../../../app/js/LocalFileWriter.js' -const SandboxedModule = require('sandboxed-module') -const fs = require('fs') -const request = require('request') -const settings = require('settings-sharelatex') -const FilestoreApp = require('./FilestoreApp') -const async = require('async') - -const getMetric = (filestoreUrl, metric, cb) => - request.get(`${filestoreUrl}/metrics`, function(err, res) { - expect(res.statusCode).to.equal(200) - const metricRegex = new RegExp(`^${metric}{[^}]+} ([0-9]+)$`, 'm') - return cb(parseInt(__guard__(metricRegex.exec(res.body), x => x[1]) || '0')) - }) - -describe('Filestore', function() { - before(function(done) { - this.localFileReadPath = '/tmp/filestore_acceptence_tests_file_read.txt' - this.localFileWritePath = '/tmp/filestore_acceptence_tests_file_write.txt' - - this.constantFileContent = [ - 'hello world', - `line 2 goes here ${Math.random()}`, - 'there are 3 lines in all' - ].join('\n') - - this.filestoreUrl = `http://localhost:${settings.internal.filestore.port}` - return fs.writeFile( - this.localFileReadPath, - this.constantFileContent, - function(err) { - if (err) { - return done(err) - } - return FilestoreApp.waitForS3(done) - } - ) - }) - - beforeEach(function(done) { - return FilestoreApp.ensureRunning(() => { - return async.parallel( - [ - cb => { - return fs.unlink(this.localFileWritePath, () => cb()) - }, - cb => { - return getMetric(this.filestoreUrl, 's3_egress', metric => { - this.previousEgress = metric - return cb() - }) - }, - cb => { - return getMetric(this.filestoreUrl, 's3_ingress', metric => { - this.previousIngress = metric - return cb() - }) - } - ], - done - ) - }) - }) - - it('should send a 200 for status endpoint', function(done) { - return request(`${this.filestoreUrl}/status`, function( - err, - response, - body - ) { - response.statusCode.should.equal(200) - body.indexOf('filestore').should.not.equal(-1) - body.indexOf('up').should.not.equal(-1) - return done() - }) - }) - - describe('with a file on the server', function() { - beforeEach(function(done) { - this.timeout(1000 * 10) - this.file_id = Math.random() - this.fileUrl = `${this.filestoreUrl}/project/acceptence_tests/file/${this.file_id}` - - const writeStream = request.post(this.fileUrl) - - writeStream.on('end', done) - return fs.createReadStream(this.localFileReadPath).pipe(writeStream) - }) - - it('should return 404 for a non-existant id', function(done) { - this.timeout(1000 * 20) - const options = { uri: this.fileUrl + '___this_is_clearly_wrong___' } - return request.get(options, (err, response, body) => { - response.statusCode.should.equal(404) - return done() - }) - }) - - it('should record an egress metric for the upload', function(done) { - return getMetric(this.filestoreUrl, 's3_egress', metric => { - expect(metric - this.previousEgress).to.equal( - this.constantFileContent.length - ) - return done() - }) - }) - - it('should return the file size on a HEAD request', function(done) { - const expectedLength = Buffer.byteLength(this.constantFileContent) - return request.head(this.fileUrl, (err, res) => { - expect(res.statusCode).to.equal(200) - 
expect(res.headers['content-length']).to.equal( - expectedLength.toString() - ) - return done() - }) - }) - - it('should be able get the file back', function(done) { - this.timeout(1000 * 10) - return request.get(this.fileUrl, (err, response, body) => { - body.should.equal(this.constantFileContent) - return done() - }) - }) - - it('should record an ingress metric when downloading the file', function(done) { - this.timeout(1000 * 10) - return request.get(this.fileUrl, () => { - return getMetric(this.filestoreUrl, 's3_ingress', metric => { - expect(metric - this.previousIngress).to.equal( - this.constantFileContent.length - ) - return done() - }) - }) - }) - - it('should be able to get back the first 9 bytes of the file', function(done) { - this.timeout(1000 * 10) - const options = { - uri: this.fileUrl, - headers: { - Range: 'bytes=0-8' - } - } - return request.get(options, (err, response, body) => { - body.should.equal('hello wor') - return done() - }) - }) - - it('should record an ingress metric for a partial download', function(done) { - this.timeout(1000 * 10) - const options = { - uri: this.fileUrl, - headers: { - Range: 'bytes=0-8' - } - } - return request.get(options, () => { - return getMetric(this.filestoreUrl, 's3_ingress', metric => { - expect(metric - this.previousIngress).to.equal(9) - return done() - }) - }) - }) - - it('should be able to get back bytes 4 through 10 of the file', function(done) { - this.timeout(1000 * 10) - const options = { - uri: this.fileUrl, - headers: { - Range: 'bytes=4-10' - } - } - return request.get(options, (err, response, body) => { - body.should.equal('o world') - return done() - }) - }) - - it('should be able to delete the file', function(done) { - this.timeout(1000 * 20) - return request.del(this.fileUrl, (err, response, body) => { - response.statusCode.should.equal(204) - return request.get(this.fileUrl, (err, response, body) => { - response.statusCode.should.equal(404) - return done() - }) - }) - }) - - return it('should be able to copy files', function(done) { - this.timeout(1000 * 20) - - const newProjectID = 'acceptence_tests_copyied_project' - const newFileId = Math.random() - const newFileUrl = `${this.filestoreUrl}/project/${newProjectID}/file/${newFileId}` - const opts = { - method: 'put', - uri: newFileUrl, - json: { - source: { - project_id: 'acceptence_tests', - file_id: this.file_id - } - } - } - return request(opts, (err, response, body) => { - response.statusCode.should.equal(200) - return request.del(this.fileUrl, (err, response, body) => { - response.statusCode.should.equal(204) - return request.get(newFileUrl, (err, response, body) => { - body.should.equal(this.constantFileContent) - return done() - }) - }) - }) - }) - }) - - return describe('with a pdf file', function() { - beforeEach(function(done) { - this.timeout(1000 * 10) - this.file_id = Math.random() - this.fileUrl = `${this.filestoreUrl}/project/acceptence_tests/file/${this.file_id}` - this.localFileReadPath = __dirname + '/../../fixtures/test.pdf' - return fs.stat(this.localFileReadPath, (err, stat) => { - this.localFileSize = stat.size - const writeStream = request.post(this.fileUrl) - - writeStream.on('end', done) - return fs.createReadStream(this.localFileReadPath).pipe(writeStream) - }) - }) - - it('should record an egress metric for the upload', function(done) { - return getMetric(this.filestoreUrl, 's3_egress', metric => { - expect(metric - this.previousEgress).to.equal(this.localFileSize) - return done() - }) - }) - - it('should be able get the file back', 
function(done) { - this.timeout(1000 * 10) - return request.get(this.fileUrl, (err, response, body) => { - expect(body.substring(0, 8)).to.equal('%PDF-1.5') - return done() - }) - }) - - describe('getting the preview image', function() { - beforeEach(function() { - return (this.previewFileUrl = `${this.fileUrl}?style=preview`) - }) - - it('should not time out', function(done) { - this.timeout(1000 * 20) - return request.get(this.previewFileUrl, (err, response, body) => { - expect(response).to.not.equal(null) - return done() - }) - }) - - return it('should respond with image data', function(done) { - // note: this test relies of the imagemagick conversion working - this.timeout(1000 * 20) - return request.get(this.previewFileUrl, (err, response, body) => { - expect(response.statusCode).to.equal(200) - expect(body.length).to.be.greaterThan(400) - return done() - }) - }) - }) - - return describe('warming the cache', function() { - beforeEach(function() { - return (this.fileUrl = this.fileUrl + '?style=preview&cacheWarm=true') - }) - - it('should not time out', function(done) { - this.timeout(1000 * 20) - return request.get(this.fileUrl, (err, response, body) => { - expect(response).to.not.equal(null) - return done() - }) - }) - - return it("should respond with only an 'OK'", function(done) { - // note: this test relies of the imagemagick conversion working - this.timeout(1000 * 20) - return request.get(this.fileUrl, (err, response, body) => { - expect(response.statusCode).to.equal(200) - body.should.equal('OK') - return done() - }) - }) - }) - }) -}) - -function __guard__(value, transform) { - return typeof value !== 'undefined' && value !== null - ? transform(value) - : undefined -} From 4315824d3c06160a7a9c9b84b530bf487cc08f6b Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 2 Jan 2020 09:53:07 +0000 Subject: [PATCH 365/555] Minor cleanup of FilestoreApp acceptance-test helper --- .../test/acceptance/js/FilestoreApp.js | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 4035262cbc..718d53bcf8 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -2,15 +2,18 @@ const logger = require('logger-sharelatex') const Settings = require('settings-sharelatex') const fs = require('fs') const Path = require('path') -const request = require('request') const { promisify } = require('util') const disrequire = require('disrequire') +const rp = require('request-promise-native').defaults({ + resolveWithFullResponse: true +}) const S3_TRIES = 30 logger.logger.level('info') const fsReaddir = promisify(fs.readdir) +const sleep = promisify(setTimeout) class FilestoreApp { constructor() { @@ -57,15 +60,15 @@ class FilestoreApp { async waitForInit() { while (this.initing) { - await new Promise(resolve => setTimeout(resolve, 1000)) + await sleep(1000) } } async stop() { - if (this.server) { - await new Promise(resolve => { - this.server.close(resolve) - }) + const closeServer = promisify(this.server.close).bind(this.server) + try { + await closeServer() + } finally { delete this.server } } @@ -80,17 +83,16 @@ class FilestoreApp { while (tries < S3_TRIES && !s3Available) { try { - const response = await promisify(request.get)( - `${Settings.filestore.s3.endpoint}/` - ) + const response = await rp.get(`${Settings.filestore.s3.endpoint}/`) if ([200, 404].includes(response.statusCode)) 
{ s3Available = true } } catch (err) { + // swallow errors, as we may experience them until fake-s3 is running } finally { tries++ if (!s3Available) { - await new Promise(resolve => setTimeout(resolve, 1000)) + await sleep(1000) } } } From ce90292394a0c83cb3e085a4ebd10e578f32bf16 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 2 Jan 2020 11:29:28 +0000 Subject: [PATCH 366/555] Decaf cleanup for FSPersistorManager --- .../filestore/app/js/FSPersistorManager.js | 353 +++++----- .../test/unit/js/FSPersistorManagerTests.js | 631 ++++++------------ 2 files changed, 376 insertions(+), 608 deletions(-) diff --git a/services/filestore/app/js/FSPersistorManager.js b/services/filestore/app/js/FSPersistorManager.js index d11d839df7..ea793cfc64 100644 --- a/services/filestore/app/js/FSPersistorManager.js +++ b/services/filestore/app/js/FSPersistorManager.js @@ -1,206 +1,169 @@ -/* eslint-disable - handle-callback-err, - no-unreachable, - node/no-deprecated-api, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const logger = require('logger-sharelatex') const fs = require('fs') +const logger = require('logger-sharelatex') const path = require('path') -const LocalFileWriter = require('./LocalFileWriter') -const Errors = require('./Errors') const rimraf = require('rimraf') -const _ = require('underscore') +const Stream = require('stream') +const { promisify, callbackify } = require('util') + +const LocalFileWriter = require('./LocalFileWriter').promises +const { NotFoundError, ReadError } = require('./Errors') + +const pipeline = promisify(Stream.pipeline) +const fsUnlink = promisify(fs.unlink) +const fsOpen = promisify(fs.open) +const fsStat = promisify(fs.stat) +const fsReaddir = promisify(fs.readdir) +const rmrf = promisify(rimraf) const filterName = key => key.replace(/\//g, '_') -module.exports = { - sendFile(location, target, source, callback) { - if (callback == null) { - callback = function(err) {} - } - const filteredTarget = filterName(target) - logger.log({ location, target: filteredTarget, source }, 'sending file') - const done = _.once(function(err) { - if (err != null) { - logger.err( - { err, location, target: filteredTarget, source }, - 'Error on put of file' - ) - } - return callback(err) - }) - // actually copy the file (instead of moving it) to maintain consistent behaviour - // between the different implementations - const sourceStream = fs.createReadStream(source) - sourceStream.on('error', done) - const targetStream = fs.createWriteStream(`${location}/${filteredTarget}`) - targetStream.on('error', done) - targetStream.on('finish', () => done()) - return sourceStream.pipe(targetStream) - }, +async function sendFile(location, target, source) { + const filteredTarget = filterName(target) + logger.log({ location, target: filteredTarget, source }, 'sending file') - sendStream(location, target, sourceStream, callback) { - if (callback == null) { - callback = function(err) {} - } - logger.log({ location, target }, 'sending file stream') - sourceStream.on('error', err => - logger.err({ location, target, err: err('error on stream to send') }) - ) - return LocalFileWriter.writeStream(sourceStream, null, (err, fsPath) => { - if (err != null) { - logger.err( - { location, 
target, fsPath, err }, - 'something went wrong writing stream to disk' - ) - return callback(err) - } - return this.sendFile(location, target, fsPath, ( - err // delete the temporary file created above and return the original error - ) => LocalFileWriter.deleteFile(fsPath, () => callback(err))) - }) - }, + // actually copy the file (instead of moving it) to maintain consistent behaviour + // between the different implementations + const sourceStream = fs.createReadStream(source) + const targetStream = fs.createWriteStream(`${location}/${filteredTarget}`) + await pipeline(sourceStream, targetStream) +} - // opts may be {start: Number, end: Number} - getFileStream(location, name, opts, callback) { - if (callback == null) { - callback = function(err, res) {} - } - const filteredName = filterName(name) - logger.log({ location, filteredName }, 'getting file') - return fs.open(`${location}/${filteredName}`, 'r', function(err, fd) { - if (err != null) { - logger.err( - { err, location, filteredName: name }, - 'Error reading from file' - ) - if (err.code === 'ENOENT') { - return callback(new Errors.NotFoundError(err.message), null) - } else { - return callback(err, null) - } - } - opts.fd = fd - const sourceStream = fs.createReadStream(null, opts) - return callback(null, sourceStream) - }) - }, +async function sendStream(location, target, sourceStream) { + logger.log({ location, target }, 'sending file stream') - getFileSize(location, filename, callback) { - const fullPath = path.join(location, filterName(filename)) - return fs.stat(fullPath, function(err, stats) { - if (err != null) { - if (err.code === 'ENOENT') { - logger.log({ location, filename }, 'file not found') - callback(new Errors.NotFoundError(err.message)) - } else { - logger.err({ err, location, filename }, 'failed to stat file') - callback(err) - } - return - } - return callback(null, stats.size) - }) - }, - - copyFile(location, fromName, toName, callback) { - if (callback == null) { - callback = function(err) {} - } - const filteredFromName = filterName(fromName) - const filteredToName = filterName(toName) - logger.log( - { location, fromName: filteredFromName, toName: filteredToName }, - 'copying file' - ) - const sourceStream = fs.createReadStream(`${location}/${filteredFromName}`) - sourceStream.on('error', function(err) { - logger.err( - { err, location, key: filteredFromName }, - 'Error reading from file' - ) - return callback(err) - }) - const targetStream = fs.createWriteStream(`${location}/${filteredToName}`) - targetStream.on('error', function(err) { - logger.err( - { err, location, key: filteredToName }, - 'Error writing to file' - ) - return callback(err) - }) - targetStream.on('finish', () => callback(null)) - return sourceStream.pipe(targetStream) - }, - - deleteFile(location, name, callback) { - const filteredName = filterName(name) - logger.log({ location, filteredName }, 'delete file') - return fs.unlink(`${location}/${filteredName}`, function(err) { - if (err != null) { - logger.err({ err, location, filteredName }, 'Error on delete.') - return callback(err) - } else { - return callback() - } - }) - }, - - deleteDirectory(location, name, callback) { - if (callback == null) { - callback = function(err) {} - } - const filteredName = filterName(name.replace(/\/$/, '')) - return rimraf(`${location}/${filteredName}`, function(err) { - if (err != null) { - logger.err({ err, location, filteredName }, 'Error on rimraf rmdir.') - return callback(err) - } else { - return callback() - } - }) - }, - - 
checkIfFileExists(location, name, callback) { - if (callback == null) { - callback = function(err, exists) {} - } - const filteredName = filterName(name) - logger.log({ location, filteredName }, 'checking if file exists') - return fs.exists(`${location}/${filteredName}`, function(exists) { - logger.log({ location, filteredName, exists }, 'checked if file exists') - return callback(null, exists) - }) - }, - - directorySize(location, name, callback) { - const filteredName = filterName(name.replace(/\/$/, '')) - logger.log({ location, filteredName }, 'get project size in file system') - return fs.readdir(`${location}/${filteredName}`, function(err, files) { - if (err != null) { - logger.err( - { err, location, filteredName }, - 'something went wrong listing prefix in aws' - ) - return callback(err) - } - let totalSize = 0 - _.each(files, function(entry) { - const fd = fs.openSync(`${location}/${filteredName}/${entry}`, 'r') - const fileStats = fs.fstatSync(fd) - totalSize += fileStats.size - return fs.closeSync(fd) - }) - logger.log({ totalSize }, 'total size', { files }) - return callback(null, totalSize) - }) + let fsPath + try { + fsPath = await LocalFileWriter.writeStream(sourceStream) + await sendFile(location, target, fsPath) + } finally { + await LocalFileWriter.deleteFile(fsPath) + } +} + +// opts may be {start: Number, end: Number} +async function getFileStream(location, name, opts) { + const filteredName = filterName(name) + logger.log({ location, filteredName }, 'getting file') + + try { + opts.fd = await fsOpen(`${location}/${filteredName}`, 'r') + } catch (err) { + logger.err({ err, location, filteredName: name }, 'Error reading from file') + + if (err.code === 'ENOENT') { + throw new NotFoundError({ + message: 'file not found', + info: { + location, + filteredName + } + }).withCause(err) + } + throw new ReadError('failed to open file for streaming').withCause(err) + } + + return fs.createReadStream(null, opts) +} + +async function getFileSize(location, filename) { + const fullPath = path.join(location, filterName(filename)) + + try { + const stat = await fsStat(fullPath) + return stat.size + } catch (err) { + logger.err({ err, location, filename }, 'failed to stat file') + + if (err.code === 'ENOENT') { + throw new NotFoundError({ + message: 'file not found', + info: { + location, + fullPath + } + }).withCause(err) + } + throw new ReadError('failed to stat file').withCause(err) + } +} + +async function copyFile(location, fromName, toName) { + const filteredFromName = filterName(fromName) + const filteredToName = filterName(toName) + logger.log({ location, filteredFromName, filteredToName }, 'copying file') + + const sourceStream = fs.createReadStream(`${location}/${filteredFromName}`) + const targetStream = fs.createWriteStream(`${location}/${filteredToName}`) + await pipeline(sourceStream, targetStream) +} + +async function deleteFile(location, name) { + const filteredName = filterName(name) + logger.log({ location, filteredName }, 'delete file') + await fsUnlink(`${location}/${filteredName}`) +} + +async function deleteDirectory(location, name) { + const filteredName = filterName(name.replace(/\/$/, '')) + + logger.log({ location, filteredName }, 'deleting directory') + + await rmrf(`${location}/${filteredName}`) +} + +async function checkIfFileExists(location, name) { + const filteredName = filterName(name) + try { + const stat = await fsStat(`${location}/${filteredName}`) + return !!stat + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + throw new 
ReadError('failed to stat file').withCause(err) + } +} + +// note, does not recurse into subdirectories +async function directorySize(location, name) { + const filteredName = filterName(name.replace(/\/$/, '')) + let size = 0 + + try { + const files = await fsReaddir(`${location}/${filteredName}`) + for (const file of files) { + const stat = await fsStat(`${location}/${filteredName}/${file}`) + size += stat.size + } + } catch (err) { + throw new ReadError({ + message: 'failed to get directory size', + info: { location, name } + }).withCause(err) + } + + return size +} + +module.exports = { + sendFile: callbackify(sendFile), + sendStream: callbackify(sendStream), + getFileStream: callbackify(getFileStream), + getFileSize: callbackify(getFileSize), + copyFile: callbackify(copyFile), + deleteFile: callbackify(deleteFile), + deleteDirectory: callbackify(deleteDirectory), + checkIfFileExists: callbackify(checkIfFileExists), + directorySize: callbackify(directorySize), + promises: { + sendFile, + sendStream, + getFileStream, + getFileSize, + copyFile, + deleteFile, + deleteDirectory, + checkIfFileExists, + directorySize } } diff --git a/services/filestore/test/unit/js/FSPersistorManagerTests.js b/services/filestore/test/unit/js/FSPersistorManagerTests.js index 9e9018c17b..d399a87cee 100644 --- a/services/filestore/test/unit/js/FSPersistorManagerTests.js +++ b/services/filestore/test/unit/js/FSPersistorManagerTests.js @@ -1,502 +1,307 @@ -/* eslint-disable - handle-callback-err, - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const { assert } = require('chai') const sinon = require('sinon') const chai = require('chai') -const { should } = chai const { expect } = chai -const modulePath = '../../../app/js/FSPersistorManager.js' const SandboxedModule = require('sandboxed-module') -const fs = require('fs') -const response = require('response') +const Errors = require('../../../app/js/Errors') + +chai.use(require('sinon-chai')) +chai.use(require('chai-as-promised')) + +const modulePath = '../../../app/js/FSPersistorManager.js' describe('FSPersistorManagerTests', function() { + const stat = { size: 4 } + const fd = 1234 + const readStream = 'readStream' + const writeStream = 'writeStream' + const remoteStream = 'remoteStream' + const tempFile = '/tmp/potato.txt' + const location = '/foo' + const error = new Error('guru meditation error') + + const files = ['wombat.txt', 'potato.tex'] + let fs, rimraf, stream, LocalFileWriter, FSPersistorManager + beforeEach(function() { - this.Fs = { - rename: sinon.stub(), - createReadStream: sinon.stub(), - createWriteStream: sinon.stub(), - unlink: sinon.stub(), - rmdir: sinon.stub(), - exists: sinon.stub(), - readdir: sinon.stub(), - open: sinon.stub(), - openSync: sinon.stub(), - fstatSync: sinon.stub(), - closeSync: sinon.stub(), - stat: sinon.stub() + fs = { + createReadStream: sinon.stub().returns(readStream), + createWriteStream: sinon.stub().returns(writeStream), + unlink: sinon.stub().yields(), + open: sinon.stub().yields(null, fd), + readdir: sinon.stub().yields(null, files), + stat: sinon.stub().yields(null, stat) } - this.Rimraf = sinon.stub() - this.LocalFileWriter = { - writeStream: sinon.stub(), - deleteFile: 
sinon.stub() + rimraf = sinon.stub().yields() + stream = { pipeline: sinon.stub().yields() } + LocalFileWriter = { + promises: { + writeStream: sinon.stub().resolves(tempFile), + deleteFile: sinon.stub().resolves() + } } - this.requires = { - './LocalFileWriter': this.LocalFileWriter, - fs: this.Fs, - 'logger-sharelatex': { - log() {}, - err() {} + FSPersistorManager = SandboxedModule.require(modulePath, { + requires: { + './LocalFileWriter': LocalFileWriter, + fs: fs, + 'logger-sharelatex': { + log() {}, + err() {} + }, + rimraf: rimraf, + stream: stream, + './Errors': Errors }, - response: response, - rimraf: this.Rimraf, - './Errors': (this.Errors = { NotFoundError: sinon.stub() }) - } - this.location = '/tmp' - this.name1 = '530f2407e7ef165704000007/530f838b46d9a9e859000008' - this.name1Filtered = '530f2407e7ef165704000007_530f838b46d9a9e859000008' - this.name2 = 'second_file' - this.error = 'error_message' - return (this.FSPersistorManager = SandboxedModule.require(modulePath, { - requires: this.requires - })) + globals: { console } + }) }) describe('sendFile', function() { - beforeEach(function() { - return (this.Fs.createReadStream = sinon.stub().returns({ - on() {}, - pipe() {} - })) + it('should copy the file', async function() { + await FSPersistorManager.promises.sendFile(location, files[0], files[1]) + expect(fs.createReadStream).to.have.been.calledWith(files[1]) + expect(fs.createWriteStream).to.have.been.calledWith( + `${location}/${files[0]}` + ) + expect(stream.pipeline).to.have.been.calledWith(readStream, writeStream) }) - it('should copy the file', function(done) { - this.Fs.createWriteStream = sinon.stub().returns({ - on(event, handler) { - if (event === 'finish') { - return process.nextTick(handler) - } - } - }) - return this.FSPersistorManager.sendFile( - this.location, - this.name1, - this.name2, - err => { - this.Fs.createReadStream.calledWith(this.name2).should.equal(true) - this.Fs.createWriteStream - .calledWith(`${this.location}/${this.name1Filtered}`) - .should.equal(true) - return done() - } - ) - }) - - return it('should return an error if the file cannot be stored', function(done) { - this.Fs.createWriteStream = sinon.stub().returns({ - on: (event, handler) => { - if (event === 'error') { - return process.nextTick(() => { - return handler(this.error) - }) - } - } - }) - return this.FSPersistorManager.sendFile( - this.location, - this.name1, - this.name2, - err => { - this.Fs.createReadStream.calledWith(this.name2).should.equal(true) - this.Fs.createWriteStream - .calledWith(`${this.location}/${this.name1Filtered}`) - .should.equal(true) - err.should.equal(this.error) - return done() - } - ) + it('should return an error if the file cannot be stored', async function() { + stream.pipeline.yields(error) + await expect( + FSPersistorManager.promises.sendFile(location, files[0], files[1]) + ).to.eventually.be.rejectedWith(error) }) }) describe('sendStream', function() { - beforeEach(function() { - this.FSPersistorManager.sendFile = sinon.stub().callsArgWith(3) - this.LocalFileWriter.writeStream.callsArgWith(2, null, this.name1) - this.LocalFileWriter.deleteFile.callsArg(1) - return (this.SourceStream = { on() {} }) - }) - - it('should sent stream to LocalFileWriter', function(done) { - return this.FSPersistorManager.sendStream( - this.location, - this.name1, - this.SourceStream, - () => { - this.LocalFileWriter.writeStream - .calledWith(this.SourceStream) - .should.equal(true) - return done() - } + it('should send the stream to LocalFileWriter', async function() { 
+ await FSPersistorManager.promises.sendStream( + location, + files[0], + remoteStream + ) + expect(LocalFileWriter.promises.writeStream).to.have.been.calledWith( + remoteStream ) }) - it('should return the error from LocalFileWriter', function(done) { - this.LocalFileWriter.writeStream.callsArgWith(2, this.error) - return this.FSPersistorManager.sendStream( - this.location, - this.name1, - this.SourceStream, - err => { - err.should.equal(this.error) - return done() - } + it('should delete the temporary file', async function() { + await FSPersistorManager.promises.sendStream( + location, + files[0], + remoteStream + ) + expect(LocalFileWriter.promises.deleteFile).to.have.been.calledWith( + tempFile ) }) - return it('should send the file to the filestore', function(done) { - this.LocalFileWriter.writeStream.callsArgWith(2) - return this.FSPersistorManager.sendStream( - this.location, - this.name1, - this.SourceStream, - err => { - this.FSPersistorManager.sendFile.called.should.equal(true) - return done() - } + it('should return the error from LocalFileWriter', async function() { + LocalFileWriter.promises.writeStream.rejects(error) + await expect( + FSPersistorManager.promises.sendStream(location, files[0], remoteStream) + ).to.eventually.be.rejectedWith(error) + }) + + it('should send the temporary file to the filestore', async function() { + await FSPersistorManager.promises.sendStream( + location, + files[0], + remoteStream ) + expect(fs.createReadStream).to.have.been.calledWith(tempFile) }) }) describe('getFileStream', function() { - beforeEach(function() { - return (this.opts = {}) + const filename = 'wombat/potato' + const filteredFilename = 'wombat_potato' + + it('should use correct file location', async function() { + await FSPersistorManager.promises.getFileStream(location, filename, {}) + expect(fs.open).to.have.been.calledWith(`${location}/${filteredFilename}`) }) - it('should use correct file location', function(done) { - this.FSPersistorManager.getFileStream( - this.location, - this.name1, - this.opts, - (err, res) => {} + it('should pass the options to createReadStream', async function() { + await FSPersistorManager.promises.getFileStream(location, filename, { + start: 0, + end: 8 + }) + expect(fs.createReadStream).to.have.been.calledWith(null, { + start: 0, + end: 8, + fd + }) + }) + + it('should give a NotFoundError if the file does not exist', async function() { + const err = new Error() + err.code = 'ENOENT' + fs.open.yields(err) + + await expect( + FSPersistorManager.promises.getFileStream(location, filename, {}) ) - this.Fs.open - .calledWith(`${this.location}/${this.name1Filtered}`) - .should.equal(true) - return done() + .to.eventually.be.rejectedWith('file not found') + .and.be.an.instanceOf(Errors.NotFoundError) }) - describe('with start and end options', function() { - beforeEach(function() { - this.fd = 2019 - this.opts_in = { start: 0, end: 8 } - this.opts = { start: 0, end: 8, fd: this.fd } - return this.Fs.open.callsArgWith(2, null, this.fd) - }) - - return it('should pass the options to createReadStream', function(done) { - this.FSPersistorManager.getFileStream( - this.location, - this.name1, - this.opts_in, - (err, res) => {} - ) - this.Fs.createReadStream.calledWith(null, this.opts).should.equal(true) - return done() - }) - }) - - return describe('error conditions', function() { - describe('when the file does not exist', function() { - beforeEach(function() { - this.fakeCode = 'ENOENT' - const err = new Error() - err.code = this.fakeCode - return 
this.Fs.open.callsArgWith(2, err, null) - }) - - return it('should give a NotFoundError', function(done) { - return this.FSPersistorManager.getFileStream( - this.location, - this.name1, - this.opts, - (err, res) => { - expect(res).to.equal(null) - expect(err).to.not.equal(null) - expect(err instanceof this.Errors.NotFoundError).to.equal(true) - return done() - } - ) - }) - }) - - return describe('when some other error happens', function() { - beforeEach(function() { - this.fakeCode = 'SOMETHINGHORRIBLE' - const err = new Error() - err.code = this.fakeCode - return this.Fs.open.callsArgWith(2, err, null) - }) - - return it('should give an Error', function(done) { - return this.FSPersistorManager.getFileStream( - this.location, - this.name1, - this.opts, - (err, res) => { - expect(res).to.equal(null) - expect(err).to.not.equal(null) - expect(err instanceof Error).to.equal(true) - return done() - } - ) - }) - }) + it('should wrap any other error', async function() { + fs.open.yields(error) + await expect( + FSPersistorManager.promises.getFileStream(location, filename, {}) + ) + .to.eventually.be.rejectedWith('failed to open file for streaming') + .and.be.an.instanceOf(Errors.ReadError) + .and.have.property('cause', error) }) }) describe('getFileSize', function() { - it('should return the file size', function(done) { - const expectedFileSize = 75382 - this.Fs.stat.yields(new Error('fs.stat got unexpected arguments')) - this.Fs.stat - .withArgs(`${this.location}/${this.name1Filtered}`) - .yields(null, { size: expectedFileSize }) + const filename = 'wombat/potato' + const badFilename = 'neenaw.tex' + const filteredFilename = 'wombat_potato' + const size = 65536 + const noentError = new Error('not found') + noentError.code = 'ENOENT' - return this.FSPersistorManager.getFileSize( - this.location, - this.name1, - (err, fileSize) => { - if (err != null) { - return done(err) - } - expect(fileSize).to.equal(expectedFileSize) - return done() - } - ) + beforeEach(function() { + fs.stat + .yields(error) + .withArgs(`${location}/${filteredFilename}`) + .yields(null, { size }) + .withArgs(`${location}/${badFilename}`) + .yields(noentError) }) - it('should throw a NotFoundError if the file does not exist', function(done) { - const error = new Error() - error.code = 'ENOENT' - this.Fs.stat.yields(error) - - return this.FSPersistorManager.getFileSize( - this.location, - this.name1, - (err, fileSize) => { - expect(err).to.be.instanceof(this.Errors.NotFoundError) - return done() - } - ) + it('should return the file size', async function() { + expect( + await FSPersistorManager.promises.getFileSize(location, filename) + ).to.equal(size) }) - return it('should rethrow any other error', function(done) { - const error = new Error() - this.Fs.stat.yields(error) + it('should throw a NotFoundError if the file does not exist', async function() { + await expect( + FSPersistorManager.promises.getFileSize(location, badFilename) + ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) + }) - return this.FSPersistorManager.getFileSize( - this.location, - this.name1, - (err, fileSize) => { - expect(err).to.equal(error) - return done() - } - ) + it('should wrap any other error', async function() { + await expect(FSPersistorManager.promises.getFileSize(location, 'raccoon')) + .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) + .and.have.property('cause', error) }) }) describe('copyFile', function() { - beforeEach(function() { - this.ReadStream = { - on() {}, - pipe: sinon.stub() - } - 
this.WriteStream = { on() {} } - this.Fs.createReadStream.returns(this.ReadStream) - return this.Fs.createWriteStream.returns(this.WriteStream) + it('Should open the source for reading', async function() { + await FSPersistorManager.promises.copyFile(location, files[0], files[1]) + expect(fs.createReadStream).to.have.been.calledWith( + `${location}/${files[0]}` + ) }) - it('Should open the source for reading', function(done) { - this.FSPersistorManager.copyFile( - this.location, - this.name1, - this.name2, - function() {} + it('Should open the target for writing', async function() { + await FSPersistorManager.promises.copyFile(location, files[0], files[1]) + expect(fs.createWriteStream).to.have.been.calledWith( + `${location}/${files[1]}` ) - this.Fs.createReadStream - .calledWith(`${this.location}/${this.name1Filtered}`) - .should.equal(true) - return done() }) - it('Should open the target for writing', function(done) { - this.FSPersistorManager.copyFile( - this.location, - this.name1, - this.name2, - function() {} - ) - this.Fs.createWriteStream - .calledWith(`${this.location}/${this.name2}`) - .should.equal(true) - return done() - }) - - return it('Should pipe the source to the target', function(done) { - this.FSPersistorManager.copyFile( - this.location, - this.name1, - this.name2, - function() {} - ) - this.ReadStream.pipe.calledWith(this.WriteStream).should.equal(true) - return done() + it('Should pipe the source to the target', async function() { + await FSPersistorManager.promises.copyFile(location, files[0], files[1]) + expect(stream.pipeline).to.have.been.calledWith(readStream, writeStream) }) }) describe('deleteFile', function() { - beforeEach(function() { - return this.Fs.unlink.callsArgWith(1, this.error) + it('Should call unlink with correct options', async function() { + await FSPersistorManager.promises.deleteFile(location, files[0]) + expect(fs.unlink).to.have.been.calledWith(`${location}/${files[0]}`) }) - it('Should call unlink with correct options', function(done) { - return this.FSPersistorManager.deleteFile( - this.location, - this.name1, - err => { - this.Fs.unlink - .calledWith(`${this.location}/${this.name1Filtered}`) - .should.equal(true) - return done() - } - ) - }) - - return it('Should propogate the error', function(done) { - return this.FSPersistorManager.deleteFile( - this.location, - this.name1, - err => { - err.should.equal(this.error) - return done() - } - ) + it('Should propagate the error', async function() { + fs.unlink.yields(error) + await expect( + FSPersistorManager.promises.deleteFile(location, files[0]) + ).to.eventually.be.rejectedWith(error) }) }) describe('deleteDirectory', function() { - beforeEach(function() { - return this.Rimraf.callsArgWith(1, this.error) + it('Should call rmdir(rimraf) with correct options', async function() { + await FSPersistorManager.promises.deleteDirectory(location, files[0]) + expect(rimraf).to.have.been.calledWith(`${location}/${files[0]}`) }) - it('Should call rmdir(rimraf) with correct options', function(done) { - return this.FSPersistorManager.deleteDirectory( - this.location, - this.name1, - err => { - this.Rimraf.calledWith( - `${this.location}/${this.name1Filtered}` - ).should.equal(true) - return done() - } - ) - }) - - return it('Should propogate the error', function(done) { - return this.FSPersistorManager.deleteDirectory( - this.location, - this.name1, - err => { - err.should.equal(this.error) - return done() - } - ) + it('Should propagate the error', async function() { + rimraf.yields(error) + await 
expect( + FSPersistorManager.promises.deleteDirectory(location, files[0]) + ).to.eventually.be.rejectedWith(error) }) }) describe('checkIfFileExists', function() { + const filename = 'wombat' + const badFilename = 'potato' + const noentError = new Error('not found') + noentError.code = 'ENOENT' + beforeEach(function() { - return this.Fs.exists.callsArgWith(1, true) + fs.stat + .yields(error) + .withArgs(`${location}/${filename}`) + .yields(null, {}) + .withArgs(`${location}/${badFilename}`) + .yields(noentError) }) - it('Should call exists with correct options', function(done) { - return this.FSPersistorManager.checkIfFileExists( - this.location, - this.name1, - exists => { - this.Fs.exists - .calledWith(`${this.location}/${this.name1Filtered}`) - .should.equal(true) - return done() - } - ) + it('Should call stat with correct options', async function() { + await FSPersistorManager.promises.checkIfFileExists(location, filename) + expect(fs.stat).to.have.been.calledWith(`${location}/${filename}`) }) - // fs.exists simply returns false on any error, so... - it('should not return an error', function(done) { - return this.FSPersistorManager.checkIfFileExists( - this.location, - this.name1, - (err, exists) => { - expect(err).to.be.null - return done() - } - ) + it('Should return true for existing files', async function() { + expect( + await FSPersistorManager.promises.checkIfFileExists(location, filename) + ).to.equal(true) }) - it('Should return true for existing files', function(done) { - this.Fs.exists.callsArgWith(1, true) - return this.FSPersistorManager.checkIfFileExists( - this.location, - this.name1, - (err, exists) => { - exists.should.be.true - return done() - } - ) + it('Should return false for non-existing files', async function() { + expect( + await FSPersistorManager.promises.checkIfFileExists( + location, + badFilename + ) + ).to.equal(false) }) - return it('Should return false for non-existing files', function(done) { - this.Fs.exists.callsArgWith(1, false) - return this.FSPersistorManager.checkIfFileExists( - this.location, - this.name1, - (err, exists) => { - exists.should.be.false - return done() - } + it('should wrap the error if there is a problem', async function() { + await expect( + FSPersistorManager.promises.checkIfFileExists(location, 'llama') ) + .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) + .and.have.property('cause', error) }) }) - return describe('directorySize', function() { - it('should propogate the error', function(done) { - this.Fs.readdir.callsArgWith(1, this.error) - return this.FSPersistorManager.directorySize( - this.location, - this.name1, - (err, totalsize) => { - err.should.equal(this.error) - return done() - } + describe('directorySize', function() { + it('should wrap the error', async function() { + fs.readdir.yields(error) + await expect( + FSPersistorManager.promises.directorySize(location, 'wombat') ) + .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) + .and.include({ cause: error }) + .and.have.property('info') + .which.includes({ location, name: 'wombat' }) }) - return it('should sum directory files size', function(done) { - this.Fs.readdir.callsArgWith(1, null, [ - { file1: 'file1' }, - { file2: 'file2' } - ]) - this.Fs.fstatSync.returns({ size: 1024 }) - return this.FSPersistorManager.directorySize( - this.location, - this.name1, - (err, totalsize) => { - expect(totalsize).to.equal(2048) - return done() - } - ) + it('should sum directory files size', async function() { + expect( + await 
FSPersistorManager.promises.directorySize(location, 'wombat') + ).to.equal(stat.size * files.length) }) }) }) From 21ba083877d6f5aa961161b2a69f458c844eca72 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 6 Jan 2020 14:09:28 +0000 Subject: [PATCH 367/555] Use 'glob' to determine 'directory' size. Check for filtered directory paths in all tests. --- .../filestore/app/js/FSPersistorManager.js | 20 +++- services/filestore/npm-shrinkwrap.json | 29 ++++-- services/filestore/package.json | 1 + .../test/unit/js/FSPersistorManagerTests.js | 96 +++++++++++-------- 4 files changed, 96 insertions(+), 50 deletions(-) diff --git a/services/filestore/app/js/FSPersistorManager.js b/services/filestore/app/js/FSPersistorManager.js index ea793cfc64..8c3ad9ae02 100644 --- a/services/filestore/app/js/FSPersistorManager.js +++ b/services/filestore/app/js/FSPersistorManager.js @@ -1,4 +1,5 @@ const fs = require('fs') +const glob = require('glob') const logger = require('logger-sharelatex') const path = require('path') const rimraf = require('rimraf') @@ -12,7 +13,7 @@ const pipeline = promisify(Stream.pipeline) const fsUnlink = promisify(fs.unlink) const fsOpen = promisify(fs.open) const fsStat = promisify(fs.stat) -const fsReaddir = promisify(fs.readdir) +const fsGlob = promisify(glob) const rmrf = promisify(rimraf) const filterName = key => key.replace(/\//g, '_') @@ -124,16 +125,25 @@ async function checkIfFileExists(location, name) { } } -// note, does not recurse into subdirectories +// note, does not recurse into subdirectories, as we use a flattened directory structure async function directorySize(location, name) { const filteredName = filterName(name.replace(/\/$/, '')) let size = 0 try { - const files = await fsReaddir(`${location}/${filteredName}`) + const files = await fsGlob(`${location}/${filteredName}_*`) for (const file of files) { - const stat = await fsStat(`${location}/${filteredName}/${file}`) - size += stat.size + try { + const stat = await fsStat(file) + if (stat.isFile()) { + size += stat.size + } + } catch (err) { + // ignore files that may have just been deleted + if (err.code !== 'ENOENT') { + throw err + } + } } } catch (err) { throw new ReadError({ diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index fa498f3f1b..3ed1400a61 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -473,7 +473,7 @@ "@sinonjs/text-encoding": { "version": "0.7.1", "resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz", - "integrity": "sha1-jaXGUwkVZT86Hzj9XxAdjD+AecU=", + "integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==", "dev": true }, "@types/caseless": { @@ -2189,14 +2189,14 @@ "integrity": "sha1-uKLHAUu1zUFTTpg7XKFgo3RwhGk=" }, "glob": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", - "optional": true, + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", "requires": { + "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", - "minimatch": "2 || 3", + "minimatch": "^3.0.4", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } @@ -2671,7 +2671,7 @@ "just-extend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.0.2.tgz", - 
"integrity": "sha1-8/R/ffyg+YnFVBCn68iFSwcQivw=", + "integrity": "sha512-FrLwOgm+iXrPV+5zDU6Jqu4gCRXbWEQg2O3SKONsWE4w7AXFRkryS53bpWdaL9cNol+AmR3AEYz6kn+o0fCPnw==", "dev": true }, "jwa": { @@ -3259,6 +3259,21 @@ "optional": true, "requires": { "glob": "^6.0.1" + }, + "dependencies": { + "glob": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", + "optional": true, + "requires": { + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "2 || 3", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + } } } } diff --git a/services/filestore/package.json b/services/filestore/package.json index 9515c1850c..5d7c3e3ec1 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -26,6 +26,7 @@ "body-parser": "^1.2.0", "express": "^4.2.0", "fs-extra": "^1.0.0", + "glob": "^7.1.6", "heapdump": "^0.3.2", "knox": "~0.9.1", "logger-sharelatex": "^1.7.0", diff --git a/services/filestore/test/unit/js/FSPersistorManagerTests.js b/services/filestore/test/unit/js/FSPersistorManagerTests.js index d399a87cee..cb177989a5 100644 --- a/services/filestore/test/unit/js/FSPersistorManagerTests.js +++ b/services/filestore/test/unit/js/FSPersistorManagerTests.js @@ -10,7 +10,7 @@ chai.use(require('chai-as-promised')) const modulePath = '../../../app/js/FSPersistorManager.js' describe('FSPersistorManagerTests', function() { - const stat = { size: 4 } + const stat = { size: 4, isFile: sinon.stub().returns(true) } const fd = 1234 const readStream = 'readStream' const writeStream = 'writeStream' @@ -19,8 +19,9 @@ describe('FSPersistorManagerTests', function() { const location = '/foo' const error = new Error('guru meditation error') - const files = ['wombat.txt', 'potato.tex'] - let fs, rimraf, stream, LocalFileWriter, FSPersistorManager + const files = ['animals/wombat.tex', 'vegetables/potato.tex'] + const filteredFilenames = ['animals_wombat.tex', 'vegetables_potato.tex'] + let fs, rimraf, stream, LocalFileWriter, FSPersistorManager, glob beforeEach(function() { fs = { @@ -28,9 +29,9 @@ describe('FSPersistorManagerTests', function() { createWriteStream: sinon.stub().returns(writeStream), unlink: sinon.stub().yields(), open: sinon.stub().yields(null, fd), - readdir: sinon.stub().yields(null, files), stat: sinon.stub().yields(null, stat) } + glob = sinon.stub().yields(null, files) rimraf = sinon.stub().yields() stream = { pipeline: sinon.stub().yields() } LocalFileWriter = { @@ -42,25 +43,31 @@ describe('FSPersistorManagerTests', function() { FSPersistorManager = SandboxedModule.require(modulePath, { requires: { './LocalFileWriter': LocalFileWriter, - fs: fs, 'logger-sharelatex': { log() {}, err() {} }, - rimraf: rimraf, - stream: stream, - './Errors': Errors + './Errors': Errors, + fs, + glob, + rimraf, + stream }, globals: { console } }) }) describe('sendFile', function() { + const localFilesystemPath = '/path/to/local/file' it('should copy the file', async function() { - await FSPersistorManager.promises.sendFile(location, files[0], files[1]) - expect(fs.createReadStream).to.have.been.calledWith(files[1]) + await FSPersistorManager.promises.sendFile( + location, + files[0], + localFilesystemPath + ) + expect(fs.createReadStream).to.have.been.calledWith(localFilesystemPath) expect(fs.createWriteStream).to.have.been.calledWith( - `${location}/${files[0]}` + `${location}/${filteredFilenames[0]}` ) expect(stream.pipeline).to.have.been.calledWith(readStream, writeStream) }) @@ -68,7 +75,11 @@ 
describe('FSPersistorManagerTests', function() { it('should return an error if the file cannot be stored', async function() { stream.pipeline.yields(error) await expect( - FSPersistorManager.promises.sendFile(location, files[0], files[1]) + FSPersistorManager.promises.sendFile( + location, + files[0], + localFilesystemPath + ) ).to.eventually.be.rejectedWith(error) }) }) @@ -114,16 +125,15 @@ describe('FSPersistorManagerTests', function() { }) describe('getFileStream', function() { - const filename = 'wombat/potato' - const filteredFilename = 'wombat_potato' - it('should use correct file location', async function() { - await FSPersistorManager.promises.getFileStream(location, filename, {}) - expect(fs.open).to.have.been.calledWith(`${location}/${filteredFilename}`) + await FSPersistorManager.promises.getFileStream(location, files[0], {}) + expect(fs.open).to.have.been.calledWith( + `${location}/${filteredFilenames[0]}` + ) }) it('should pass the options to createReadStream', async function() { - await FSPersistorManager.promises.getFileStream(location, filename, { + await FSPersistorManager.promises.getFileStream(location, files[0], { start: 0, end: 8 }) @@ -140,7 +150,7 @@ describe('FSPersistorManagerTests', function() { fs.open.yields(err) await expect( - FSPersistorManager.promises.getFileStream(location, filename, {}) + FSPersistorManager.promises.getFileStream(location, files[0], {}) ) .to.eventually.be.rejectedWith('file not found') .and.be.an.instanceOf(Errors.NotFoundError) @@ -149,7 +159,7 @@ describe('FSPersistorManagerTests', function() { it('should wrap any other error', async function() { fs.open.yields(error) await expect( - FSPersistorManager.promises.getFileStream(location, filename, {}) + FSPersistorManager.promises.getFileStream(location, files[0], {}) ) .to.eventually.be.rejectedWith('failed to open file for streaming') .and.be.an.instanceOf(Errors.ReadError) @@ -158,9 +168,7 @@ describe('FSPersistorManagerTests', function() { }) describe('getFileSize', function() { - const filename = 'wombat/potato' const badFilename = 'neenaw.tex' - const filteredFilename = 'wombat_potato' const size = 65536 const noentError = new Error('not found') noentError.code = 'ENOENT' @@ -168,7 +176,7 @@ describe('FSPersistorManagerTests', function() { beforeEach(function() { fs.stat .yields(error) - .withArgs(`${location}/${filteredFilename}`) + .withArgs(`${location}/${filteredFilenames[0]}`) .yields(null, { size }) .withArgs(`${location}/${badFilename}`) .yields(noentError) @@ -176,7 +184,7 @@ describe('FSPersistorManagerTests', function() { it('should return the file size', async function() { expect( - await FSPersistorManager.promises.getFileSize(location, filename) + await FSPersistorManager.promises.getFileSize(location, files[0]) ).to.equal(size) }) @@ -197,14 +205,14 @@ describe('FSPersistorManagerTests', function() { it('Should open the source for reading', async function() { await FSPersistorManager.promises.copyFile(location, files[0], files[1]) expect(fs.createReadStream).to.have.been.calledWith( - `${location}/${files[0]}` + `${location}/${filteredFilenames[0]}` ) }) it('Should open the target for writing', async function() { await FSPersistorManager.promises.copyFile(location, files[0], files[1]) expect(fs.createWriteStream).to.have.been.calledWith( - `${location}/${files[1]}` + `${location}/${filteredFilenames[1]}` ) }) @@ -217,7 +225,9 @@ describe('FSPersistorManagerTests', function() { describe('deleteFile', function() { it('Should call unlink with correct options', async 
function() { await FSPersistorManager.promises.deleteFile(location, files[0]) - expect(fs.unlink).to.have.been.calledWith(`${location}/${files[0]}`) + expect(fs.unlink).to.have.been.calledWith( + `${location}/${filteredFilenames[0]}` + ) }) it('Should propagate the error', async function() { @@ -231,7 +241,9 @@ describe('FSPersistorManagerTests', function() { describe('deleteDirectory', function() { it('Should call rmdir(rimraf) with correct options', async function() { await FSPersistorManager.promises.deleteDirectory(location, files[0]) - expect(rimraf).to.have.been.calledWith(`${location}/${files[0]}`) + expect(rimraf).to.have.been.calledWith( + `${location}/${filteredFilenames[0]}` + ) }) it('Should propagate the error', async function() { @@ -243,28 +255,29 @@ describe('FSPersistorManagerTests', function() { }) describe('checkIfFileExists', function() { - const filename = 'wombat' - const badFilename = 'potato' + const badFilename = 'pototo' const noentError = new Error('not found') noentError.code = 'ENOENT' beforeEach(function() { fs.stat .yields(error) - .withArgs(`${location}/${filename}`) + .withArgs(`${location}/${filteredFilenames[0]}`) .yields(null, {}) .withArgs(`${location}/${badFilename}`) .yields(noentError) }) it('Should call stat with correct options', async function() { - await FSPersistorManager.promises.checkIfFileExists(location, filename) - expect(fs.stat).to.have.been.calledWith(`${location}/${filename}`) + await FSPersistorManager.promises.checkIfFileExists(location, files[0]) + expect(fs.stat).to.have.been.calledWith( + `${location}/${filteredFilenames[0]}` + ) }) it('Should return true for existing files', async function() { expect( - await FSPersistorManager.promises.checkIfFileExists(location, filename) + await FSPersistorManager.promises.checkIfFileExists(location, files[0]) ).to.equal(true) }) @@ -288,19 +301,26 @@ describe('FSPersistorManagerTests', function() { describe('directorySize', function() { it('should wrap the error', async function() { - fs.readdir.yields(error) + glob.yields(error) await expect( - FSPersistorManager.promises.directorySize(location, 'wombat') + FSPersistorManager.promises.directorySize(location, files[0]) ) .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) .and.include({ cause: error }) .and.have.property('info') - .which.includes({ location, name: 'wombat' }) + .which.includes({ location, name: files[0] }) + }) + + it('should filter the directory name', async function() { + await FSPersistorManager.promises.directorySize(location, files[0]) + expect(glob).to.have.been.calledWith( + `${location}/${filteredFilenames[0]}_*` + ) }) it('should sum directory files size', async function() { expect( - await FSPersistorManager.promises.directorySize(location, 'wombat') + await FSPersistorManager.promises.directorySize(location, files[0]) ).to.equal(stat.size * files.length) }) }) From 039bec02f729f08c0dea3eabd5da9c24d0b772ca Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 6 Jan 2020 14:14:22 +0000 Subject: [PATCH 368/555] Use directory names in acceptance tests, and add test for getting size --- .../test/acceptance/js/FilestoreTests.js | 75 +++++++++++++++++-- 1 file changed, 70 insertions(+), 5 deletions(-) diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 3315569a24..9260b1bd62 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -56,6 +56,7 @@ if 
(process.env.AWS_ACCESS_KEY_ID) { describe('Filestore', function() { this.timeout(1000 * 10) const filestoreUrl = `http://localhost:${Settings.internal.filestore.port}` + const directoryName = 'directory' // redefine the test suite for every available backend Object.keys(BackendSettings).forEach(backend => { @@ -113,11 +114,11 @@ describe('Filestore', function() { beforeEach(async function() { fileId = Math.random() - fileUrl = `${filestoreUrl}/project/acceptance_tests/file/${fileId}` + fileUrl = `${filestoreUrl}/project/acceptance_tests/file/${directoryName}%2F${fileId}` const writeStream = request.post(fileUrl) const readStream = fs.createReadStream(localFileReadPath) - // consume the result to ensure the http request has been fully processed + // hack to consume the result to ensure the http request has been fully processed const resultStream = fs.createWriteStream('/dev/null') await pipeline(readStream, writeStream, resultStream) }) @@ -176,14 +177,14 @@ describe('Filestore', function() { it('should be able to copy files', async function() { const newProjectID = 'acceptance_tests_copyied_project' const newFileId = Math.random() - const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${newFileId}` + const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}` const opts = { method: 'put', uri: newFileUrl, json: { source: { project_id: 'acceptance_tests', - file_id: fileId + file_id: `${directoryName}/${fileId}` } } } @@ -223,6 +224,70 @@ describe('Filestore', function() { } }) + describe('with multiple files', function() { + let fileIds, fileUrls, project + const directoryName = 'directory' + const localFileReadPaths = [ + '/tmp/filestore_acceptance_tests_file_read_1.txt', + '/tmp/filestore_acceptance_tests_file_read_2.txt' + ] + const constantFileContents = [ + [ + 'hello world', + `line 2 goes here ${Math.random()}`, + 'there are 3 lines in all' + ].join('\n'), + [ + `for reference: ${Math.random()}`, + 'cats are the best animals', + 'wombats are a close second' + ].join('\n') + ] + + before(async function() { + return Promise.all([ + fsWriteFile(localFileReadPaths[0], constantFileContents[0]), + fsWriteFile(localFileReadPaths[1], constantFileContents[1]) + ]) + }) + + beforeEach(async function() { + project = `acceptance_tests_${Math.random()}` + fileIds = [Math.random(), Math.random()] + fileUrls = [ + `${filestoreUrl}/project/${project}/file/${directoryName}%2F${fileIds[0]}`, + `${filestoreUrl}/project/${project}/file/${directoryName}%2F${fileIds[1]}` + ] + + const writeStreams = [ + request.post(fileUrls[0]), + request.post(fileUrls[1]) + ] + const readStreams = [ + fs.createReadStream(localFileReadPaths[0]), + fs.createReadStream(localFileReadPaths[1]) + ] + // hack to consume the result to ensure the http request has been fully processed + const resultStreams = [ + fs.createWriteStream('/dev/null'), + fs.createWriteStream('/dev/null') + ] + return Promise.all([ + pipeline(readStreams[0], writeStreams[0], resultStreams[0]), + pipeline(readStreams[1], writeStreams[1], resultStreams[1]) + ]) + }) + + it('should get the directory size', async function() { + const response = await rp.get( + `${filestoreUrl}/project/${project}/size` + ) + expect(parseInt(JSON.parse(response.body)['total bytes'])).to.equal( + constantFileContents[0].length + constantFileContents[1].length + ) + }) + }) + describe('with a pdf file', function() { let fileId, fileUrl, localFileSize const localFileReadPath = Path.resolve( @@ -232,7 +297,7 @@ 
describe('Filestore', function() { beforeEach(async function() { fileId = Math.random() - fileUrl = `${filestoreUrl}/project/acceptance_tests/file/${fileId}` + fileUrl = `${filestoreUrl}/project/acceptance_tests/file/${directoryName}%2F${fileId}` const stat = await fsStat(localFileReadPath) localFileSize = stat.size const writeStream = request.post(fileUrl) From 25f1c2bfc407a2ed156082b3cfde4186cafbe75c Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 6 Jan 2020 14:43:23 +0000 Subject: [PATCH 369/555] Delete temporary file when error in writing to stream --- services/filestore/app/js/LocalFileWriter.js | 13 ++++- .../test/unit/js/LocalFileWriterTests.js | 48 +++++++++++++++---- 2 files changed, 52 insertions(+), 9 deletions(-) diff --git a/services/filestore/app/js/LocalFileWriter.js b/services/filestore/app/js/LocalFileWriter.js index 44f3f9433a..22957e15d1 100644 --- a/services/filestore/app/js/LocalFileWriter.js +++ b/services/filestore/app/js/LocalFileWriter.js @@ -32,6 +32,8 @@ async function writeStream(stream, key) { logger.log({ fsPath }, 'finished writing file locally') return fsPath } catch (err) { + await deleteFile(fsPath) + logger.err({ err, fsPath }, 'problem writing file locally') throw new WriteError({ message: 'problem writing file locally', @@ -45,7 +47,16 @@ async function deleteFile(fsPath) { return } logger.log({ fsPath }, 'removing local temp file') - await promisify(fs.unlink)(fsPath) + try { + await promisify(fs.unlink)(fsPath) + } catch (err) { + if (err.code !== 'ENOENT') { + throw new WriteError({ + message: 'failed to delete file', + info: { fsPath } + }).withCause(err) + } + } } function _getPath(key) { diff --git a/services/filestore/test/unit/js/LocalFileWriterTests.js b/services/filestore/test/unit/js/LocalFileWriterTests.js index 5d7008a91f..ad4d73bce6 100644 --- a/services/filestore/test/unit/js/LocalFileWriterTests.js +++ b/services/filestore/test/unit/js/LocalFileWriterTests.js @@ -49,6 +49,26 @@ describe('LocalFileWriter', function() { done() }) }) + + describe('when there is an error', function() { + const error = new Error('not enough ketchup') + beforeEach(function() { + stream.pipeline.yields(error) + }) + + it('should wrap the error', function() { + LocalFileWriter.writeStream(readStream, filename, err => { + expect(err).to.exist + expect(err.cause).to.equal(error) + }) + }) + + it('should delete the temporary file', function() { + LocalFileWriter.writeStream(readStream, filename, () => { + expect(fs.unlink).to.have.been.calledWith(fsPath) + }) + }) + }) }) describe('deleteFile', function() { @@ -60,14 +80,6 @@ describe('LocalFileWriter', function() { }) }) - it('should not do anything if called with an empty path', function(done) { - fs.unlink = sinon.stub().yields(new Error('failed to reticulate splines')) - LocalFileWriter.deleteFile(fsPath, err => { - expect(err).to.exist - done() - }) - }) - it('should not call unlink with an empty path', function(done) { LocalFileWriter.deleteFile('', err => { expect(err).not.to.exist @@ -75,5 +87,25 @@ describe('LocalFileWriter', function() { done() }) }) + + it('should not throw a error if the file does not exist', function(done) { + const error = new Error('file not found') + error.code = 'ENOENT' + fs.unlink = sinon.stub().yields(error) + LocalFileWriter.deleteFile(fsPath, err => { + expect(err).not.to.exist + done() + }) + }) + + it('should wrap the error', function(done) { + const error = new Error('failed to reticulate splines') + fs.unlink = sinon.stub().yields(error) + 
LocalFileWriter.deleteFile(fsPath, err => { + expect(err).to.exist + expect(err.cause).to.equal(error) + done() + }) + }) }) }) From f1b6b35c691ff0c418c23203ef6821df01dedad1 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 6 Jan 2020 15:11:35 +0000 Subject: [PATCH 370/555] Throw errors more consistently in FSPersistorManager --- .../filestore/app/js/FSPersistorManager.js | 121 ++++++++++++------ .../test/unit/js/FSPersistorManagerTests.js | 10 +- 2 files changed, 90 insertions(+), 41 deletions(-) diff --git a/services/filestore/app/js/FSPersistorManager.js b/services/filestore/app/js/FSPersistorManager.js index 8c3ad9ae02..c649dfb61b 100644 --- a/services/filestore/app/js/FSPersistorManager.js +++ b/services/filestore/app/js/FSPersistorManager.js @@ -7,7 +7,7 @@ const Stream = require('stream') const { promisify, callbackify } = require('util') const LocalFileWriter = require('./LocalFileWriter').promises -const { NotFoundError, ReadError } = require('./Errors') +const { NotFoundError, ReadError, WriteError } = require('./Errors') const pipeline = promisify(Stream.pipeline) const fsUnlink = promisify(fs.unlink) @@ -24,17 +24,26 @@ async function sendFile(location, target, source) { // actually copy the file (instead of moving it) to maintain consistent behaviour // between the different implementations - const sourceStream = fs.createReadStream(source) - const targetStream = fs.createWriteStream(`${location}/${filteredTarget}`) - await pipeline(sourceStream, targetStream) + try { + const sourceStream = fs.createReadStream(source) + const targetStream = fs.createWriteStream(`${location}/${filteredTarget}`) + await pipeline(sourceStream, targetStream) + } catch (err) { + throw _wrapError( + err, + 'failed to copy the specified file', + { location, target, source }, + WriteError + ) + } } async function sendStream(location, target, sourceStream) { logger.log({ location, target }, 'sending file stream') - let fsPath + const fsPath = await LocalFileWriter.writeStream(sourceStream) + try { - fsPath = await LocalFileWriter.writeStream(sourceStream) await sendFile(location, target, fsPath) } finally { await LocalFileWriter.deleteFile(fsPath) @@ -51,16 +60,12 @@ async function getFileStream(location, name, opts) { } catch (err) { logger.err({ err, location, filteredName: name }, 'Error reading from file') - if (err.code === 'ENOENT') { - throw new NotFoundError({ - message: 'file not found', - info: { - location, - filteredName - } - }).withCause(err) - } - throw new ReadError('failed to open file for streaming').withCause(err) + throw _wrapError( + err, + 'failed to open file for streaming', + { location, filteredName, opts }, + ReadError + ) } return fs.createReadStream(null, opts) @@ -75,16 +80,12 @@ async function getFileSize(location, filename) { } catch (err) { logger.err({ err, location, filename }, 'failed to stat file') - if (err.code === 'ENOENT') { - throw new NotFoundError({ - message: 'file not found', - info: { - location, - fullPath - } - }).withCause(err) - } - throw new ReadError('failed to stat file').withCause(err) + throw _wrapError( + err, + 'failed to stat file', + { location, filename }, + ReadError + ) } } @@ -93,15 +94,33 @@ async function copyFile(location, fromName, toName) { const filteredToName = filterName(toName) logger.log({ location, filteredFromName, filteredToName }, 'copying file') - const sourceStream = fs.createReadStream(`${location}/${filteredFromName}`) - const targetStream = fs.createWriteStream(`${location}/${filteredToName}`) - await 
pipeline(sourceStream, targetStream) + try { + const sourceStream = fs.createReadStream(`${location}/${filteredFromName}`) + const targetStream = fs.createWriteStream(`${location}/${filteredToName}`) + await pipeline(sourceStream, targetStream) + } catch (err) { + throw _wrapError( + err, + 'failed to copy file', + { location, filteredFromName, filteredToName }, + WriteError + ) + } } async function deleteFile(location, name) { const filteredName = filterName(name) logger.log({ location, filteredName }, 'delete file') - await fsUnlink(`${location}/${filteredName}`) + try { + await fsUnlink(`${location}/${filteredName}`) + } catch (err) { + throw _wrapError( + err, + 'failed to delete file', + { location, filteredName }, + WriteError + ) + } } async function deleteDirectory(location, name) { @@ -109,7 +128,16 @@ async function deleteDirectory(location, name) { logger.log({ location, filteredName }, 'deleting directory') - await rmrf(`${location}/${filteredName}`) + try { + await rmrf(`${location}/${filteredName}`) + } catch (err) { + throw _wrapError( + err, + 'failed to delete directory', + { location, filteredName }, + WriteError + ) + } } async function checkIfFileExists(location, name) { @@ -121,7 +149,12 @@ async function checkIfFileExists(location, name) { if (err.code === 'ENOENT') { return false } - throw new ReadError('failed to stat file').withCause(err) + throw _wrapError( + err, + 'failed to stat file', + { location, filteredName }, + ReadError + ) } } @@ -146,15 +179,31 @@ async function directorySize(location, name) { } } } catch (err) { - throw new ReadError({ - message: 'failed to get directory size', - info: { location, name } - }).withCause(err) + throw _wrapError( + err, + 'failed to get directory size', + { location, name }, + ReadError + ) } return size } +function _wrapError(error, message, params, ErrorType) { + if (error.code === 'ENOENT') { + return new NotFoundError({ + message: 'no such file or directory', + info: params + }).withCause(error) + } else { + return new ErrorType({ + message: message, + info: params + }).withCause(error) + } +} + module.exports = { sendFile: callbackify(sendFile), sendStream: callbackify(sendStream), diff --git a/services/filestore/test/unit/js/FSPersistorManagerTests.js b/services/filestore/test/unit/js/FSPersistorManagerTests.js index cb177989a5..d0bd6b078e 100644 --- a/services/filestore/test/unit/js/FSPersistorManagerTests.js +++ b/services/filestore/test/unit/js/FSPersistorManagerTests.js @@ -80,7 +80,7 @@ describe('FSPersistorManagerTests', function() { files[0], localFilesystemPath ) - ).to.eventually.be.rejectedWith(error) + ).to.eventually.be.rejected.and.have.property('cause', error) }) }) @@ -152,8 +152,8 @@ describe('FSPersistorManagerTests', function() { await expect( FSPersistorManager.promises.getFileStream(location, files[0], {}) ) - .to.eventually.be.rejectedWith('file not found') - .and.be.an.instanceOf(Errors.NotFoundError) + .to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) + .and.have.property('cause', err) }) it('should wrap any other error', async function() { @@ -234,7 +234,7 @@ describe('FSPersistorManagerTests', function() { fs.unlink.yields(error) await expect( FSPersistorManager.promises.deleteFile(location, files[0]) - ).to.eventually.be.rejectedWith(error) + ).to.eventually.be.rejected.and.have.property('cause', error) }) }) @@ -250,7 +250,7 @@ describe('FSPersistorManagerTests', function() { rimraf.yields(error) await expect( FSPersistorManager.promises.deleteDirectory(location, 
files[0]) - ).to.eventually.be.rejectedWith(error) + ).to.eventually.be.rejected.and.have.property('cause', error) }) }) From fccb28898554a98c90e3e5b1cc062761b38007df Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 6 Jan 2020 15:13:50 +0000 Subject: [PATCH 371/555] Add comment regarding deleteDirectory being internal-only --- services/filestore/app/js/FSPersistorManager.js | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/app/js/FSPersistorManager.js b/services/filestore/app/js/FSPersistorManager.js index c649dfb61b..1a9d2b824d 100644 --- a/services/filestore/app/js/FSPersistorManager.js +++ b/services/filestore/app/js/FSPersistorManager.js @@ -123,6 +123,7 @@ async function deleteFile(location, name) { } } +// this is only called internally for clean-up by `FileHandler` and isn't part of the external API async function deleteDirectory(location, name) { const filteredName = filterName(name.replace(/\/$/, '')) From 37d44d5d8655fd83179b60193bf5742e36b304a5 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 6 Jan 2020 15:58:57 +0000 Subject: [PATCH 372/555] Return full path in stubbed 'glob' method --- services/filestore/test/unit/js/FSPersistorManagerTests.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/filestore/test/unit/js/FSPersistorManagerTests.js b/services/filestore/test/unit/js/FSPersistorManagerTests.js index d0bd6b078e..6847f5b8ef 100644 --- a/services/filestore/test/unit/js/FSPersistorManagerTests.js +++ b/services/filestore/test/unit/js/FSPersistorManagerTests.js @@ -20,6 +20,7 @@ describe('FSPersistorManagerTests', function() { const error = new Error('guru meditation error') const files = ['animals/wombat.tex', 'vegetables/potato.tex'] + const globs = [`${location}/${files[0]}`, `${location}/${files[1]}`] const filteredFilenames = ['animals_wombat.tex', 'vegetables_potato.tex'] let fs, rimraf, stream, LocalFileWriter, FSPersistorManager, glob @@ -31,7 +32,7 @@ describe('FSPersistorManagerTests', function() { open: sinon.stub().yields(null, fd), stat: sinon.stub().yields(null, stat) } - glob = sinon.stub().yields(null, files) + glob = sinon.stub().yields(null, globs) rimraf = sinon.stub().yields() stream = { pipeline: sinon.stub().yields() } LocalFileWriter = { From ec80052d9a70935da8b75983f5a89a9e1426ec57 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 2 Jan 2020 15:51:09 +0000 Subject: [PATCH 373/555] Decaf cleanup of FileController Remove deprecated res.send --- services/filestore/app/js/FileController.js | 288 +++++++------- .../filestore/app/js/HealthCheckController.js | 4 +- .../test/unit/js/FileControllerTests.js | 361 +++++++++--------- 3 files changed, 320 insertions(+), 333 deletions(-) diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index ea41f3d813..dbba9a93cc 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -1,41 +1,37 @@ -/* eslint-disable - camelcase, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
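Aside: the deprecated res.send(statusCode) calls that this patch removes are replaced with res.sendStatus(statusCode). A minimal Express 4 sketch of the difference, for reference only; the route paths and port below are illustrative and not taken from the service:

const express = require('express')
const app = express()

// res.sendStatus(code) sets the status and sends the matching reason
// phrase ('Not Found') as the body; res.send(404) is deprecated in Express 4
app.get('/status-only', (req, res) => res.sendStatus(404))

// when an explicit body is wanted, chain res.status() with res.send()
app.get('/status-with-body', (req, res) =>
  res.status(404).send('file not found')
)

app.listen(3000)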
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let FileController const PersistorManager = require('./PersistorManager') -const settings = require('settings-sharelatex') const logger = require('logger-sharelatex') const FileHandler = require('./FileHandler') const metrics = require('metrics-sharelatex') const parseRange = require('range-parser') const Errors = require('./Errors') +const { pipeline } = require('stream') -const oneDayInSeconds = 60 * 60 * 24 const maxSizeInBytes = 1024 * 1024 * 1024 // 1GB -module.exports = FileController = { - getFile(req, res) { - const { key, bucket } = req - const { format, style } = req.query - const options = { - key, - bucket, - format, - style - } - metrics.inc('getFile') - logger.log({ key, bucket, format, style }, 'receiving request to get file') - if (req.headers.range != null) { - const range = FileController._get_range(req.headers.range) +module.exports = { + getFile, + getFileHead, + insertFile, + copyFile, + deleteFile, + directorySize +} + +function getFile(req, res) { + const { key, bucket } = req + const { format, style } = req.query + const options = { + key, + bucket, + format, + style + } + metrics.inc('getFile') + logger.log({ key, bucket, format, style }, 'receiving request to get file') + + if (req.headers.range) { + const range = _getRange(req.headers.range) + if (range) { options.start = range.start options.end = range.end logger.log( @@ -43,131 +39,131 @@ module.exports = FileController = { 'getting range of bytes from file' ) } - return FileHandler.getFile(bucket, key, options, function(err, fileStream) { - if (err != null) { - if (err instanceof Errors.NotFoundError) { - return res.send(404) - } else { - logger.err( - { err, key, bucket, format, style }, - 'problem getting file' - ) - return res.send(500) - } - } else if (req.query.cacheWarm) { - logger.log( - { key, bucket, format, style }, - 'request is only for cache warm so not sending stream' - ) - return res.send(200) - } else { - logger.log({ key, bucket, format, style }, 'sending file to response') - return fileStream.pipe(res) - } - }) - }, + } - getFileHead(req, res) { - const { key, bucket } = req - metrics.inc('getFileSize') - logger.log({ key, bucket }, 'receiving request to get file metadata') - return FileHandler.getFileSize(bucket, key, function(err, fileSize) { - if (err != null) { + FileHandler.getFile(bucket, key, options, function(err, fileStream) { + if (err) { + if (err instanceof Errors.NotFoundError) { + res.sendStatus(404) + } else { + logger.err({ err, key, bucket, format, style }, 'problem getting file') + res.sendStatus(500) + } + return + } + + if (req.query.cacheWarm) { + logger.log( + { key, bucket, format, style }, + 'request is only for cache warm so not sending stream' + ) + return res.sendStatus(200) + } + + logger.log({ key, bucket, format, style }, 'sending file to response') + pipeline(fileStream, res) + }) +} + +function getFileHead(req, res) { + const { key, bucket } = req + metrics.inc('getFileSize') + logger.log({ key, bucket }, 'receiving request to get file metadata') + FileHandler.getFileSize(bucket, key, function(err, fileSize) { + if (err) { + if (err instanceof Errors.NotFoundError) { + res.sendStatus(404) + } else { + res.sendStatus(500) + } + return + } + res.set('Content-Length', fileSize) + res.status(200).end() + }) +} + 
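For context on the range handling in getFile above: range-parser returns -1 (unsatisfiable) or -2 (malformed) for bad headers, and an array of ranges otherwise. A small standalone sketch of that behaviour, with expected results in the comments (my expectation, not output captured from the service):

const parseRange = require('range-parser')

const maxSizeInBytes = 1024 * 1024 * 1024 // 1GB, matching the constant above

// a well-formed byte range parses to an array of ranges with type 'bytes'
const parsed = parseRange(maxSizeInBytes, 'bytes=0-8')
console.log(parsed.type, parsed[0]) // 'bytes' { start: 0, end: 8 }

// a header with no '=' is malformed, so parseRange returns -2 and
// _getRange() below returns null, i.e. the range is ignored
console.log(parseRange(maxSizeInBytes, 'potato')) // -2

// other units parse, but type !== 'bytes', so they are ignored as well
console.log(parseRange(maxSizeInBytes, 'wombats=0-8').type) // 'wombats'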
+function insertFile(req, res) { + metrics.inc('insertFile') + const { key, bucket } = req + logger.log({ key, bucket }, 'receiving request to insert file') + FileHandler.insertFile(bucket, key, req, function(err) { + if (err) { + logger.log({ err, key, bucket }, 'error inserting file') + res.sendStatus(500) + } else { + res.sendStatus(200) + } + }) +} + +function copyFile(req, res) { + metrics.inc('copyFile') + const { key, bucket } = req + const oldProjectId = req.body.source.project_id + const oldFileId = req.body.source.file_id + logger.log( + { key, bucket, oldProject_id: oldProjectId, oldFile_id: oldFileId }, + 'receiving request to copy file' + ) + + PersistorManager.copyFile( + bucket, + `${oldProjectId}/${oldFileId}`, + key, + function(err) { + if (err) { if (err instanceof Errors.NotFoundError) { - res.status(404).end() + res.sendStatus(404) } else { - res.status(500).end() + logger.log( + { err, oldProject_id: oldProjectId, oldFile_id: oldFileId }, + 'something went wrong copying file' + ) + res.sendStatus(500) } return } - res.set('Content-Length', fileSize) - return res.status(200).end() - }) - }, - insertFile(req, res) { - metrics.inc('insertFile') - const { key, bucket } = req - logger.log({ key, bucket }, 'receiving request to insert file') - return FileHandler.insertFile(bucket, key, req, function(err) { - if (err != null) { - logger.log({ err, key, bucket }, 'error inserting file') - return res.send(500) - } else { - return res.send(200) - } - }) - }, - - copyFile(req, res) { - metrics.inc('copyFile') - const { key, bucket } = req - const oldProject_id = req.body.source.project_id - const oldFile_id = req.body.source.file_id - logger.log( - { key, bucket, oldProject_id, oldFile_id }, - 'receiving request to copy file' - ) - return PersistorManager.copyFile( - bucket, - `${oldProject_id}/${oldFile_id}`, - key, - function(err) { - if (err != null) { - if (err instanceof Errors.NotFoundError) { - return res.send(404) - } else { - logger.log( - { err, oldProject_id, oldFile_id }, - 'something went wrong copying file' - ) - return res.send(500) - } - } else { - return res.send(200) - } - } - ) - }, - - deleteFile(req, res) { - metrics.inc('deleteFile') - const { key, bucket } = req - logger.log({ key, bucket }, 'receiving request to delete file') - return FileHandler.deleteFile(bucket, key, function(err) { - if (err != null) { - logger.log({ err, key, bucket }, 'something went wrong deleting file') - return res.send(500) - } else { - return res.send(204) - } - }) - }, - - _get_range(header) { - const parsed = parseRange(maxSizeInBytes, header) - if (parsed === -1 || parsed === -2 || parsed.type !== 'bytes') { - return null - } else { - const range = parsed[0] - return { start: range.start, end: range.end } + res.sendStatus(200) } - }, + ) +} - directorySize(req, res) { - metrics.inc('projectSize') - const { project_id, bucket } = req - logger.log({ project_id, bucket }, 'receiving request to project size') - return FileHandler.getDirectorySize(bucket, project_id, function( - err, - size - ) { - if (err != null) { - logger.log({ err, project_id, bucket }, 'error inserting file') - return res.send(500) - } else { - return res.json({ 'total bytes': size }) - } - }) +function deleteFile(req, res) { + metrics.inc('deleteFile') + const { key, bucket } = req + logger.log({ key, bucket }, 'receiving request to delete file') + return FileHandler.deleteFile(bucket, key, function(err) { + if (err != null) { + logger.log({ err, key, bucket }, 'something went wrong deleting file') + return 
res.sendStatus(500) + } else { + return res.sendStatus(204) + } + }) +} + +function directorySize(req, res) { + metrics.inc('projectSize') + const { project_id: projectId, bucket } = req + logger.log({ projectId, bucket }, 'receiving request to project size') + FileHandler.getDirectorySize(bucket, projectId, function(err, size) { + if (err) { + logger.log({ err, projectId, bucket }, 'error inserting file') + return res.sendStatus(500) + } + + res.json({ 'total bytes': size }) + }) +} + +function _getRange(header) { + const parsed = parseRange(maxSizeInBytes, header) + if (parsed === -1 || parsed === -2 || parsed.type !== 'bytes') { + return null + } else { + const range = parsed[0] + return { start: range.start, end: range.end } } } diff --git a/services/filestore/app/js/HealthCheckController.js b/services/filestore/app/js/HealthCheckController.js index 8d6e35b783..5e12469ad3 100644 --- a/services/filestore/app/js/HealthCheckController.js +++ b/services/filestore/app/js/HealthCheckController.js @@ -63,10 +63,10 @@ module.exports = { check(req, res) { logger.log({}, 'performing health check') Promise.all([checkCanGetFiles(), checkFileConvert()]) - .then(() => res.send(200)) + .then(() => res.sendStatus(200)) .catch(err => { logger.err({ err }, 'Health check: error running') - res.send(500) + res.sendStatus(500) }) } } diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js index 2505e87c9b..00e3fd1505 100644 --- a/services/filestore/test/unit/js/FileControllerTests.js +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -1,52 +1,60 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
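For reference, the directorySize handler above responds with JSON shaped as { 'total bytes': <number> }. A hedged sketch of a caller reading it with request-promise; the URL, port and project id are placeholders, and json: true is used here rather than whatever defaults the acceptance tests configure:

const rp = require('request-promise')

// placeholder values for illustration only
const filestoreUrl = 'http://localhost:3009'
const projectId = 'some_project_id'

async function getProjectSize() {
  // json: true makes request-promise resolve with the parsed response body
  const body = await rp.get({
    uri: `${filestoreUrl}/project/${projectId}/size`,
    json: true
  })
  // directorySize() serialises the size under the 'total bytes' key
  return body['total bytes']
}

getProjectSize().then(size => console.log('total bytes:', size))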
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const { assert } = require('chai') const sinon = require('sinon') const chai = require('chai') -const should = chai.should() const { expect } = chai -const modulePath = '../../../app/js/FileController.js' const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../app/js/Errors') +const modulePath = '../../../app/js/FileController.js' describe('FileController', function() { - beforeEach(function() { - this.PersistorManager = { - sendStream: sinon.stub(), - copyFile: sinon.stub(), - deleteFile: sinon.stub() - } - - this.settings = { - s3: { - buckets: { - user_files: 'user_files' - } + let PersistorManager, + FileHandler, + LocalFileWriter, + FileController, + req, + res, + stream + const settings = { + s3: { + buckets: { + user_files: 'user_files' } } - this.FileHandler = { - getFile: sinon.stub(), - getFileSize: sinon.stub(), - deleteFile: sinon.stub(), - insertFile: sinon.stub(), - getDirectorySize: sinon.stub() + } + const fileSize = 1234 + const fileStream = 'fileStream' + const projectId = 'projectId' + const fileId = 'file_id' + const bucket = 'user_files' + const key = `${projectId}/${fileId}` + + beforeEach(function() { + PersistorManager = { + sendStream: sinon.stub().yields(), + copyFile: sinon.stub().yields(), + deleteFile: sinon.stub().yields() } - this.LocalFileWriter = {} - this.controller = SandboxedModule.require(modulePath, { + + FileHandler = { + getFile: sinon.stub().yields(null, fileStream), + getFileSize: sinon.stub().yields(null, fileSize), + deleteFile: sinon.stub().yields(), + insertFile: sinon.stub().yields(), + getDirectorySize: sinon.stub().yields(null, fileSize) + } + + LocalFileWriter = {} + stream = { + pipeline: sinon.stub() + } + + FileController = SandboxedModule.require(modulePath, { requires: { - './LocalFileWriter': this.LocalFileWriter, - './FileHandler': this.FileHandler, - './PersistorManager': this.PersistorManager, - './Errors': (this.Errors = { NotFoundError: sinon.stub() }), - 'settings-sharelatex': this.settings, + './LocalFileWriter': LocalFileWriter, + './FileHandler': FileHandler, + './PersistorManager': PersistorManager, + './Errors': Errors, + stream: stream, + 'settings-sharelatex': settings, 'metrics-sharelatex': { inc() {} }, @@ -54,244 +62,227 @@ describe('FileController', function() { log() {}, err() {} } - } + }, + globals: { console } }) - this.project_id = 'project_id' - this.file_id = 'file_id' - this.bucket = 'user_files' - this.key = `${this.project_id}/${this.file_id}` - this.req = { - key: this.key, - bucket: this.bucket, + + req = { + key: key, + bucket: bucket, query: {}, params: { - project_id: this.project_id, - file_id: this.file_id + project_id: projectId, + file_id: fileId }, headers: {} } - this.res = { + + res = { set: sinon.stub().returnsThis(), + sendStatus: sinon.stub().returnsThis(), status: sinon.stub().returnsThis() } - return (this.fileStream = {}) }) describe('getFile', function() { - it('should pipe the stream', function(done) { - this.FileHandler.getFile.callsArgWith(3, null, this.fileStream) - this.fileStream.pipe = res => { - res.should.equal(this.res) - return done() - } - return this.controller.getFile(this.req, this.res) + it('should pipe the stream', function() { + FileController.getFile(req, res) + expect(stream.pipeline).to.have.been.calledWith(fileStream, res) }) it('should 
send a 200 if the cacheWarm param is true', function(done) { - this.req.query.cacheWarm = true - this.FileHandler.getFile.callsArgWith(3, null, this.fileStream) - this.res.send = statusCode => { + req.query.cacheWarm = true + res.sendStatus = statusCode => { statusCode.should.equal(200) - return done() + done() } - return this.controller.getFile(this.req, this.res) + FileController.getFile(req, res) }) it('should send a 500 if there is a problem', function(done) { - this.FileHandler.getFile.callsArgWith(3, 'error') - this.res.send = code => { + FileHandler.getFile.yields('error') + res.sendStatus = code => { code.should.equal(500) - return done() + done() } - return this.controller.getFile(this.req, this.res) + FileController.getFile(req, res) }) - return describe("with a 'Range' header set", function() { + describe('with a range header', function() { + let expectedOptions + beforeEach(function() { - return (this.req.headers.range = 'bytes=0-8') + expectedOptions = { + bucket, + key, + format: undefined, + style: undefined + } }) - return it("should pass 'start' and 'end' options to FileHandler", function(done) { - this.FileHandler.getFile.callsArgWith(3, null, this.fileStream) - this.fileStream.pipe = res => { - expect(this.FileHandler.getFile.lastCall.args[2].start).to.equal(0) - expect(this.FileHandler.getFile.lastCall.args[2].end).to.equal(8) - return done() - } - return this.controller.getFile(this.req, this.res) + it('should pass range options to FileHandler', function() { + req.headers.range = 'bytes=0-8' + expectedOptions.start = 0 + expectedOptions.end = 8 + + FileController.getFile(req, res) + expect(FileHandler.getFile).to.have.been.calledWith( + bucket, + key, + expectedOptions + ) + }) + + it('should ignore an invalid range header', function() { + req.headers.range = 'potato' + FileController.getFile(req, res) + expect(FileHandler.getFile).to.have.been.calledWith( + bucket, + key, + expectedOptions + ) + }) + + it("should ignore any type other than 'bytes'", function() { + req.headers.range = 'wombats=0-8' + FileController.getFile(req, res) + expect(FileHandler.getFile).to.have.been.calledWith( + bucket, + key, + expectedOptions + ) }) }) }) describe('getFileHead', function() { it('should return the file size in a Content-Length header', function(done) { - const expectedFileSize = 84921 - this.FileHandler.getFileSize.yields( - new Error('FileHandler.getFileSize: unexpected arguments') - ) - this.FileHandler.getFileSize - .withArgs(this.bucket, this.key) - .yields(null, expectedFileSize) - - this.res.end = () => { - expect(this.res.status.lastCall.args[0]).to.equal(200) - expect( - this.res.set.calledWith('Content-Length', expectedFileSize) - ).to.equal(true) - return done() + res.end = () => { + expect(res.status).to.have.been.calledWith(200) + expect(res.set).to.have.been.calledWith('Content-Length', fileSize) + done() } - return this.controller.getFileHead(this.req, this.res) + FileController.getFileHead(req, res) }) it('should return a 404 is the file is not found', function(done) { - this.FileHandler.getFileSize.yields(new this.Errors.NotFoundError()) + FileHandler.getFileSize.yields(new Errors.NotFoundError()) - this.res.end = () => { - expect(this.res.status.lastCall.args[0]).to.equal(404) - return done() + res.sendStatus = code => { + expect(code).to.equal(404) + done() } - return this.controller.getFileHead(this.req, this.res) + FileController.getFileHead(req, res) }) - return it('should return a 500 on internal errors', function(done) { - 
this.FileHandler.getFileSize.yields(new Error()) + it('should return a 500 on internal errors', function(done) { + FileHandler.getFileSize.yields(new Error()) - this.res.end = () => { - expect(this.res.status.lastCall.args[0]).to.equal(500) - return done() + res.sendStatus = code => { + expect(code).to.equal(500) + done() } - return this.controller.getFileHead(this.req, this.res) + FileController.getFileHead(req, res) }) }) - describe('insertFile', () => + describe('insertFile', function() { it('should send bucket name key and res to PersistorManager', function(done) { - this.FileHandler.insertFile.callsArgWith(3) - this.res.send = () => { - this.FileHandler.insertFile - .calledWith(this.bucket, this.key, this.req) - .should.equal(true) - return done() + res.sendStatus = code => { + expect(FileHandler.insertFile).to.have.been.calledWith(bucket, key, req) + expect(code).to.equal(200) + done() } - return this.controller.insertFile(this.req, this.res) - })) + FileController.insertFile(req, res) + }) + }) describe('copyFile', function() { + const oldFileId = 'oldFileId' + const oldProjectId = 'oldProjectid' + const oldKey = `${oldProjectId}/${oldFileId}` + beforeEach(function() { - this.oldFile_id = 'old_file_id' - this.oldProject_id = 'old_project_id' - return (this.req.body = { + req.body = { source: { - project_id: this.oldProject_id, - file_id: this.oldFile_id + project_id: oldProjectId, + file_id: oldFileId } - }) + } }) it('should send bucket name and both keys to PersistorManager', function(done) { - this.PersistorManager.copyFile.callsArgWith(3) - this.res.send = code => { + res.sendStatus = code => { code.should.equal(200) - this.PersistorManager.copyFile - .calledWith( - this.bucket, - `${this.oldProject_id}/${this.oldFile_id}`, - this.key - ) - .should.equal(true) - return done() + expect(PersistorManager.copyFile).to.have.been.calledWith( + bucket, + oldKey, + key + ) + done() } - return this.controller.copyFile(this.req, this.res) + FileController.copyFile(req, res) }) it('should send a 404 if the original file was not found', function(done) { - this.PersistorManager.copyFile.callsArgWith( - 3, - new this.Errors.NotFoundError() - ) - this.res.send = code => { + PersistorManager.copyFile.yields(new Errors.NotFoundError()) + res.sendStatus = code => { code.should.equal(404) - return done() + done() } - return this.controller.copyFile(this.req, this.res) + FileController.copyFile(req, res) }) - return it('should send a 500 if there was an error', function(done) { - this.PersistorManager.copyFile.callsArgWith(3, 'error') - this.res.send = code => { + it('should send a 500 if there was an error', function(done) { + PersistorManager.copyFile.yields('error') + res.sendStatus = code => { code.should.equal(500) - return done() + done() } - return this.controller.copyFile(this.req, this.res) + FileController.copyFile(req, res) }) }) describe('delete file', function() { it('should tell the file handler', function(done) { - this.FileHandler.deleteFile.callsArgWith(2) - this.res.send = code => { + res.sendStatus = code => { code.should.equal(204) - this.FileHandler.deleteFile - .calledWith(this.bucket, this.key) - .should.equal(true) - return done() + expect(FileHandler.deleteFile).to.have.been.calledWith(bucket, key) + done() } - return this.controller.deleteFile(this.req, this.res) + FileController.deleteFile(req, res) }) - return it('should send a 500 if there was an error', function(done) { - this.FileHandler.deleteFile.callsArgWith(2, 'error') - this.res.send = function(code) { + 
it('should send a 500 if there was an error', function(done) { + FileHandler.deleteFile.yields('error') + res.sendStatus = code => { code.should.equal(500) - return done() + done() } - return this.controller.deleteFile(this.req, this.res) + FileController.deleteFile(req, res) }) }) - describe('_get_range', function() { - it('should parse a valid Range header', function(done) { - const result = this.controller._get_range('bytes=0-200') - expect(result).to.not.equal(null) - expect(result.start).to.equal(0) - expect(result.end).to.equal(200) - return done() - }) - - it('should return null for an invalid Range header', function(done) { - const result = this.controller._get_range('wat') - expect(result).to.equal(null) - return done() - }) - - return it("should return null for any type other than 'bytes'", function(done) { - const result = this.controller._get_range('carrots=0-200') - expect(result).to.equal(null) - return done() - }) - }) - - return describe('directorySize', function() { + describe('directorySize', function() { it('should return total directory size bytes', function(done) { - this.FileHandler.getDirectorySize.callsArgWith(2, null, 1024) - return this.controller.directorySize(this.req, { + FileController.directorySize(req, { json: result => { - expect(result['total bytes']).to.equal(1024) - return done() + expect(result['total bytes']).to.equal(fileSize) + done() } }) }) - return it('should send a 500 if there was an error', function(done) { - this.FileHandler.getDirectorySize.callsArgWith(2, 'error') - this.res.send = function(code) { + it('should send a 500 if there was an error', function(done) { + FileHandler.getDirectorySize.callsArgWith(2, 'error') + res.sendStatus = code => { code.should.equal(500) - return done() + done() } - return this.controller.directorySize(this.req, this.res) + FileController.directorySize(req, res) }) }) }) From 10f708791237c1e3fc166a404751a0df86b01368 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 2 Jan 2020 16:12:07 +0000 Subject: [PATCH 374/555] Post-decaf cleanup of KeyBuilderTests --- .../filestore/test/unit/js/KeybuilderTests.js | 48 +++++++------------ 1 file changed, 16 insertions(+), 32 deletions(-) diff --git a/services/filestore/test/unit/js/KeybuilderTests.js b/services/filestore/test/unit/js/KeybuilderTests.js index 09a0ea8717..5271e892ed 100644 --- a/services/filestore/test/unit/js/KeybuilderTests.js +++ b/services/filestore/test/unit/js/KeybuilderTests.js @@ -1,26 +1,13 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
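The KeyBuilder tests below pin down the converted-cache key format; summarised from the expected values in those tests (the require path here is illustrative):

const KeyBuilder = require('./app/js/KeyBuilder')

const key = 'wombat/potato'

// format and style are appended under a '-converted-cache/' suffix,
// and format always comes before style when both are present
KeyBuilder.addCachingToKey(key, { format: 'png' })
// => 'wombat/potato-converted-cache/format-png'
KeyBuilder.addCachingToKey(key, { style: 'thumbnail' })
// => 'wombat/potato-converted-cache/style-thumbnail'
KeyBuilder.addCachingToKey(key, { format: 'png', style: 'thumbnail' })
// => 'wombat/potato-converted-cache/format-png-style-thumbnail'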
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ - -const { assert } = require('chai') -const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() -const { expect } = chai -const modulePath = '../../../app/js/KeyBuilder.js' const SandboxedModule = require('sandboxed-module') +const modulePath = '../../../app/js/KeyBuilder.js' + describe('LocalFileWriter', function() { + let KeyBuilder + const key = 'wombat/potato' + beforeEach(function() { - this.keyBuilder = SandboxedModule.require(modulePath, { + KeyBuilder = SandboxedModule.require(modulePath, { requires: { 'logger-sharelatex': { log() {}, @@ -28,31 +15,28 @@ describe('LocalFileWriter', function() { } } }) - return (this.key = '123/456') }) - return describe('cachedKey', function() { - it('should add the fomat on', function() { + describe('cachedKey', function() { + it('should add the format to the key', function() { const opts = { format: 'png' } - const newKey = this.keyBuilder.addCachingToKey(this.key, opts) - return newKey.should.equal(`${this.key}-converted-cache/format-png`) + const newKey = KeyBuilder.addCachingToKey(key, opts) + newKey.should.equal(`${key}-converted-cache/format-png`) }) - it('should add the style on', function() { + it('should add the style to the key', function() { const opts = { style: 'thumbnail' } - const newKey = this.keyBuilder.addCachingToKey(this.key, opts) - return newKey.should.equal(`${this.key}-converted-cache/style-thumbnail`) + const newKey = KeyBuilder.addCachingToKey(key, opts) + newKey.should.equal(`${key}-converted-cache/style-thumbnail`) }) - return it('should add format on first', function() { + it('should add format first, then style', function() { const opts = { style: 'thumbnail', format: 'png' } - const newKey = this.keyBuilder.addCachingToKey(this.key, opts) - return newKey.should.equal( - `${this.key}-converted-cache/format-png-style-thumbnail` - ) + const newKey = KeyBuilder.addCachingToKey(key, opts) + newKey.should.equal(`${key}-converted-cache/format-png-style-thumbnail`) }) }) }) From dbfacce98822036574945451250b1f71b2537403 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 2 Jan 2020 16:37:47 +0000 Subject: [PATCH 375/555] Post-decaf cleanup of PersistorManager --- services/filestore/app/js/PersistorManager.js | 50 ++--- .../test/unit/js/PersistorManagerTests.js | 185 ++++++------------ 2 files changed, 74 insertions(+), 161 deletions(-) diff --git a/services/filestore/app/js/PersistorManager.js b/services/filestore/app/js/PersistorManager.js index 182e39b085..f8ca7b9d2c 100644 --- a/services/filestore/app/js/PersistorManager.js +++ b/services/filestore/app/js/PersistorManager.js @@ -1,37 +1,19 @@ -// TODO: This file was created by bulk-decaffeinate. -// Sanity-check the conversion and remove this comment. -/* - * decaffeinate suggestions: - * DS103: Rewrite code to no longer use __guard__ - * DS205: Consider reworking code to avoid use of IIFEs - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const settings = require('settings-sharelatex') const logger = require('logger-sharelatex') -// assume s3 if none specified -__guard__( - settings != null ? 
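For context on the stubs above: PersistorManager chooses its implementation from settings.filestore.backend at require time, so a deployment selects a backend with a settings fragment roughly like this (a sketch; real configs also set buckets, paths or credentials):

// settings sketch, keys other than 'backend' omitted for brevity
module.exports = {
  filestore: {
    // one of 'fs', 's3' or 'aws-sdk'; a missing or unknown value throws at startup
    backend: 'fs'
  }
}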
settings.filestore : undefined, - x => x.backend || (settings.filestore.backend = 's3') -) +module.exports = (function() { + logger.log( + { + backend: settings.filestore.backend + }, + 'Loading backend' + ) -logger.log( - { - backend: __guard__( - settings != null ? settings.filestore : undefined, - x1 => x1.backend - ) - }, - 'Loading backend' -) -module.exports = (() => { - switch ( - __guard__( - settings != null ? settings.filestore : undefined, - x2 => x2.backend - ) - ) { + if (!settings.filestore.backend) { + throw new Error('no backend specified - config incomplete') + } + + switch (settings.filestore.backend) { case 'aws-sdk': return require('./AWSSDKPersistorManager') case 's3': @@ -40,13 +22,7 @@ module.exports = (() => { return require('./FSPersistorManager') default: throw new Error( - `Unknown filestore backend: ${settings.filestore.backend}` + `unknown filestore backend: ${settings.filestore.backend}` ) } })() - -function __guard__(value, transform) { - return typeof value !== 'undefined' && value !== null - ? transform(value) - : undefined -} diff --git a/services/filestore/test/unit/js/PersistorManagerTests.js b/services/filestore/test/unit/js/PersistorManagerTests.js index ff49c05ce9..d8fd887265 100644 --- a/services/filestore/test/unit/js/PersistorManagerTests.js +++ b/services/filestore/test/unit/js/PersistorManagerTests.js @@ -1,137 +1,74 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const logger = require('logger-sharelatex') -const { assert } = require('chai') const sinon = require('sinon') const chai = require('chai') -const should = chai.should() const { expect } = chai -const modulePath = '../../../app/js/PersistorManager.js' const SandboxedModule = require('sandboxed-module') -describe('PersistorManagerTests', function() { +const modulePath = '../../../app/js/PersistorManager.js' + +describe('PersistorManager', function() { + let PersistorManager, + FSPersistorManager, + S3PersistorManager, + settings, + requires + beforeEach(function() { - return (this.S3PersistorManager = { - getFileStream: sinon.stub(), - checkIfFileExists: sinon.stub(), - deleteFile: sinon.stub(), - deleteDirectory: sinon.stub(), - sendStream: sinon.stub(), - insertFile: sinon.stub() - }) + FSPersistorManager = { + wrappedMethod: sinon.stub().returns('FSPersistorManager') + } + S3PersistorManager = { + wrappedMethod: sinon.stub().returns('S3PersistorManager') + } + + settings = { + filestore: {} + } + + requires = { + './S3PersistorManager': S3PersistorManager, + './FSPersistorManager': FSPersistorManager, + 'settings-sharelatex': settings, + 'logger-sharelatex': { + log() {}, + err() {} + } + } }) - describe('test s3 mixin', function() { - beforeEach(function() { - this.settings = { - filestore: { - backend: 's3' - } - } - this.requires = { - './S3PersistorManager': this.S3PersistorManager, - 'settings-sharelatex': this.settings, - 'logger-sharelatex': { - log() {}, - err() {} - } - } - return (this.PersistorManager = SandboxedModule.require(modulePath, { - requires: this.requires - })) - }) + it('should implement the S3 wrapped method when S3 is configured', function() { + settings.filestore.backend = 's3' + PersistorManager = SandboxedModule.require(modulePath, { requires }) 
- it('should load getFileStream', function(done) { - this.PersistorManager.should.respondTo('getFileStream') - this.PersistorManager.getFileStream() - this.S3PersistorManager.getFileStream.calledOnce.should.equal(true) - return done() - }) - - it('should load checkIfFileExists', function(done) { - this.PersistorManager.checkIfFileExists() - this.S3PersistorManager.checkIfFileExists.calledOnce.should.equal(true) - return done() - }) - - it('should load deleteFile', function(done) { - this.PersistorManager.deleteFile() - this.S3PersistorManager.deleteFile.calledOnce.should.equal(true) - return done() - }) - - it('should load deleteDirectory', function(done) { - this.PersistorManager.deleteDirectory() - this.S3PersistorManager.deleteDirectory.calledOnce.should.equal(true) - return done() - }) - - it('should load sendStream', function(done) { - this.PersistorManager.sendStream() - this.S3PersistorManager.sendStream.calledOnce.should.equal(true) - return done() - }) - - return it('should load insertFile', function(done) { - this.PersistorManager.insertFile() - this.S3PersistorManager.insertFile.calledOnce.should.equal(true) - return done() - }) + expect(PersistorManager).to.respondTo('wrappedMethod') + expect(PersistorManager.wrappedMethod()).to.equal('S3PersistorManager') }) - describe('test unspecified mixins', () => - it('should load s3 when no wrapper specified', function(done) { - this.settings = { filestore: {} } - this.requires = { - './S3PersistorManager': this.S3PersistorManager, - 'settings-sharelatex': this.settings, - 'logger-sharelatex': { - log() {}, - err() {} - } - } - this.PersistorManager = SandboxedModule.require(modulePath, { - requires: this.requires - }) - this.PersistorManager.should.respondTo('getFileStream') - this.PersistorManager.getFileStream() - this.S3PersistorManager.getFileStream.calledOnce.should.equal(true) - return done() - })) + it('should implement the FS wrapped method when FS is configured', function() { + settings.filestore.backend = 'fs' + PersistorManager = SandboxedModule.require(modulePath, { requires }) - return describe('test invalid mixins', () => - it('should not load an invalid wrapper', function(done) { - this.settings = { - filestore: { - backend: 'magic' - } - } - this.requires = { - './S3PersistorManager': this.S3PersistorManager, - 'settings-sharelatex': this.settings, - 'logger-sharelatex': { - log() {}, - err() {} - } - } - this.fsWrapper = null - try { - this.PersistorManager = SandboxedModule.require(modulePath, { - requires: this.requires - }) - } catch (error) { - assert.equal('Unknown filestore backend: magic', error.message) - } - assert.isNull(this.fsWrapper) - return done() - })) + expect(PersistorManager).to.respondTo('wrappedMethod') + expect(PersistorManager.wrappedMethod()).to.equal('FSPersistorManager') + }) + + it('should throw an error when the backend is not configured', function() { + try { + SandboxedModule.require(modulePath, { requires }) + } catch (err) { + expect(err.message).to.equal('no backend specified - config incomplete') + return + } + expect('should have caught an error').not.to.exist + }) + + it('should throw an error when the backend is unknown', function() { + settings.filestore.backend = 'magic' + try { + SandboxedModule.require(modulePath, { requires }) + } catch (err) { + expect(err.message).to.equal('unknown filestore backend: magic') + return + } + expect('should have caught an error').not.to.exist + }) }) From 0329c759dc99f836e511e9c38c4a1e1410385e22 Mon Sep 17 00:00:00 2001 From: Simon Detheridge 
Date: Sat, 4 Jan 2020 17:27:12 +0000 Subject: [PATCH 376/555] Remove wrapper function in PersistorManager --- services/filestore/app/js/PersistorManager.js | 46 +++++++++---------- 1 file changed, 22 insertions(+), 24 deletions(-) diff --git a/services/filestore/app/js/PersistorManager.js b/services/filestore/app/js/PersistorManager.js index f8ca7b9d2c..8124d66101 100644 --- a/services/filestore/app/js/PersistorManager.js +++ b/services/filestore/app/js/PersistorManager.js @@ -1,28 +1,26 @@ const settings = require('settings-sharelatex') const logger = require('logger-sharelatex') -module.exports = (function() { - logger.log( - { - backend: settings.filestore.backend - }, - 'Loading backend' - ) +logger.log( + { + backend: settings.filestore.backend + }, + 'Loading backend' +) +if (!settings.filestore.backend) { + throw new Error('no backend specified - config incomplete') +} - if (!settings.filestore.backend) { - throw new Error('no backend specified - config incomplete') - } - - switch (settings.filestore.backend) { - case 'aws-sdk': - return require('./AWSSDKPersistorManager') - case 's3': - return require('./S3PersistorManager') - case 'fs': - return require('./FSPersistorManager') - default: - throw new Error( - `unknown filestore backend: ${settings.filestore.backend}` - ) - } -})() +switch (settings.filestore.backend) { + case 'aws-sdk': + module.exports = require('./AWSSDKPersistorManager') + break + case 's3': + module.exports = require('./S3PersistorManager') + break + case 'fs': + module.exports = require('./FSPersistorManager') + break + default: + throw new Error(`unknown filestore backend: ${settings.filestore.backend}`) +} From fce275e1d4b7864b3d64ee238aeb7b4d26d3af3e Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Fri, 3 Jan 2020 10:06:19 +0000 Subject: [PATCH 377/555] Post-decaf cleanup of app.js --- services/filestore/app.js | 125 ++++-------------- services/filestore/app/js/ExceptionHandler.js | 97 ++++++++++++++ .../test/acceptance/js/FilestoreApp.js | 1 + 3 files changed, 127 insertions(+), 96 deletions(-) create mode 100644 services/filestore/app/js/ExceptionHandler.js diff --git a/services/filestore/app.js b/services/filestore/app.js index 9e76107ea6..d80514738c 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -1,89 +1,40 @@ -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -/* eslint-disable node/no-deprecated-api */ const Metrics = require('metrics-sharelatex') +const logger = require('logger-sharelatex') + Metrics.initialize('filestore') +logger.initialize('filestore') + +const settings = require('settings-sharelatex') const express = require('express') const bodyParser = require('body-parser') -let logger = require('logger-sharelatex') -logger.initialize('filestore') -const settings = require('settings-sharelatex') + const fileController = require('./app/js/FileController') const bucketController = require('./app/js/BucketController') const keyBuilder = require('./app/js/KeyBuilder') const healthCheckController = require('./app/js/HealthCheckController') -const domain = require('domain') -let appIsOk = true -const app = express() +const ExceptionHandler = require('./app/js/ExceptionHandler') +const exceptionHandler = new ExceptionHandler() -if ((settings.sentry != null ? 
settings.sentry.dsn : undefined) != null) { +const app = express() +app.exceptionHandler = exceptionHandler + +if (settings.sentry && settings.sentry.dsn) { logger.initializeErrorReporting(settings.sentry.dsn) } Metrics.open_sockets.monitor(logger) -if (Metrics.event_loop != null) { +Metrics.memory.monitor(logger) +if (Metrics.event_loop) { Metrics.event_loop.monitor(logger) } -Metrics.memory.monitor(logger) app.use(Metrics.http.monitor(logger)) - app.use(function(req, res, next) { Metrics.inc('http-request') - return next() + next() }) -app.use(function(req, res, next) { - const requestDomain = domain.create() - requestDomain.add(req) - requestDomain.add(res) - requestDomain.on('error', function(err) { - try { - // request a shutdown to prevent memory leaks - beginShutdown() - if (!res.headerSent) { - res.send(500, 'uncaught exception') - } - logger = require('logger-sharelatex') - req = { - body: req.body, - headers: req.headers, - url: req.url, - key: req.key, - statusCode: req.statusCode - } - err = { - message: err.message, - stack: err.stack, - name: err.name, - type: err.type, - arguments: err.arguments - } - return logger.err( - { err, req, res }, - 'uncaught exception thrown on request' - ) - } catch (exception) { - return logger.err( - { err: exception }, - 'exception in request domain handler' - ) - } - }) - return requestDomain.run(next) -}) - -app.use(function(req, res, next) { - if (!appIsOk) { - // when shutting down, close any HTTP keep-alive connections - res.set('Connection', 'close') - } - return next() -}) +exceptionHandler.addMiddleware(app) Metrics.injectMetricsRoute(app) @@ -108,7 +59,7 @@ app.put( bodyParser.json(), fileController.copyFile ) -app.del( +app.delete( '/project/:project_id/file/:file_id', keyBuilder.userFileKeyMiddleware, fileController.deleteFile @@ -156,7 +107,7 @@ app.put( bodyParser.json(), fileController.copyFile ) -app.del( +app.delete( '/project/:project_id/public/:public_file_id', keyBuilder.publicFileKeyMiddleware, fileController.deleteFile @@ -183,68 +134,50 @@ app.get('/heapdump', (req, res, next) => ) app.post('/shutdown', function(req, res) { - appIsOk = false - return res.send() + exceptionHandler.setNotOk() + res.sendStatus(200) }) app.get('/status', function(req, res) { - if (appIsOk) { - return res.send('filestore sharelatex up') + if (exceptionHandler.appIsOk()) { + res.send('filestore sharelatex up') } else { logger.log('app is not ok - shutting down') - return res.send('server is being shut down', 500) + res.send('server is being shut down').status(500) } }) app.get('/health_check', healthCheckController.check) -app.get('*', (req, res) => res.send(404)) - -var beginShutdown = function() { - if (appIsOk) { - appIsOk = false - // hard-terminate this process if graceful shutdown fails - const killTimer = setTimeout(() => process.exit(1), 120 * 1000) - if (typeof killTimer.unref === 'function') { - killTimer.unref() - } // prevent timer from keeping process alive - server.close(function() { - logger.log('closed all connections') - Metrics.close() - return typeof process.disconnect === 'function' - ? 
process.disconnect() - : undefined - }) - return logger.log('server will stop accepting connections') - } -} +app.get('*', (req, res) => res.sendStatus(404)) const port = settings.internal.filestore.port || 3009 const host = '0.0.0.0' if (!module.parent) { // Called directly - var server = app.listen(port, host, error => { + const server = app.listen(port, host, error => { if (error) { logger.error('Error starting Filestore', error) throw error } logger.info(`Filestore starting up, listening on ${host}:${port}`) }) + exceptionHandler.server = server } module.exports = app process.on('SIGTERM', function() { logger.log('filestore got SIGTERM, shutting down gracefully') - return beginShutdown() + exceptionHandler.beginShutdown() }) -if (global.gc != null) { +if (global.gc) { const oneMinute = 60 * 1000 const gcTimer = setInterval(function() { global.gc() - return logger.log(process.memoryUsage(), 'global.gc') + logger.log(process.memoryUsage(), 'global.gc') }, 3 * oneMinute) gcTimer.unref() } diff --git a/services/filestore/app/js/ExceptionHandler.js b/services/filestore/app/js/ExceptionHandler.js new file mode 100644 index 0000000000..122e78805c --- /dev/null +++ b/services/filestore/app/js/ExceptionHandler.js @@ -0,0 +1,97 @@ +const Metrics = require('metrics-sharelatex') +const logger = require('logger-sharelatex') + +// TODO: domain has been deprecated for some time - do we need it and is there a better way? + +// eslint-disable-next-line node/no-deprecated-api +const domain = require('domain') + +const TWO_MINUTES = 120 * 1000 + +class ExceptionHandler { + constructor() { + this._appIsOk = true + } + + beginShutdown() { + if (this._appIsOk) { + this._appIsOk = false + + // hard-terminate this process if graceful shutdown fails + const killTimer = setTimeout(() => process.exit(1), TWO_MINUTES) + + if (typeof killTimer.unref === 'function') { + killTimer.unref() + } // prevent timer from keeping process alive + + this.server.close(function() { + logger.log('closed all connections') + Metrics.close() + if (typeof process.disconnect === 'function') { + process.disconnect() + } + }) + logger.log('server will stop accepting connections') + } + } + + addMiddleware(app) { + app.use(this.middleware.bind(this)) + } + + appIsOk() { + return this._appIsOk + } + + setNotOk() { + this._appIsOk = false + } + + middleware(req, res, next) { + const rescueLogger = require('logger-sharelatex') + const requestDomain = domain.create() + requestDomain.add(req) + requestDomain.add(res) + requestDomain.on('error', err => { + try { + // request a shutdown to prevent memory leaks + this.beginShutdown() + if (!res.headerSent) { + res.send('uncaught exception').status(500) + } + req = { + body: req.body, + headers: req.headers, + url: req.url, + key: req.key, + statusCode: req.statusCode + } + err = { + message: err.message, + stack: err.stack, + name: err.name, + type: err.type, + arguments: err.arguments + } + rescueLogger.err( + { err, req, res }, + 'uncaught exception thrown on request' + ) + } catch (exception) { + rescueLogger.err( + { err: exception }, + 'exception in request domain handler' + ) + } + }) + + if (!this._appIsOk) { + // when shutting down, close any HTTP keep-alive connections + res.set('Connection', 'close') + } + + requestDomain.run(next) + } +} + +module.exports = ExceptionHandler diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 718d53bcf8..32ec7a5adf 100644 --- 
a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -44,6 +44,7 @@ class FilestoreApp { resolve() } ) + this.app.exceptionHandler.server = this.server }) if (Settings.filestore.backend === 's3') { From 6a679023d30bc13cbc9fb18a1ba96f7547363cd3 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Sat, 4 Jan 2020 17:15:21 +0000 Subject: [PATCH 378/555] Fix order of .status().send() --- services/filestore/app.js | 2 +- services/filestore/app/js/ExceptionHandler.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index d80514738c..1e84440eb6 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -143,7 +143,7 @@ app.get('/status', function(req, res) { res.send('filestore sharelatex up') } else { logger.log('app is not ok - shutting down') - res.send('server is being shut down').status(500) + res.status(500).send('server is being shut down') } }) diff --git a/services/filestore/app/js/ExceptionHandler.js b/services/filestore/app/js/ExceptionHandler.js index 122e78805c..d52c00bb2c 100644 --- a/services/filestore/app/js/ExceptionHandler.js +++ b/services/filestore/app/js/ExceptionHandler.js @@ -57,7 +57,7 @@ class ExceptionHandler { // request a shutdown to prevent memory leaks this.beginShutdown() if (!res.headerSent) { - res.send('uncaught exception').status(500) + res.status(500).send('uncaught exception') } req = { body: req.body, From 85d3c0a852d1ebc6b0118dc7cc477b87bb0fc13d Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 6 Jan 2020 15:43:24 +0000 Subject: [PATCH 379/555] Remove old exception-handling and shutdown-related mechanisms --- services/filestore/app.js | 36 +------ services/filestore/app/js/ExceptionHandler.js | 97 ------------------- .../test/acceptance/js/FilestoreApp.js | 1 - 3 files changed, 2 insertions(+), 132 deletions(-) delete mode 100644 services/filestore/app/js/ExceptionHandler.js diff --git a/services/filestore/app.js b/services/filestore/app.js index 1e84440eb6..232c5b24bc 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -12,11 +12,8 @@ const fileController = require('./app/js/FileController') const bucketController = require('./app/js/BucketController') const keyBuilder = require('./app/js/KeyBuilder') const healthCheckController = require('./app/js/HealthCheckController') -const ExceptionHandler = require('./app/js/ExceptionHandler') -const exceptionHandler = new ExceptionHandler() const app = express() -app.exceptionHandler = exceptionHandler if (settings.sentry && settings.sentry.dsn) { logger.initializeErrorReporting(settings.sentry.dsn) @@ -34,8 +31,6 @@ app.use(function(req, res, next) { next() }) -exceptionHandler.addMiddleware(app) - Metrics.injectMetricsRoute(app) app.head( @@ -133,51 +128,24 @@ app.get('/heapdump', (req, res, next) => ) ) -app.post('/shutdown', function(req, res) { - exceptionHandler.setNotOk() - res.sendStatus(200) -}) - app.get('/status', function(req, res) { - if (exceptionHandler.appIsOk()) { - res.send('filestore sharelatex up') - } else { - logger.log('app is not ok - shutting down') - res.status(500).send('server is being shut down') - } + res.send('filestore sharelatex up') }) app.get('/health_check', healthCheckController.check) -app.get('*', (req, res) => res.sendStatus(404)) - const port = settings.internal.filestore.port || 3009 const host = '0.0.0.0' if (!module.parent) { // Called directly - const server = app.listen(port, host, error 
=> { + app.listen(port, host, error => { if (error) { logger.error('Error starting Filestore', error) throw error } logger.info(`Filestore starting up, listening on ${host}:${port}`) }) - exceptionHandler.server = server } module.exports = app - -process.on('SIGTERM', function() { - logger.log('filestore got SIGTERM, shutting down gracefully') - exceptionHandler.beginShutdown() -}) - -if (global.gc) { - const oneMinute = 60 * 1000 - const gcTimer = setInterval(function() { - global.gc() - logger.log(process.memoryUsage(), 'global.gc') - }, 3 * oneMinute) - gcTimer.unref() -} diff --git a/services/filestore/app/js/ExceptionHandler.js b/services/filestore/app/js/ExceptionHandler.js deleted file mode 100644 index d52c00bb2c..0000000000 --- a/services/filestore/app/js/ExceptionHandler.js +++ /dev/null @@ -1,97 +0,0 @@ -const Metrics = require('metrics-sharelatex') -const logger = require('logger-sharelatex') - -// TODO: domain has been deprecated for some time - do we need it and is there a better way? - -// eslint-disable-next-line node/no-deprecated-api -const domain = require('domain') - -const TWO_MINUTES = 120 * 1000 - -class ExceptionHandler { - constructor() { - this._appIsOk = true - } - - beginShutdown() { - if (this._appIsOk) { - this._appIsOk = false - - // hard-terminate this process if graceful shutdown fails - const killTimer = setTimeout(() => process.exit(1), TWO_MINUTES) - - if (typeof killTimer.unref === 'function') { - killTimer.unref() - } // prevent timer from keeping process alive - - this.server.close(function() { - logger.log('closed all connections') - Metrics.close() - if (typeof process.disconnect === 'function') { - process.disconnect() - } - }) - logger.log('server will stop accepting connections') - } - } - - addMiddleware(app) { - app.use(this.middleware.bind(this)) - } - - appIsOk() { - return this._appIsOk - } - - setNotOk() { - this._appIsOk = false - } - - middleware(req, res, next) { - const rescueLogger = require('logger-sharelatex') - const requestDomain = domain.create() - requestDomain.add(req) - requestDomain.add(res) - requestDomain.on('error', err => { - try { - // request a shutdown to prevent memory leaks - this.beginShutdown() - if (!res.headerSent) { - res.status(500).send('uncaught exception') - } - req = { - body: req.body, - headers: req.headers, - url: req.url, - key: req.key, - statusCode: req.statusCode - } - err = { - message: err.message, - stack: err.stack, - name: err.name, - type: err.type, - arguments: err.arguments - } - rescueLogger.err( - { err, req, res }, - 'uncaught exception thrown on request' - ) - } catch (exception) { - rescueLogger.err( - { err: exception }, - 'exception in request domain handler' - ) - } - }) - - if (!this._appIsOk) { - // when shutting down, close any HTTP keep-alive connections - res.set('Connection', 'close') - } - - requestDomain.run(next) - } -} - -module.exports = ExceptionHandler diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 32ec7a5adf..718d53bcf8 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -44,7 +44,6 @@ class FilestoreApp { resolve() } ) - this.app.exceptionHandler.server = this.server }) if (Settings.filestore.backend === 's3') { From 74b480fc55bd5475cf4c7e39aa724f110967f708 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Fri, 3 Jan 2020 10:06:19 +0000 Subject: [PATCH 380/555] Post-decaf cleanup of app.js --- 
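Note: this commit re-adds the ExceptionHandler wiring that the preceding commit removed. As a minimal sketch of how app.js ends up using the handler, assembled only from the hunks below (the settings lookup for port and host is replaced by its literal defaults from the diff, and the error handling inside the listen callback is elided), the wiring looks roughly like this:

    const express = require('express')
    const ExceptionHandler = require('./app/js/ExceptionHandler')

    const exceptionHandler = new ExceptionHandler()
    const app = express()
    app.exceptionHandler = exceptionHandler // exposed so the acceptance tests can set exceptionHandler.server
    exceptionHandler.addMiddleware(app) // domain-based middleware: logs uncaught request exceptions and requests a shutdown

    // ... routes, /shutdown and /status endpoints using setNotOk() / appIsOk() ...

    const server = app.listen(3009, '0.0.0.0') // defaults taken from settings.internal.filestore in the diff
    exceptionHandler.server = server // beginShutdown() closes this server, then Metrics, with a two-minute kill timer

    process.on('SIGTERM', () => exceptionHandler.beginShutdown())

This is an illustrative sketch only; the authoritative wiring, including the shutdown-aware Connection: close handling, is in the hunks that follow.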
services/filestore/app.js | 34 ++++++- services/filestore/app/js/ExceptionHandler.js | 97 +++++++++++++++++++ .../test/acceptance/js/FilestoreApp.js | 1 + 3 files changed, 131 insertions(+), 1 deletion(-) create mode 100644 services/filestore/app/js/ExceptionHandler.js diff --git a/services/filestore/app.js b/services/filestore/app.js index 232c5b24bc..6bf68f6a34 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -12,8 +12,11 @@ const fileController = require('./app/js/FileController') const bucketController = require('./app/js/BucketController') const keyBuilder = require('./app/js/KeyBuilder') const healthCheckController = require('./app/js/HealthCheckController') +const ExceptionHandler = require('./app/js/ExceptionHandler') +const exceptionHandler = new ExceptionHandler() const app = express() +app.exceptionHandler = exceptionHandler if (settings.sentry && settings.sentry.dsn) { logger.initializeErrorReporting(settings.sentry.dsn) @@ -31,6 +34,8 @@ app.use(function(req, res, next) { next() }) +exceptionHandler.addMiddleware(app) + Metrics.injectMetricsRoute(app) app.head( @@ -128,12 +133,24 @@ app.get('/heapdump', (req, res, next) => ) ) +app.post('/shutdown', function(req, res) { + exceptionHandler.setNotOk() + res.sendStatus(200) +}) + app.get('/status', function(req, res) { - res.send('filestore sharelatex up') + if (exceptionHandler.appIsOk()) { + res.send('filestore sharelatex up') + } else { + logger.log('app is not ok - shutting down') + res.send('server is being shut down').status(500) + } }) app.get('/health_check', healthCheckController.check) +app.get('*', (req, res) => res.sendStatus(404)) + const port = settings.internal.filestore.port || 3009 const host = '0.0.0.0' @@ -146,6 +163,21 @@ if (!module.parent) { } logger.info(`Filestore starting up, listening on ${host}:${port}`) }) + exceptionHandler.server = server } module.exports = app + +process.on('SIGTERM', function() { + logger.log('filestore got SIGTERM, shutting down gracefully') + exceptionHandler.beginShutdown() +}) + +if (global.gc) { + const oneMinute = 60 * 1000 + const gcTimer = setInterval(function() { + global.gc() + logger.log(process.memoryUsage(), 'global.gc') + }, 3 * oneMinute) + gcTimer.unref() +} diff --git a/services/filestore/app/js/ExceptionHandler.js b/services/filestore/app/js/ExceptionHandler.js new file mode 100644 index 0000000000..122e78805c --- /dev/null +++ b/services/filestore/app/js/ExceptionHandler.js @@ -0,0 +1,97 @@ +const Metrics = require('metrics-sharelatex') +const logger = require('logger-sharelatex') + +// TODO: domain has been deprecated for some time - do we need it and is there a better way? 
+ +// eslint-disable-next-line node/no-deprecated-api +const domain = require('domain') + +const TWO_MINUTES = 120 * 1000 + +class ExceptionHandler { + constructor() { + this._appIsOk = true + } + + beginShutdown() { + if (this._appIsOk) { + this._appIsOk = false + + // hard-terminate this process if graceful shutdown fails + const killTimer = setTimeout(() => process.exit(1), TWO_MINUTES) + + if (typeof killTimer.unref === 'function') { + killTimer.unref() + } // prevent timer from keeping process alive + + this.server.close(function() { + logger.log('closed all connections') + Metrics.close() + if (typeof process.disconnect === 'function') { + process.disconnect() + } + }) + logger.log('server will stop accepting connections') + } + } + + addMiddleware(app) { + app.use(this.middleware.bind(this)) + } + + appIsOk() { + return this._appIsOk + } + + setNotOk() { + this._appIsOk = false + } + + middleware(req, res, next) { + const rescueLogger = require('logger-sharelatex') + const requestDomain = domain.create() + requestDomain.add(req) + requestDomain.add(res) + requestDomain.on('error', err => { + try { + // request a shutdown to prevent memory leaks + this.beginShutdown() + if (!res.headerSent) { + res.send('uncaught exception').status(500) + } + req = { + body: req.body, + headers: req.headers, + url: req.url, + key: req.key, + statusCode: req.statusCode + } + err = { + message: err.message, + stack: err.stack, + name: err.name, + type: err.type, + arguments: err.arguments + } + rescueLogger.err( + { err, req, res }, + 'uncaught exception thrown on request' + ) + } catch (exception) { + rescueLogger.err( + { err: exception }, + 'exception in request domain handler' + ) + } + }) + + if (!this._appIsOk) { + // when shutting down, close any HTTP keep-alive connections + res.set('Connection', 'close') + } + + requestDomain.run(next) + } +} + +module.exports = ExceptionHandler diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 718d53bcf8..32ec7a5adf 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -44,6 +44,7 @@ class FilestoreApp { resolve() } ) + this.app.exceptionHandler.server = this.server }) if (Settings.filestore.backend === 's3') { From 87b51d0c3516e9694dd28c97a9a0cebdfa10c5b5 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 6 Jan 2020 15:43:24 +0000 Subject: [PATCH 381/555] Remove old exception-handling and shutdown-related mechanisms --- services/filestore/app.js | 34 +------ services/filestore/app/js/ExceptionHandler.js | 97 ------------------- .../test/acceptance/js/FilestoreApp.js | 1 - 3 files changed, 1 insertion(+), 131 deletions(-) delete mode 100644 services/filestore/app/js/ExceptionHandler.js diff --git a/services/filestore/app.js b/services/filestore/app.js index 6bf68f6a34..232c5b24bc 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -12,11 +12,8 @@ const fileController = require('./app/js/FileController') const bucketController = require('./app/js/BucketController') const keyBuilder = require('./app/js/KeyBuilder') const healthCheckController = require('./app/js/HealthCheckController') -const ExceptionHandler = require('./app/js/ExceptionHandler') -const exceptionHandler = new ExceptionHandler() const app = express() -app.exceptionHandler = exceptionHandler if (settings.sentry && settings.sentry.dsn) { logger.initializeErrorReporting(settings.sentry.dsn) @@ -34,8 +31,6 @@ 
app.use(function(req, res, next) { next() }) -exceptionHandler.addMiddleware(app) - Metrics.injectMetricsRoute(app) app.head( @@ -133,24 +128,12 @@ app.get('/heapdump', (req, res, next) => ) ) -app.post('/shutdown', function(req, res) { - exceptionHandler.setNotOk() - res.sendStatus(200) -}) - app.get('/status', function(req, res) { - if (exceptionHandler.appIsOk()) { - res.send('filestore sharelatex up') - } else { - logger.log('app is not ok - shutting down') - res.send('server is being shut down').status(500) - } + res.send('filestore sharelatex up') }) app.get('/health_check', healthCheckController.check) -app.get('*', (req, res) => res.sendStatus(404)) - const port = settings.internal.filestore.port || 3009 const host = '0.0.0.0' @@ -163,21 +146,6 @@ if (!module.parent) { } logger.info(`Filestore starting up, listening on ${host}:${port}`) }) - exceptionHandler.server = server } module.exports = app - -process.on('SIGTERM', function() { - logger.log('filestore got SIGTERM, shutting down gracefully') - exceptionHandler.beginShutdown() -}) - -if (global.gc) { - const oneMinute = 60 * 1000 - const gcTimer = setInterval(function() { - global.gc() - logger.log(process.memoryUsage(), 'global.gc') - }, 3 * oneMinute) - gcTimer.unref() -} diff --git a/services/filestore/app/js/ExceptionHandler.js b/services/filestore/app/js/ExceptionHandler.js deleted file mode 100644 index 122e78805c..0000000000 --- a/services/filestore/app/js/ExceptionHandler.js +++ /dev/null @@ -1,97 +0,0 @@ -const Metrics = require('metrics-sharelatex') -const logger = require('logger-sharelatex') - -// TODO: domain has been deprecated for some time - do we need it and is there a better way? - -// eslint-disable-next-line node/no-deprecated-api -const domain = require('domain') - -const TWO_MINUTES = 120 * 1000 - -class ExceptionHandler { - constructor() { - this._appIsOk = true - } - - beginShutdown() { - if (this._appIsOk) { - this._appIsOk = false - - // hard-terminate this process if graceful shutdown fails - const killTimer = setTimeout(() => process.exit(1), TWO_MINUTES) - - if (typeof killTimer.unref === 'function') { - killTimer.unref() - } // prevent timer from keeping process alive - - this.server.close(function() { - logger.log('closed all connections') - Metrics.close() - if (typeof process.disconnect === 'function') { - process.disconnect() - } - }) - logger.log('server will stop accepting connections') - } - } - - addMiddleware(app) { - app.use(this.middleware.bind(this)) - } - - appIsOk() { - return this._appIsOk - } - - setNotOk() { - this._appIsOk = false - } - - middleware(req, res, next) { - const rescueLogger = require('logger-sharelatex') - const requestDomain = domain.create() - requestDomain.add(req) - requestDomain.add(res) - requestDomain.on('error', err => { - try { - // request a shutdown to prevent memory leaks - this.beginShutdown() - if (!res.headerSent) { - res.send('uncaught exception').status(500) - } - req = { - body: req.body, - headers: req.headers, - url: req.url, - key: req.key, - statusCode: req.statusCode - } - err = { - message: err.message, - stack: err.stack, - name: err.name, - type: err.type, - arguments: err.arguments - } - rescueLogger.err( - { err, req, res }, - 'uncaught exception thrown on request' - ) - } catch (exception) { - rescueLogger.err( - { err: exception }, - 'exception in request domain handler' - ) - } - }) - - if (!this._appIsOk) { - // when shutting down, close any HTTP keep-alive connections - res.set('Connection', 'close') - } - - requestDomain.run(next) - 
} -} - -module.exports = ExceptionHandler diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 32ec7a5adf..718d53bcf8 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -44,7 +44,6 @@ class FilestoreApp { resolve() } ) - this.app.exceptionHandler.server = this.server }) if (Settings.filestore.backend === 's3') { From e27cf4db7bcb846505935eae612cc80550919ec6 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Fri, 3 Jan 2020 17:42:28 +0000 Subject: [PATCH 382/555] Pass pipeline errors onto 'next' error handler --- services/filestore/app/js/FileController.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index dbba9a93cc..726438a557 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -17,7 +17,7 @@ module.exports = { directorySize } -function getFile(req, res) { +function getFile(req, res, next) { const { key, bucket } = req const { format, style } = req.query const options = { @@ -61,7 +61,8 @@ function getFile(req, res) { } logger.log({ key, bucket, format, style }, 'sending file to response') - pipeline(fileStream, res) + + pipeline(fileStream, res, next) }) } From 473aea4e60437096dffe77750413563a0fd030fa Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Fri, 3 Jan 2020 18:21:21 +0000 Subject: [PATCH 383/555] Remove AWSSDKPersistorManager --- .../app/js/AWSSDKPersistorManager.js | 197 ------- .../unit/js/AWSSDKPersistorManagerTests.js | 509 ------------------ 2 files changed, 706 deletions(-) delete mode 100644 services/filestore/app/js/AWSSDKPersistorManager.js delete mode 100644 services/filestore/test/unit/js/AWSSDKPersistorManagerTests.js diff --git a/services/filestore/app/js/AWSSDKPersistorManager.js b/services/filestore/app/js/AWSSDKPersistorManager.js deleted file mode 100644 index 4dbc836280..0000000000 --- a/services/filestore/app/js/AWSSDKPersistorManager.js +++ /dev/null @@ -1,197 +0,0 @@ -/* eslint-disable - handle-callback-err, - no-return-assign, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -// This module is not used in production, which currently uses -// S3PersistorManager. The intention is to migrate S3PersistorManager to use the -// latest aws-sdk and delete this module so that PersistorManager would load the -// same backend for both the 's3' and 'aws-sdk' options. 
- -const logger = require('logger-sharelatex') -const aws = require('aws-sdk') -const _ = require('underscore') -const fs = require('fs') -const Errors = require('./Errors') - -const s3 = new aws.S3() - -module.exports = { - sendFile(bucketName, key, fsPath, callback) { - logger.log({ bucketName, key }, 'send file data to s3') - const stream = fs.createReadStream(fsPath) - return s3.upload({ Bucket: bucketName, Key: key, Body: stream }, function( - err, - data - ) { - if (err != null) { - logger.err( - { err, Bucket: bucketName, Key: key }, - 'error sending file data to s3' - ) - } - return callback(err) - }) - }, - - sendStream(bucketName, key, stream, callback) { - logger.log({ bucketName, key }, 'send file stream to s3') - return s3.upload({ Bucket: bucketName, Key: key, Body: stream }, function( - err, - data - ) { - if (err != null) { - logger.err( - { err, Bucket: bucketName, Key: key }, - 'error sending file stream to s3' - ) - } - return callback(err) - }) - }, - - getFileStream(bucketName, key, opts, callback) { - if (callback == null) { - callback = function(err, res) {} - } - logger.log({ bucketName, key }, 'get file stream from s3') - callback = _.once(callback) - const params = { - Bucket: bucketName, - Key: key - } - if (opts.start != null && opts.end != null) { - params.Range = `bytes=${opts.start}-${opts.end}` - } - const request = s3.getObject(params) - const stream = request.createReadStream() - stream.on('readable', () => callback(null, stream)) - return stream.on('error', function(err) { - logger.err({ err, bucketName, key }, 'error getting file stream from s3') - if (err.code === 'NoSuchKey') { - return callback( - new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`) - ) - } - return callback(err) - }) - }, - - copyFile(bucketName, sourceKey, destKey, callback) { - logger.log({ bucketName, sourceKey, destKey }, 'copying file in s3') - const source = bucketName + '/' + sourceKey - return s3.copyObject( - { Bucket: bucketName, Key: destKey, CopySource: source }, - function(err) { - if (err != null) { - logger.err( - { err, bucketName, sourceKey, destKey }, - 'something went wrong copying file in s3' - ) - } - return callback(err) - } - ) - }, - - deleteFile(bucketName, key, callback) { - logger.log({ bucketName, key }, 'delete file in s3') - return s3.deleteObject({ Bucket: bucketName, Key: key }, function(err) { - if (err != null) { - logger.err( - { err, bucketName, key }, - 'something went wrong deleting file in s3' - ) - } - return callback(err) - }) - }, - - deleteDirectory(bucketName, key, callback) { - logger.log({ bucketName, key }, 'delete directory in s3') - return s3.listObjects({ Bucket: bucketName, Prefix: key }, function( - err, - data - ) { - if (err != null) { - logger.err( - { err, bucketName, key }, - 'something went wrong listing prefix in s3' - ) - return callback(err) - } - if (data.Contents.length === 0) { - logger.log({ bucketName, key }, 'the directory is empty') - return callback() - } - const keys = _.map(data.Contents, entry => ({ - Key: entry.Key - })) - return s3.deleteObjects( - { - Bucket: bucketName, - Delete: { - Objects: keys, - Quiet: true - } - }, - function(err) { - if (err != null) { - logger.err( - { err, bucketName, key: keys }, - 'something went wrong deleting directory in s3' - ) - } - return callback(err) - } - ) - }) - }, - - checkIfFileExists(bucketName, key, callback) { - logger.log({ bucketName, key }, 'check file existence in s3') - return s3.headObject({ Bucket: bucketName, Key: key }, function(err, data) 
{ - if (err != null) { - if (err.code === 'NotFound') { - return callback(null, false) - } - logger.err( - { err, bucketName, key }, - 'something went wrong checking head in s3' - ) - return callback(err) - } - return callback(null, data.ETag != null) - }) - }, - - directorySize(bucketName, key, callback) { - logger.log({ bucketName, key }, 'get project size in s3') - return s3.listObjects({ Bucket: bucketName, Prefix: key }, function( - err, - data - ) { - if (err != null) { - logger.err( - { err, bucketName, key }, - 'something went wrong listing prefix in s3' - ) - return callback(err) - } - if (data.Contents.length === 0) { - logger.log({ bucketName, key }, 'the directory is empty') - return callback() - } - let totalSize = 0 - _.each(data.Contents, entry => (totalSize += entry.Size)) - return callback(null, totalSize) - }) - } -} diff --git a/services/filestore/test/unit/js/AWSSDKPersistorManagerTests.js b/services/filestore/test/unit/js/AWSSDKPersistorManagerTests.js deleted file mode 100644 index ea88da71c3..0000000000 --- a/services/filestore/test/unit/js/AWSSDKPersistorManagerTests.js +++ /dev/null @@ -1,509 +0,0 @@ -/* eslint-disable - handle-callback-err, - no-dupe-keys, - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const sinon = require('sinon') -const chai = require('chai') - -const should = chai.should() -const { expect } = chai - -const modulePath = '../../../app/js/AWSSDKPersistorManager.js' -const SandboxedModule = require('sandboxed-module') - -describe('AWSSDKPersistorManager', function() { - beforeEach(function() { - this.settings = { - filestore: { - backend: 'aws-sdk' - } - } - this.s3 = { - upload: sinon.stub(), - getObject: sinon.stub(), - copyObject: sinon.stub(), - deleteObject: sinon.stub(), - listObjects: sinon.stub(), - deleteObjects: sinon.stub(), - headObject: sinon.stub() - } - this.awssdk = { S3: sinon.stub().returns(this.s3) } - - this.requires = { - 'aws-sdk': this.awssdk, - 'settings-sharelatex': this.settings, - 'logger-sharelatex': { - log() {}, - err() {} - }, - fs: (this.fs = { createReadStream: sinon.stub() }), - './Errors': (this.Errors = { NotFoundError: sinon.stub() }) - } - this.key = 'my/key' - this.bucketName = 'my-bucket' - this.error = 'my error' - return (this.AWSSDKPersistorManager = SandboxedModule.require(modulePath, { - requires: this.requires - })) - }) - - describe('sendFile', function() { - beforeEach(function() { - this.stream = {} - this.fsPath = '/usr/local/some/file' - return this.fs.createReadStream.returns(this.stream) - }) - - it('should put the file with s3.upload', function(done) { - this.s3.upload.callsArgWith(1) - return this.AWSSDKPersistorManager.sendFile( - this.bucketName, - this.key, - this.fsPath, - err => { - expect(err).to.not.be.ok - expect(this.s3.upload.calledOnce, 'called only once').to.be.true - expect( - this.s3.upload.calledWith({ - Bucket: this.bucketName, - Key: this.key, - Body: this.stream - }), - 'called with correct arguments' - ).to.be.true - return done() - } - ) - }) - - return it('should dispatch the error from s3.upload', function(done) { - this.s3.upload.callsArgWith(1, this.error) - return this.AWSSDKPersistorManager.sendFile( - this.bucketName, - this.key, - this.fsPath, - err => { - 
expect(err).to.equal(this.error) - return done() - } - ) - }) - }) - - describe('sendStream', function() { - beforeEach(function() { - return (this.stream = {}) - }) - - it('should put the file with s3.upload', function(done) { - this.s3.upload.callsArgWith(1) - return this.AWSSDKPersistorManager.sendStream( - this.bucketName, - this.key, - this.stream, - err => { - expect(err).to.not.be.ok - expect(this.s3.upload.calledOnce, 'called only once').to.be.true - expect( - this.s3.upload.calledWith({ - Bucket: this.bucketName, - Key: this.key, - Body: this.stream - }), - 'called with correct arguments' - ).to.be.true - return done() - } - ) - }) - - return it('should dispatch the error from s3.upload', function(done) { - this.s3.upload.callsArgWith(1, this.error) - return this.AWSSDKPersistorManager.sendStream( - this.bucketName, - this.key, - this.stream, - err => { - expect(err).to.equal(this.error) - return done() - } - ) - }) - }) - - describe('getFileStream', function() { - beforeEach(function() { - this.opts = {} - this.stream = {} - this.read_stream = { on: (this.read_stream_on = sinon.stub()) } - this.object = { createReadStream: sinon.stub().returns(this.read_stream) } - return this.s3.getObject.returns(this.object) - }) - - it('should return a stream from s3.getObject', function(done) { - this.read_stream_on.withArgs('readable').callsArgWith(1) - - return this.AWSSDKPersistorManager.getFileStream( - this.bucketName, - this.key, - this.opts, - (err, stream) => { - expect(this.read_stream_on.calledTwice) - expect(err).to.not.be.ok - expect(stream, 'returned the stream').to.equal(this.read_stream) - expect( - this.s3.getObject.calledWith({ - Bucket: this.bucketName, - Key: this.key - }), - 'called with correct arguments' - ).to.be.true - return done() - } - ) - }) - - describe('with start and end options', function() { - beforeEach(function() { - return (this.opts = { - start: 0, - end: 8 - }) - }) - return it('should pass headers to the s3.GetObject', function(done) { - this.read_stream_on.withArgs('readable').callsArgWith(1) - this.AWSSDKPersistorManager.getFileStream( - this.bucketName, - this.key, - this.opts, - (err, stream) => { - return expect( - this.s3.getObject.calledWith({ - Bucket: this.bucketName, - Key: this.key, - Range: 'bytes=0-8' - }), - 'called with correct arguments' - ).to.be.true - } - ) - return done() - }) - }) - - return describe('error conditions', function() { - describe("when the file doesn't exist", function() { - beforeEach(function() { - this.error = new Error() - return (this.error.code = 'NoSuchKey') - }) - return it('should produce a NotFoundError', function(done) { - this.read_stream_on.withArgs('error').callsArgWith(1, this.error) - return this.AWSSDKPersistorManager.getFileStream( - this.bucketName, - this.key, - this.opts, - (err, stream) => { - expect(stream).to.not.be.ok - expect(err).to.be.ok - expect( - err instanceof this.Errors.NotFoundError, - 'error is a correct instance' - ).to.equal(true) - return done() - } - ) - }) - }) - - return describe('when there is some other error', function() { - beforeEach(function() { - return (this.error = new Error()) - }) - return it('should dispatch the error from s3 object stream', function(done) { - this.read_stream_on.withArgs('error').callsArgWith(1, this.error) - return this.AWSSDKPersistorManager.getFileStream( - this.bucketName, - this.key, - this.opts, - (err, stream) => { - expect(stream).to.not.be.ok - expect(err).to.be.ok - expect(err).to.equal(this.error) - return done() - } - ) - }) - }) - }) - 
}) - - describe('copyFile', function() { - beforeEach(function() { - this.destKey = 'some/key' - return (this.stream = {}) - }) - - it('should copy the file with s3.copyObject', function(done) { - this.s3.copyObject.callsArgWith(1) - return this.AWSSDKPersistorManager.copyFile( - this.bucketName, - this.key, - this.destKey, - err => { - expect(err).to.not.be.ok - expect(this.s3.copyObject.calledOnce, 'called only once').to.be.true - expect( - this.s3.copyObject.calledWith({ - Bucket: this.bucketName, - Key: this.destKey, - CopySource: this.bucketName + '/' + this.key - }), - 'called with correct arguments' - ).to.be.true - return done() - } - ) - }) - - return it('should dispatch the error from s3.copyObject', function(done) { - this.s3.copyObject.callsArgWith(1, this.error) - return this.AWSSDKPersistorManager.copyFile( - this.bucketName, - this.key, - this.destKey, - err => { - expect(err).to.equal(this.error) - return done() - } - ) - }) - }) - - describe('deleteFile', function() { - it('should delete the file with s3.deleteObject', function(done) { - this.s3.deleteObject.callsArgWith(1) - return this.AWSSDKPersistorManager.deleteFile( - this.bucketName, - this.key, - err => { - expect(err).to.not.be.ok - expect(this.s3.deleteObject.calledOnce, 'called only once').to.be.true - expect( - this.s3.deleteObject.calledWith({ - Bucket: this.bucketName, - Key: this.key - }), - 'called with correct arguments' - ).to.be.true - return done() - } - ) - }) - - return it('should dispatch the error from s3.deleteObject', function(done) { - this.s3.deleteObject.callsArgWith(1, this.error) - return this.AWSSDKPersistorManager.deleteFile( - this.bucketName, - this.key, - err => { - expect(err).to.equal(this.error) - return done() - } - ) - }) - }) - - describe('deleteDirectory', function() { - it('should list the directory content using s3.listObjects', function(done) { - this.s3.listObjects.callsArgWith(1, null, { Contents: [] }) - return this.AWSSDKPersistorManager.deleteDirectory( - this.bucketName, - this.key, - err => { - expect(err).to.not.be.ok - expect(this.s3.listObjects.calledOnce, 'called only once').to.be.true - expect( - this.s3.listObjects.calledWith({ - Bucket: this.bucketName, - Prefix: this.key - }), - 'called with correct arguments' - ).to.be.true - return done() - } - ) - }) - - it('should dispatch the error from s3.listObjects', function(done) { - this.s3.listObjects.callsArgWith(1, this.error) - return this.AWSSDKPersistorManager.deleteDirectory( - this.bucketName, - this.key, - err => { - expect(err).to.equal(this.error) - return done() - } - ) - }) - - return describe('with directory content', function() { - beforeEach(function() { - return (this.fileList = [{ Key: 'foo' }, { Key: 'bar', Key: 'baz' }]) - }) - - it('should forward the file keys to s3.deleteObjects', function(done) { - this.s3.listObjects.callsArgWith(1, null, { Contents: this.fileList }) - this.s3.deleteObjects.callsArgWith(1) - return this.AWSSDKPersistorManager.deleteDirectory( - this.bucketName, - this.key, - err => { - expect(err).to.not.be.ok - expect(this.s3.deleteObjects.calledOnce, 'called only once').to.be - .true - expect( - this.s3.deleteObjects.calledWith({ - Bucket: this.bucketName, - Delete: { - Quiet: true, - Objects: this.fileList - } - }), - 'called with correct arguments' - ).to.be.true - return done() - } - ) - }) - - return it('should dispatch the error from s3.deleteObjects', function(done) { - this.s3.listObjects.callsArgWith(1, null, { Contents: this.fileList }) - 
this.s3.deleteObjects.callsArgWith(1, this.error) - return this.AWSSDKPersistorManager.deleteDirectory( - this.bucketName, - this.key, - err => { - expect(err).to.equal(this.error) - return done() - } - ) - }) - }) - }) - - describe('checkIfFileExists', function() { - it('should check for the file with s3.headObject', function(done) { - this.s3.headObject.callsArgWith(1, null, {}) - return this.AWSSDKPersistorManager.checkIfFileExists( - this.bucketName, - this.key, - (err, exists) => { - expect(err).to.not.be.ok - expect(this.s3.headObject.calledOnce, 'called only once').to.be.true - expect( - this.s3.headObject.calledWith({ - Bucket: this.bucketName, - Key: this.key - }), - 'called with correct arguments' - ).to.be.true - return done() - } - ) - }) - - it('should return false on an inexistant file', function(done) { - this.s3.headObject.callsArgWith(1, null, {}) - return this.AWSSDKPersistorManager.checkIfFileExists( - this.bucketName, - this.key, - (err, exists) => { - expect(exists).to.be.false - return done() - } - ) - }) - - it('should return true on an existing file', function(done) { - this.s3.headObject.callsArgWith(1, null, { ETag: 'etag' }) - return this.AWSSDKPersistorManager.checkIfFileExists( - this.bucketName, - this.key, - (err, exists) => { - expect(exists).to.be.true - return done() - } - ) - }) - - return it('should dispatch the error from s3.headObject', function(done) { - this.s3.headObject.callsArgWith(1, this.error) - return this.AWSSDKPersistorManager.checkIfFileExists( - this.bucketName, - this.key, - (err, exists) => { - expect(err).to.equal(this.error) - return done() - } - ) - }) - }) - - return describe('directorySize', function() { - it('should list the directory content using s3.listObjects', function(done) { - this.s3.listObjects.callsArgWith(1, null, { Contents: [] }) - return this.AWSSDKPersistorManager.directorySize( - this.bucketName, - this.key, - err => { - expect(err).to.not.be.ok - expect(this.s3.listObjects.calledOnce, 'called only once').to.be.true - expect( - this.s3.listObjects.calledWith({ - Bucket: this.bucketName, - Prefix: this.key - }), - 'called with correct arguments' - ).to.be.true - return done() - } - ) - }) - - it('should dispatch the error from s3.listObjects', function(done) { - this.s3.listObjects.callsArgWith(1, this.error) - return this.AWSSDKPersistorManager.directorySize( - this.bucketName, - this.key, - err => { - expect(err).to.equal(this.error) - return done() - } - ) - }) - - return it('should sum directory files sizes', function(done) { - this.s3.listObjects.callsArgWith(1, null, { - Contents: [{ Size: 1024 }, { Size: 2048 }] - }) - return this.AWSSDKPersistorManager.directorySize( - this.bucketName, - this.key, - (err, size) => { - expect(size).to.equal(3072) - return done() - } - ) - }) - }) -}) From 2ca74fdf15563092db8d2a1c45fe9717dbb45df1 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Fri, 3 Jan 2020 18:22:08 +0000 Subject: [PATCH 384/555] Cleanup and refactor S3PersistorManager to use aws-sdk only --- .../filestore/app/js/S3PersistorManager.js | 588 ++++----- services/filestore/npm-shrinkwrap.json | 34 +- services/filestore/package.json | 1 + .../test/unit/js/S3PersistorManagerTests.js | 1152 +++++++++-------- 4 files changed, 896 insertions(+), 879 deletions(-) diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3PersistorManager.js index cadf38172a..5f8bb34c5d 100644 --- a/services/filestore/app/js/S3PersistorManager.js +++ b/services/filestore/app/js/S3PersistorManager.js 
@@ -1,376 +1,258 @@ -/* eslint-disable - handle-callback-err, - new-cap, - no-return-assign, - no-unused-vars, - node/no-deprecated-api, - standard/no-callback-literal, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS101: Remove unnecessary use of Array.from - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -// This module is the one which is used in production. It needs to be migrated -// to use aws-sdk throughout, see the comments in AWSSDKPersistorManager for -// details. The knox library is unmaintained and has bugs. - const http = require('http') -http.globalAgent.maxSockets = 300 const https = require('https') +http.globalAgent.maxSockets = 300 https.globalAgent.maxSockets = 300 + const settings = require('settings-sharelatex') -const request = require('request') const logger = require('logger-sharelatex') const metrics = require('metrics-sharelatex') + +const meter = require('stream-meter') const fs = require('fs') -const knox = require('knox') -const path = require('path') -const LocalFileWriter = require('./LocalFileWriter') -const Errors = require('./Errors') -const _ = require('underscore') -const awsS3 = require('aws-sdk/clients/s3') -const URL = require('url') +const S3 = require('aws-sdk/clients/s3') +const { URL } = require('url') +const { callbackify } = require('util') +const { WriteError, ReadError, NotFoundError } = require('./Errors') -const thirtySeconds = 30 * 1000 - -const buildDefaultOptions = function(bucketName, method, key) { - let endpoint - if (settings.filestore.s3.endpoint) { - endpoint = `${settings.filestore.s3.endpoint}/${bucketName}` - } else { - endpoint = `https://${bucketName}.s3.amazonaws.com` - } - return { - aws: { - key: settings.filestore.s3.key, - secret: settings.filestore.s3.secret, - bucket: bucketName - }, - method, - timeout: thirtySeconds, - uri: `${endpoint}/${key}` +module.exports = { + sendFile: callbackify(sendFile), + sendStream: callbackify(sendStream), + getFileStream: callbackify(getFileStream), + deleteDirectory: callbackify(deleteDirectory), + getFileSize: callbackify(getFileSize), + deleteFile: callbackify(deleteFile), + copyFile: callbackify(copyFile), + checkIfFileExists: callbackify(checkIfFileExists), + getDirectorySize: callbackify(getDirectorySize), + promises: { + sendFile, + sendStream, + getFileStream, + deleteDirectory, + getFileSize, + deleteFile, + copyFile, + checkIfFileExists, + getDirectorySize } } -const getS3Options = function(credentials) { +const _client = new S3(_defaultOptions()) + +async function sendFile(bucketName, key, fsPath) { + let readStream + try { + readStream = fs.createReadStream(fsPath) + } catch (err) { + throw _wrapError( + err, + 'error reading file from disk', + { bucketName, key, fsPath }, + ReadError + ) + } + return sendStream(bucketName, key, readStream) +} + +async function sendStream(bucketName, key, readStream) { + try { + const meteredStream = meter() + meteredStream.on('finish', () => { + metrics.count('s3.egress', meteredStream.bytes) + }) + + const response = await _client + .upload({ + Bucket: bucketName, + Key: key, + Body: readStream.pipe(meteredStream) + }) + .promise() + + logger.log({ response, bucketName, key }, 'data uploaded to s3') + } catch (err) { + throw _wrapError( + err, + 'upload to S3 failed', + { 
bucketName, key }, + WriteError + ) + } +} + +async function getFileStream(bucketName, key, opts) { + opts = opts || {} + + const params = { + Bucket: bucketName, + Key: key + } + if (opts.start != null && opts.end != null) { + params.Range = `bytes=${opts.start}-${opts.end}` + } + + return new Promise((resolve, reject) => { + const stream = _client.getObject(params).createReadStream() + + const meteredStream = meter() + meteredStream.on('finish', () => { + metrics.count('s3.ingress', meteredStream.bytes) + }) + + const onStreamReady = function() { + stream.removeListener('readable', onStreamReady) + resolve(stream.pipe(meteredStream)) + } + stream.on('readable', onStreamReady) + stream.on('error', err => { + reject(_wrapError(err, 'error reading from S3', params, ReadError)) + }) + }) +} + +async function deleteDirectory(bucketName, key) { + logger.log({ key, bucketName }, 'deleting directory') + let response + + try { + response = await _client + .listObjects({ Bucket: bucketName, Prefix: key }) + .promise() + } catch (err) { + throw _wrapError( + err, + 'failed to list objects in S3', + { bucketName, key }, + ReadError + ) + } + + const objects = response.Contents.map(item => ({ Key: item.Key })) + if (objects.length) { + try { + await _client + .deleteObjects({ + Bucket: bucketName, + Delete: { + Objects: objects, + Quiet: true + } + }) + .promise() + } catch (err) { + throw _wrapError( + err, + 'failed to delete objects in S3', + { bucketName, key }, + WriteError + ) + } + } +} + +async function getFileSize(bucketName, key) { + try { + const response = await _client + .headObject({ Bucket: bucketName, Key: key }) + .promise() + return response.ContentLength + } catch (err) { + throw _wrapError( + err, + 'error getting size of s3 object', + { bucketName, key }, + ReadError + ) + } +} + +async function deleteFile(bucketName, key) { + try { + await _client.deleteObject({ Bucket: bucketName, Key: key }).promise() + } catch (err) { + throw _wrapError( + err, + 'failed to delete file in S3', + { bucketName, key }, + WriteError + ) + } +} + +async function copyFile(bucketName, sourceKey, destKey) { + const params = { + Bucket: bucketName, + Key: destKey, + CopySource: `${bucketName}/${sourceKey}` + } + try { + await _client.copyObject(params).promise() + } catch (err) { + throw _wrapError(err, 'failed to copy file in S3', params, WriteError) + } +} + +async function checkIfFileExists(bucketName, key) { + try { + await getFileSize(bucketName, key) + return true + } catch (err) { + if (err instanceof NotFoundError) { + return false + } + throw _wrapError( + err, + 'error checking whether S3 object exists', + { bucketName, key }, + ReadError + ) + } +} + +async function getDirectorySize(bucketName, key) { + try { + const response = await _client + .listObjects({ Bucket: bucketName, Prefix: key }) + .promise() + + return response.Contents.reduce((acc, item) => item.Size + acc, 0) + } catch (err) { + throw _wrapError( + err, + 'error getting directory size in S3', + { bucketName, key }, + ReadError + ) + } +} + +function _wrapError(error, message, params, ErrorType) { + if (['NoSuchKey', 'NotFound', 'ENOENT'].includes(error.code)) { + return new NotFoundError({ + message: 'no such file', + info: params + }).withCause(error) + } else { + return new ErrorType({ + message: message, + info: params + }).withCause(error) + } +} + +function _defaultOptions() { const options = { credentials: { - accessKeyId: credentials.auth_key, - secretAccessKey: credentials.auth_secret + accessKeyId: 
settings.filestore.s3.key, + secretAccessKey: settings.filestore.s3.secret } } if (settings.filestore.s3.endpoint) { - const endpoint = URL.parse(settings.filestore.s3.endpoint) + const endpoint = new URL(settings.filestore.s3.endpoint) options.endpoint = settings.filestore.s3.endpoint options.sslEnabled = endpoint.protocol === 'https' } return options } - -const defaultS3Client = new awsS3( - getS3Options({ - auth_key: settings.filestore.s3.key, - auth_secret: settings.filestore.s3.secret - }) -) - -const getS3Client = function(credentials) { - if (credentials != null) { - return new awsS3(getS3Options(credentials)) - } else { - return defaultS3Client - } -} - -const getKnoxClient = bucketName => { - const options = { - key: settings.filestore.s3.key, - secret: settings.filestore.s3.secret, - bucket: bucketName - } - if (settings.filestore.s3.endpoint) { - const endpoint = URL.parse(settings.filestore.s3.endpoint) - options.endpoint = endpoint.hostname - options.port = endpoint.port - } - return knox.createClient(options) -} - -module.exports = { - sendFile(bucketName, key, fsPath, callback) { - const s3Client = getKnoxClient(bucketName) - let uploaded = 0 - const putEventEmiter = s3Client.putFile(fsPath, key, function(err, res) { - metrics.count('s3.egress', uploaded) - if (err != null) { - logger.err( - { err, bucketName, key, fsPath }, - 'something went wrong uploading file to s3' - ) - return callback(err) - } - if (res == null) { - logger.err( - { err, res, bucketName, key, fsPath }, - 'no response from s3 put file' - ) - return callback('no response from put file') - } - if (res.statusCode !== 200) { - logger.err( - { bucketName, key, fsPath }, - 'non 200 response from s3 putting file' - ) - return callback('non 200 response from s3 on put file') - } - logger.log({ res, bucketName, key, fsPath }, 'file uploaded to s3') - return callback(err) - }) - putEventEmiter.on('error', function(err) { - logger.err( - { err, bucketName, key, fsPath }, - 'error emmited on put of file' - ) - return callback(err) - }) - return putEventEmiter.on( - 'progress', - progress => (uploaded = progress.written) - ) - }, - - sendStream(bucketName, key, readStream, callback) { - logger.log({ bucketName, key }, 'sending file to s3') - readStream.on('error', err => - logger.err({ bucketName, key }, 'error on stream to send to s3') - ) - return LocalFileWriter.writeStream(readStream, null, (err, fsPath) => { - if (err != null) { - logger.err( - { bucketName, key, fsPath, err }, - 'something went wrong writing stream to disk' - ) - return callback(err) - } - return this.sendFile(bucketName, key, fsPath, ( - err // delete the temporary file created above and return the original error - ) => LocalFileWriter.deleteFile(fsPath, () => callback(err))) - }) - }, - - // opts may be {start: Number, end: Number} - getFileStream(bucketName, key, opts, callback) { - if (callback == null) { - callback = function(err, res) {} - } - opts = opts || {} - callback = _.once(callback) - logger.log({ bucketName, key }, 'getting file from s3') - - const s3 = getS3Client(opts.credentials) - const s3Params = { - Bucket: bucketName, - Key: key - } - if (opts.start != null && opts.end != null) { - s3Params.Range = `bytes=${opts.start}-${opts.end}` - } - const s3Request = s3.getObject(s3Params) - - s3Request.on( - 'httpHeaders', - (statusCode, headers, response, statusMessage) => { - if ([403, 404].includes(statusCode)) { - // S3 returns a 403 instead of a 404 when the user doesn't have - // permission to list the bucket contents. 
- logger.log({ bucketName, key }, 'file not found in s3') - return callback( - new Errors.NotFoundError( - `File not found in S3: ${bucketName}:${key}` - ), - null - ) - } - if (![200, 206].includes(statusCode)) { - logger.log( - { bucketName, key }, - `error getting file from s3: ${statusCode}` - ) - return callback( - new Error( - `Got non-200 response from S3: ${statusCode} ${statusMessage}` - ), - null - ) - } - const stream = response.httpResponse.createUnbufferedStream() - stream.on('data', data => metrics.count('s3.ingress', data.byteLength)) - - return callback(null, stream) - } - ) - - s3Request.on('error', err => { - logger.err({ err, bucketName, key }, 'error getting file stream from s3') - return callback(err) - }) - - return s3Request.send() - }, - - getFileSize(bucketName, key, callback) { - logger.log({ bucketName, key }, 'getting file size from S3') - const s3 = getS3Client() - return s3.headObject({ Bucket: bucketName, Key: key }, function(err, data) { - if (err != null) { - if ([403, 404].includes(err.statusCode)) { - // S3 returns a 403 instead of a 404 when the user doesn't have - // permission to list the bucket contents. - logger.log( - { - bucketName, - key - }, - 'file not found in s3' - ) - callback( - new Errors.NotFoundError( - `File not found in S3: ${bucketName}:${key}` - ) - ) - } else { - logger.err( - { - bucketName, - key, - err - }, - 'error performing S3 HeadObject' - ) - callback(err) - } - return - } - return callback(null, data.ContentLength) - }) - }, - - copyFile(bucketName, sourceKey, destKey, callback) { - logger.log({ bucketName, sourceKey, destKey }, 'copying file in s3') - const source = bucketName + '/' + sourceKey - // use the AWS SDK instead of knox due to problems with error handling (https://github.com/Automattic/knox/issues/114) - const s3 = getS3Client() - return s3.copyObject( - { Bucket: bucketName, Key: destKey, CopySource: source }, - function(err) { - if (err != null) { - if (err.code === 'NoSuchKey') { - logger.err( - { bucketName, sourceKey }, - 'original file not found in s3 when copying' - ) - return callback( - new Errors.NotFoundError( - 'original file not found in S3 when copying' - ) - ) - } else { - logger.err( - { err, bucketName, sourceKey, destKey }, - 'something went wrong copying file in aws' - ) - return callback(err) - } - } else { - return callback() - } - } - ) - }, - - deleteFile(bucketName, key, callback) { - logger.log({ bucketName, key }, 'delete file in s3') - const options = buildDefaultOptions(bucketName, 'delete', key) - return request(options, function(err, res) { - if (err != null) { - logger.err( - { err, res, bucketName, key }, - 'something went wrong deleting file in aws' - ) - } - return callback(err) - }) - }, - - deleteDirectory(bucketName, key, _callback) { - // deleteMultiple can call the callback multiple times so protect against this. 
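// Editor's sketch, not part of the original patch: the knox-based deleteDirectory being
// removed below needed this once-only callback guard, while the rewritten version above
// lists the prefix with listObjects and removes the keys in a single deleteObjects call.
// The module is exposed both callback-style (via util.callbackify) and under a promises
// namespace, so both call styles below should work. The 'sl_user_files' bucket name comes
// from the settings used in the tests; the key prefix and cleanupProject helper are
// illustrative only:
const S3PersistorManager = require('./S3PersistorManager')

// callback style, for existing callers
S3PersistorManager.deleteDirectory('sl_user_files', 'some-project-id/', err => {
  if (err) {
    console.error('failed to delete project directory', err)
  }
})

// promise style (hypothetical helper)
async function cleanupProject(projectId) {
  await S3PersistorManager.promises.deleteDirectory('sl_user_files', `${projectId}/`)
}
// End of editor's sketch.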
- const callback = function(...args) { - _callback(...Array.from(args || [])) - return (_callback = function() {}) - } - - logger.log({ key, bucketName }, 'deleting directory') - const s3Client = getKnoxClient(bucketName) - return s3Client.list({ prefix: key }, function(err, data) { - if (err != null) { - logger.err( - { err, bucketName, key }, - 'something went wrong listing prefix in aws' - ) - return callback(err) - } - const keys = _.map(data.Contents, entry => entry.Key) - return s3Client.deleteMultiple(keys, callback) - }) - }, - - checkIfFileExists(bucketName, key, callback) { - logger.log({ bucketName, key }, 'checking if file exists in s3') - const options = buildDefaultOptions(bucketName, 'head', key) - return request(options, function(err, res) { - if (err != null) { - logger.err( - { err, res, bucketName, key }, - 'something went wrong checking file in aws' - ) - return callback(err) - } - if (res == null) { - logger.err( - { err, res, bucketName, key }, - 'no response object returned when checking if file exists' - ) - err = new Error(`no response from s3 ${bucketName} ${key}`) - return callback(err) - } - const exists = res.statusCode === 200 - logger.log({ bucketName, key, exists }, 'checked if file exsists in s3') - return callback(err, exists) - }) - }, - - directorySize(bucketName, key, callback) { - logger.log({ bucketName, key }, 'get project size in s3') - const s3Client = getKnoxClient(bucketName) - return s3Client.list({ prefix: key }, function(err, data) { - if (err != null) { - logger.err( - { err, bucketName, key }, - 'something went wrong listing prefix in aws' - ) - return callback(err) - } - let totalSize = 0 - _.each(data.Contents, entry => (totalSize += entry.Size)) - logger.log({ totalSize }, 'total size') - return callback(null, totalSize) - }) - } -} diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 3ed1400a61..8d78271caa 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -5018,6 +5018,38 @@ "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz", "integrity": "sha1-kc8lac5NxQYf6816yyY5SloRR1E=" }, + "stream-meter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/stream-meter/-/stream-meter-1.0.4.tgz", + "integrity": "sha1-Uq+Vql6nYKJJFxZwTb/5D3Ov3R0=", + "requires": { + "readable-stream": "^2.1.4" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } + } + }, "stream-shift": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", @@ -5531,7 +5563,7 @@ "xml2js": { "version": "0.4.19", "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz", - "integrity": "sha1-aGwg8hMgnpSr8NG88e+qKRx4J6c=", + "integrity": 
"sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==", "requires": { "sax": ">=0.6.0", "xmlbuilder": "~9.0.1" diff --git a/services/filestore/package.json b/services/filestore/package.json index 5d7c3e3ec1..14e35cd8a2 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -44,6 +44,7 @@ "settings-sharelatex": "^1.1.0", "stream-browserify": "^2.0.1", "stream-buffers": "~0.2.5", + "stream-meter": "^1.0.4", "underscore": "~1.5.2" }, "devDependencies": { diff --git a/services/filestore/test/unit/js/S3PersistorManagerTests.js b/services/filestore/test/unit/js/S3PersistorManagerTests.js index 777c9c5a32..3caf2db73d 100644 --- a/services/filestore/test/unit/js/S3PersistorManagerTests.js +++ b/services/filestore/test/unit/js/S3PersistorManagerTests.js @@ -1,618 +1,720 @@ -/* eslint-disable - handle-callback-err, - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const { assert } = require('chai') const sinon = require('sinon') const chai = require('chai') -const should = chai.should() const { expect } = chai const modulePath = '../../../app/js/S3PersistorManager.js' const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../app/js/Errors') + describe('S3PersistorManagerTests', function() { - beforeEach(function() { - this.settings = { - filestore: { - backend: 's3', - s3: { - secret: 'secret', - key: 'this_key' - }, - stores: { - user_files: 'sl_user_files' - } - } - } - this.knoxClient = { - putFile: sinon.stub(), - copyFile: sinon.stub(), - list: sinon.stub(), - deleteMultiple: sinon.stub(), - get: sinon.stub() - } - this.knox = { createClient: sinon.stub().returns(this.knoxClient) } - this.s3EventHandlers = {} - this.s3Request = { - on: sinon.stub().callsFake((event, callback) => { - return (this.s3EventHandlers[event] = callback) - }), - send: sinon.stub() - } - this.s3Response = { - httpResponse: { - createUnbufferedStream: sinon.stub() - } - } - this.s3Client = { - copyObject: sinon.stub(), - headObject: sinon.stub(), - getObject: sinon.stub().returns(this.s3Request) - } - this.awsS3 = sinon.stub().returns(this.s3Client) - this.LocalFileWriter = { - writeStream: sinon.stub(), - deleteFile: sinon.stub() - } - this.request = sinon.stub() - this.requires = { - knox: this.knox, - 'aws-sdk/clients/s3': this.awsS3, - 'settings-sharelatex': this.settings, - './LocalFileWriter': this.LocalFileWriter, - 'logger-sharelatex': { - log() {}, - err() {} + const settings = { + filestore: { + backend: 's3', + s3: { + secret: 'secret', + key: 'this_key' }, - request: this.request, - './Errors': (this.Errors = { NotFoundError: sinon.stub() }) + stores: { + user_files: 'sl_user_files' + } } - this.key = 'my/key' - this.bucketName = 'my-bucket' - this.error = 'my errror' - return (this.S3PersistorManager = SandboxedModule.require(modulePath, { - requires: this.requires - })) + } + const filename = '/wombat/potato.tex' + const bucket = 'womBucket' + const key = 'monKey' + const destKey = 'donKey' + const objectSize = 5555 + const genericError = new Error('guru meditation error') + const files = [ + { Key: 'llama', Size: 11 }, + { Key: 'hippo', Size: 22 } + ] + const 
filesSize = 33 + + let Metrics, + S3, + Fs, + Meter, + MeteredStream, + ReadStream, + S3PersistorManager, + S3Client, + S3ReadStream, + S3NotFoundError, + FileNotFoundError, + EmptyPromise + + beforeEach(function() { + EmptyPromise = { + promise: sinon.stub().resolves() + } + + Metrics = { + count: sinon.stub() + } + + ReadStream = { + pipe: sinon.stub().returns('readStream') + } + + FileNotFoundError = new Error('File not found') + FileNotFoundError.code = 'ENOENT' + + Fs = { + createReadStream: sinon.stub().returns(ReadStream) + } + + MeteredStream = { + on: sinon.stub(), + bytes: objectSize + } + MeteredStream.on.withArgs('finish').yields() + Meter = sinon.stub().returns(MeteredStream) + + S3NotFoundError = new Error('not found') + S3NotFoundError.code = 'NoSuchKey' + + S3ReadStream = { + on: sinon.stub(), + pipe: sinon.stub().returns('s3Stream'), + removeListener: sinon.stub() + } + S3ReadStream.on.withArgs('readable').yields() + S3Client = { + getObject: sinon.stub().returns({ + createReadStream: sinon.stub().returns(S3ReadStream) + }), + headObject: sinon.stub().returns({ + promise: sinon.stub().resolves({ + ContentLength: objectSize + }) + }), + listObjects: sinon.stub().returns({ + promise: sinon.stub().resolves({ + Contents: files + }) + }), + upload: sinon.stub().returns(EmptyPromise), + copyObject: sinon.stub().returns(EmptyPromise), + deleteObject: sinon.stub().returns(EmptyPromise), + deleteObjects: sinon.stub().returns(EmptyPromise) + } + S3 = sinon.stub().returns(S3Client) + + S3PersistorManager = SandboxedModule.require(modulePath, { + requires: { + 'aws-sdk/clients/s3': S3, + 'settings-sharelatex': settings, + './Errors': Errors, + fs: Fs, + 'stream-meter': Meter, + 'logger-sharelatex': { + log() {}, + err() {} + }, + 'metrics-sharelatex': Metrics + }, + globals: { console } + }) }) describe('getFileStream', function() { - describe('success', function() { - beforeEach(function() { - this.expectedStream = { expectedStream: true } - this.expectedStream.on = sinon.stub() - this.s3Request.send.callsFake(() => { - return this.s3EventHandlers.httpHeaders( - 200, - {}, - this.s3Response, - 'OK' - ) - }) - return this.s3Response.httpResponse.createUnbufferedStream.returns( - this.expectedStream - ) + describe('when called with valid parameters', function() { + let stream + + beforeEach(async function() { + stream = await S3PersistorManager.promises.getFileStream(bucket, key) }) - it('returns a stream', function(done) { - return this.S3PersistorManager.getFileStream( - this.bucketName, - this.key, - {}, - (err, stream) => { - if (err != null) { - return done(err) - } - expect(stream).to.equal(this.expectedStream) - return done() - } - ) + it('returns a stream', function() { + expect(stream).to.equal('s3Stream') }) - it('sets the AWS client up with credentials from settings', function(done) { - return this.S3PersistorManager.getFileStream( - this.bucketName, - this.key, - {}, - (err, stream) => { - if (err != null) { - return done(err) - } - expect(this.awsS3.lastCall.args).to.deep.equal([ - { - credentials: { - accessKeyId: this.settings.filestore.s3.key, - secretAccessKey: this.settings.filestore.s3.secret - } - } - ]) - return done() - } - ) - }) - - it('fetches the right key from the right bucket', function(done) { - return this.S3PersistorManager.getFileStream( - this.bucketName, - this.key, - {}, - (err, stream) => { - if (err != null) { - return done(err) - } - expect(this.s3Client.getObject.lastCall.args).to.deep.equal([ - { - Bucket: this.bucketName, - Key: this.key - } - 
]) - return done() - } - ) - }) - - it('accepts alternative credentials', function(done) { - const accessKeyId = 'that_key' - const secret = 'that_secret' - const opts = { + it('sets the AWS client up with credentials from settings', function() { + expect(S3).to.have.been.calledWith({ credentials: { - auth_key: accessKeyId, - auth_secret: secret + accessKeyId: settings.filestore.s3.key, + secretAccessKey: settings.filestore.s3.secret } - } - return this.S3PersistorManager.getFileStream( - this.bucketName, - this.key, - opts, - (err, stream) => { - if (err != null) { - return done(err) - } - expect(this.awsS3.lastCall.args).to.deep.equal([ - { - credentials: { - accessKeyId, - secretAccessKey: secret - } - } - ]) - expect(stream).to.equal(this.expectedStream) - return done() - } - ) + }) }) - return it('accepts byte range', function(done) { - const start = 0 - const end = 8 - const opts = { start, end } - return this.S3PersistorManager.getFileStream( - this.bucketName, - this.key, - opts, - (err, stream) => { - if (err != null) { - return done(err) - } - expect(this.s3Client.getObject.lastCall.args).to.deep.equal([ - { - Bucket: this.bucketName, - Key: this.key, - Range: `bytes=${start}-${end}` - } - ]) - expect(stream).to.equal(this.expectedStream) - return done() - } - ) + it('fetches the right key from the right bucket', function() { + expect(S3Client.getObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key + }) + }) + + it('pipes the stream through the meter', function() { + expect(S3ReadStream.pipe).to.have.been.calledWith(MeteredStream) + }) + + it('records an ingress metric', function() { + expect(Metrics.count).to.have.been.calledWith('s3.ingress', objectSize) }) }) - return describe('errors', function() { - describe("when the file doesn't exist", function() { - beforeEach(function() { - return this.s3Request.send.callsFake(() => { - return this.s3EventHandlers.httpHeaders( - 404, - {}, - this.s3Response, - 'Not found' - ) - }) - }) + describe('when called with a byte range', function() { + let stream - return it('returns a NotFoundError that indicates the bucket and key', function(done) { - return this.S3PersistorManager.getFileStream( - this.bucketName, - this.key, - {}, - (err, stream) => { - expect(err).to.be.instanceof(this.Errors.NotFoundError) - const errMsg = this.Errors.NotFoundError.lastCall.args[0] - expect(errMsg).to.match(new RegExp(`.*${this.bucketName}.*`)) - expect(errMsg).to.match(new RegExp(`.*${this.key}.*`)) - return done() - } - ) + beforeEach(async function() { + stream = await S3PersistorManager.promises.getFileStream(bucket, key, { + start: 5, + end: 10 }) }) - describe('when S3 encounters an unkown error', function() { - beforeEach(function() { - return this.s3Request.send.callsFake(() => { - return this.s3EventHandlers.httpHeaders( - 500, - {}, - this.s3Response, - 'Internal server error' - ) - }) - }) - - return it('returns an error', function(done) { - return this.S3PersistorManager.getFileStream( - this.bucketName, - this.key, - {}, - (err, stream) => { - expect(err).to.be.instanceof(Error) - return done() - } - ) - }) + it('returns a stream', function() { + expect(stream).to.equal('s3Stream') }) - return describe('when the S3 request errors out before receiving HTTP headers', function() { - beforeEach(function() { - return this.s3Request.send.callsFake(() => { - return this.s3EventHandlers.error(new Error('connection failed')) - }) + it('passes the byte range on to S3', function() { + expect(S3Client.getObject).to.have.been.calledWith({ + 
Bucket: bucket, + Key: key, + Range: 'bytes=5-10' }) + }) + }) - return it('returns an error', function(done) { - return this.S3PersistorManager.getFileStream( - this.bucketName, - this.key, - {}, - (err, stream) => { - expect(err).to.be.instanceof(Error) - return done() - } - ) - }) + describe("when the file doesn't exist", function() { + let error, stream + + beforeEach(async function() { + S3ReadStream.on = sinon.stub() + S3ReadStream.on.withArgs('error').yields(S3NotFoundError) + try { + stream = await S3PersistorManager.promises.getFileStream(bucket, key) + console.log(stream) + } catch (err) { + error = err + } + }) + + it('does not return a stream', function() { + expect(stream).not.to.exist + }) + + it('throws a NotFoundError', function() { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + + it('wraps the error from S3', function() { + expect(error.cause).to.equal(S3NotFoundError) + }) + + it('stores the bucket and key in the error', function() { + expect(error.info).to.deep.equal({ Bucket: bucket, Key: key }) + }) + }) + + describe('when S3 encounters an unkown error', function() { + let error, stream + + beforeEach(async function() { + S3ReadStream.on = sinon.stub() + S3ReadStream.on.withArgs('error').yields(genericError) + try { + stream = await S3PersistorManager.promises.getFileStream(bucket, key) + console.log(stream) + } catch (err) { + error = err + } + }) + + it('does not return a stream', function() { + expect(stream).not.to.exist + }) + + it('throws a ReadError', function() { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('wraps the error from S3', function() { + expect(error.cause).to.equal(genericError) + }) + + it('stores the bucket and key in the error', function() { + expect(error.info).to.deep.equal({ Bucket: bucket, Key: key }) }) }) }) describe('getFileSize', function() { - it('should obtain the file size from S3', function(done) { - const expectedFileSize = 123 - this.s3Client.headObject.yields( - new Error('s3Client.headObject got unexpected arguments') - ) - this.s3Client.headObject - .withArgs({ - Bucket: this.bucketName, - Key: this.key - }) - .yields(null, { ContentLength: expectedFileSize }) + describe('when called with valid parameters', function() { + let size - return this.S3PersistorManager.getFileSize( - this.bucketName, - this.key, - (err, fileSize) => { - if (err != null) { - return done(err) - } - expect(fileSize).to.equal(expectedFileSize) - return done() - } - ) - }) - ;[403, 404].forEach(statusCode => - it(`should throw NotFoundError when S3 responds with ${statusCode}`, function(done) { - const error = new Error() - error.statusCode = statusCode - this.s3Client.headObject.yields(error) - - return this.S3PersistorManager.getFileSize( - this.bucketName, - this.key, - (err, fileSize) => { - expect(err).to.be.an.instanceof(this.Errors.NotFoundError) - return done() - } - ) + beforeEach(async function() { + size = await S3PersistorManager.promises.getFileSize(bucket, key) }) - ) - return it('should rethrow any other error', function(done) { - const error = new Error() - this.s3Client.headObject.yields(error) - this.s3Client.headObject.yields(error) + it('should return the object size', function() { + expect(size).to.equal(objectSize) + }) - return this.S3PersistorManager.getFileSize( - this.bucketName, - this.key, - (err, fileSize) => { - expect(err).to.equal(error) - return done() - } - ) - }) - }) - - describe('sendFile', function() { - beforeEach(function() { - return this.knoxClient.putFile.returns({ on() {} }) 
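// Editor's sketch, not part of the original patch: where the removed tests stubbed
// knox's putFile, the new tests stub the AWS SDK request objects, which expose a
// promise() method. A minimal example of that pattern, simulating a missing object for
// headObject; the 'NoSuchKey' code mirrors the S3NotFoundError fixture defined earlier
// in this file, and stubbedS3Client is a hypothetical name used only for illustration:
const sinon = require('sinon')

const notFoundError = new Error('not found')
notFoundError.code = 'NoSuchKey'

const stubbedS3Client = {
  headObject: sinon.stub().returns({
    promise: sinon.stub().rejects(notFoundError)
  })
}
// stubbedS3Client.headObject(...).promise() now rejects, so a getFileSize call made
// against this stub should surface a NotFoundError to the caller.
// End of editor's sketch.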
+ it('should pass the bucket and key to S3', function() { + expect(S3Client.headObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key + }) + }) }) - it('should put file with knox', function(done) { - this.LocalFileWriter.deleteFile.callsArgWith(1) - this.knoxClient.putFile.callsArgWith(2, this.error) - return this.S3PersistorManager.sendFile( - this.bucketName, - this.key, - this.fsPath, - err => { - this.knoxClient.putFile - .calledWith(this.fsPath, this.key) - .should.equal(true) - err.should.equal(this.error) - return done() + describe('when the object is not found', function() { + let error + + beforeEach(async function() { + S3Client.headObject = sinon.stub().returns({ + promise: sinon.stub().rejects(S3NotFoundError) + }) + try { + await S3PersistorManager.promises.getFileSize(bucket, key) + } catch (err) { + error = err } - ) + }) + + it('should return a NotFoundError', function() { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + + it('should wrap the error', function() { + expect(error.cause).to.equal(S3NotFoundError) + }) }) - return it('should delete the file and pass the error with it', function(done) { - this.LocalFileWriter.deleteFile.callsArgWith(1) - this.knoxClient.putFile.callsArgWith(2, this.error) - return this.S3PersistorManager.sendFile( - this.bucketName, - this.key, - this.fsPath, - err => { - this.knoxClient.putFile - .calledWith(this.fsPath, this.key) - .should.equal(true) - err.should.equal(this.error) - return done() + describe('when S3 returns an error', function() { + let error + + beforeEach(async function() { + S3Client.headObject = sinon.stub().returns({ + promise: sinon.stub().rejects(genericError) + }) + try { + await S3PersistorManager.promises.getFileSize(bucket, key) + } catch (err) { + error = err } - ) + }) + + it('should return a ReadError', function() { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the error', function() { + expect(error.cause).to.equal(genericError) + }) }) }) describe('sendStream', function() { - beforeEach(function() { - this.fsPath = 'to/some/where' - this.origin = { on() {} } - return (this.S3PersistorManager.sendFile = sinon.stub().callsArgWith(3)) + describe('with valid parameters', function() { + beforeEach(async function() { + return S3PersistorManager.promises.sendStream(bucket, key, ReadStream) + }) + + it('should upload the stream', function() { + expect(S3Client.upload).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + Body: 'readStream' + }) + }) + + it('should meter the stream', function() { + expect(ReadStream.pipe).to.have.been.calledWith(MeteredStream) + }) + + it('should record an egress metric', function() { + expect(Metrics.count).to.have.been.calledWith('s3.egress', objectSize) + }) }) - it('should send stream to LocalFileWriter', function(done) { - this.LocalFileWriter.deleteFile.callsArgWith(1) - this.LocalFileWriter.writeStream.callsArgWith(2, null, this.fsPath) - return this.S3PersistorManager.sendStream( - this.bucketName, - this.key, - this.origin, - () => { - this.LocalFileWriter.writeStream - .calledWith(this.origin) - .should.equal(true) - return done() + describe('when the upload fails', function() { + let error + beforeEach(async function() { + S3Client.upload = sinon.stub().returns({ + promise: sinon.stub().rejects(genericError) + }) + try { + await S3PersistorManager.promises.sendStream(bucket, key, ReadStream) + } catch (err) { + error = err } - ) + }) + + it('throws a WriteError', function() { + 
expect(error).to.be.an.instanceOf(Errors.WriteError) + }) + }) + }) + + describe('sendFile', function() { + describe('with valid parameters', function() { + beforeEach(async function() { + return S3PersistorManager.promises.sendFile(bucket, key, filename) + }) + + it('should create a read stream for the file', function() { + expect(Fs.createReadStream).to.have.been.calledWith(filename) + }) + + it('should upload the stream', function() { + expect(S3Client.upload).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + Body: 'readStream' + }) + }) }) - it('should return the error from LocalFileWriter', function(done) { - this.LocalFileWriter.deleteFile.callsArgWith(1) - this.LocalFileWriter.writeStream.callsArgWith(2, this.error) - return this.S3PersistorManager.sendStream( - this.bucketName, - this.key, - this.origin, - err => { - err.should.equal(this.error) - return done() + describe('when the file does not exist', function() { + let error + + beforeEach(async function() { + Fs.createReadStream = sinon.stub().throws(FileNotFoundError) + try { + await S3PersistorManager.promises.sendFile(bucket, key, filename) + } catch (err) { + error = err } - ) + }) + + it('returns a NotFoundError', function() { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + + it('wraps the error', function() { + expect(error.cause).to.equal(FileNotFoundError) + }) }) - return it('should send the file to the filestore', function(done) { - this.LocalFileWriter.deleteFile.callsArgWith(1) - this.LocalFileWriter.writeStream.callsArgWith(2) - return this.S3PersistorManager.sendStream( - this.bucketName, - this.key, - this.origin, - err => { - this.S3PersistorManager.sendFile.called.should.equal(true) - return done() + describe('when reading the file throws an error', function() { + let error + + beforeEach(async function() { + Fs.createReadStream = sinon.stub().throws(genericError) + try { + await S3PersistorManager.promises.sendFile(bucket, key, filename) + } catch (err) { + error = err } - ) + }) + + it('returns a ReadError', function() { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('wraps the error', function() { + expect(error.cause).to.equal(genericError) + }) }) }) describe('copyFile', function() { - beforeEach(function() { - this.sourceKey = 'my/key' - return (this.destKey = 'my/dest/key') + describe('with valid parameters', function() { + beforeEach(async function() { + return S3PersistorManager.promises.copyFile(bucket, key, destKey) + }) + + it('should copy the object', function() { + expect(S3Client.copyObject).to.have.been.calledWith({ + Bucket: bucket, + Key: destKey, + CopySource: `${bucket}/${key}` + }) + }) }) - it('should use AWS SDK to copy file', function(done) { - this.s3Client.copyObject.callsArgWith(1, this.error) - return this.S3PersistorManager.copyFile( - this.bucketName, - this.sourceKey, - this.destKey, - err => { - err.should.equal(this.error) - this.s3Client.copyObject - .calledWith({ - Bucket: this.bucketName, - Key: this.destKey, - CopySource: this.bucketName + '/' + this.key - }) - .should.equal(true) - return done() - } - ) - }) + describe('when the file does not exist', function() { + let error - return it('should return a NotFoundError object if the original file does not exist', function(done) { - const NoSuchKeyError = { code: 'NoSuchKey' } - this.s3Client.copyObject.callsArgWith(1, NoSuchKeyError) - return this.S3PersistorManager.copyFile( - this.bucketName, - this.sourceKey, - this.destKey, - err => { - expect(err instanceof 
this.Errors.NotFoundError).to.equal(true) - return done() + beforeEach(async function() { + S3Client.copyObject = sinon.stub().returns({ + promise: sinon.stub().rejects(S3NotFoundError) + }) + try { + await S3PersistorManager.promises.copyFile(bucket, key, destKey) + } catch (err) { + error = err } - ) + }) + + it('should throw a NotFoundError', function() { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) }) }) - describe('deleteDirectory', () => - it('should list the contents passing them onto multi delete', function(done) { - const data = { Contents: [{ Key: '1234' }, { Key: '456' }] } - this.knoxClient.list.callsArgWith(1, null, data) - this.knoxClient.deleteMultiple.callsArgWith(1) - return this.S3PersistorManager.deleteDirectory( - this.bucketName, - this.key, - err => { - this.knoxClient.deleteMultiple - .calledWith(['1234', '456']) - .should.equal(true) - return done() - } - ) - })) - describe('deleteFile', function() { - it('should use correct options', function(done) { - this.request.callsArgWith(1) + describe('with valid parameters', function() { + beforeEach(async function() { + return S3PersistorManager.promises.deleteFile(bucket, key) + }) - return this.S3PersistorManager.deleteFile( - this.bucketName, - this.key, - err => { - const opts = this.request.args[0][0] - assert.deepEqual(opts.aws, { - key: this.settings.filestore.s3.key, - secret: this.settings.filestore.s3.secret, - bucket: this.bucketName - }) - opts.method.should.equal('delete') - opts.timeout.should.equal(30 * 1000) - opts.uri.should.equal( - `https://${this.bucketName}.s3.amazonaws.com/${this.key}` - ) - return done() - } - ) + it('should delete the object', function() { + expect(S3Client.deleteObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key + }) + }) }) - return it('should return the error', function(done) { - this.request.callsArgWith(1, this.error) + describe('when the file does not exist', function() { + let error - return this.S3PersistorManager.deleteFile( - this.bucketName, - this.key, - err => { - err.should.equal(this.error) - return done() + beforeEach(async function() { + S3Client.deleteObject = sinon.stub().returns({ + promise: sinon.stub().rejects(S3NotFoundError) + }) + try { + await S3PersistorManager.promises.deleteFile(bucket, key) + } catch (err) { + error = err } - ) + }) + + it('should throw a NotFoundError', function() { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + }) + }) + + describe('deleteDirectory', function() { + describe('with valid parameters', function() { + beforeEach(async function() { + return S3PersistorManager.promises.deleteDirectory(bucket, key) + }) + + it('should list the objects in the directory', function() { + expect(S3Client.listObjects).to.have.been.calledWith({ + Bucket: bucket, + Prefix: key + }) + }) + + it('should delete the objects using their keys', function() { + expect(S3Client.deleteObjects).to.have.been.calledWith({ + Bucket: bucket, + Delete: { + Objects: [{ Key: 'llama' }, { Key: 'hippo' }], + Quiet: true + } + }) + }) + }) + + describe('when there are no files', function() { + beforeEach(async function() { + S3Client.listObjects = sinon + .stub() + .returns({ promise: sinon.stub().resolves({ Contents: [] }) }) + return S3PersistorManager.promises.deleteDirectory(bucket, key) + }) + + it('should list the objects in the directory', function() { + expect(S3Client.listObjects).to.have.been.calledWith({ + Bucket: bucket, + Prefix: key + }) + }) + + it('should not try to delete any objects', function() { + 
expect(S3Client.deleteObjects).not.to.have.been.called + }) + }) + + describe('when there is an error listing the objects', function() { + let error + + beforeEach(async function() { + S3Client.listObjects = sinon + .stub() + .returns({ promise: sinon.stub().rejects(genericError) }) + try { + await S3PersistorManager.promises.deleteDirectory(bucket, key) + } catch (err) { + error = err + } + }) + + it('should generate a ReadError', function() { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the error', function() { + expect(error.cause).to.equal(genericError) + }) + + it('should not try to delete any objects', function() { + expect(S3Client.deleteObjects).not.to.have.been.called + }) + }) + + describe('when there is an error deleting the objects', function() { + let error + + beforeEach(async function() { + S3Client.deleteObjects = sinon + .stub() + .returns({ promise: sinon.stub().rejects(genericError) }) + try { + await S3PersistorManager.promises.deleteDirectory(bucket, key) + } catch (err) { + error = err + } + }) + + it('should generate a WriteError', function() { + expect(error).to.be.an.instanceOf(Errors.WriteError) + }) + + it('should wrap the error', function() { + expect(error.cause).to.equal(genericError) + }) + }) + }) + + describe('directorySize', function() { + describe('with valid parameters', function() { + let size + + beforeEach(async function() { + size = await S3PersistorManager.promises.getDirectorySize(bucket, key) + }) + + it('should list the objects in the directory', function() { + expect(S3Client.listObjects).to.have.been.calledWith({ + Bucket: bucket, + Prefix: key + }) + }) + + it('should return the directory size', function() { + expect(size).to.equal(filesSize) + }) + }) + + describe('when there are no files', function() { + let size + + beforeEach(async function() { + S3Client.listObjects = sinon + .stub() + .returns({ promise: sinon.stub().resolves({ Contents: [] }) }) + size = await S3PersistorManager.promises.getDirectorySize(bucket, key) + }) + + it('should list the objects in the directory', function() { + expect(S3Client.listObjects).to.have.been.calledWith({ + Bucket: bucket, + Prefix: key + }) + }) + + it('should return zero', function() { + expect(size).to.equal(0) + }) + }) + + describe('when there is an error listing the objects', function() { + let error + + beforeEach(async function() { + S3Client.listObjects = sinon + .stub() + .returns({ promise: sinon.stub().rejects(genericError) }) + try { + await S3PersistorManager.promises.getDirectorySize(bucket, key) + } catch (err) { + error = err + } + }) + + it('should generate a ReadError', function() { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the error', function() { + expect(error.cause).to.equal(genericError) + }) }) }) describe('checkIfFileExists', function() { - it('should use correct options', function(done) { - this.request.callsArgWith(1, null, { statusCode: 200 }) + describe('when the file exists', function() { + let exists - return this.S3PersistorManager.checkIfFileExists( - this.bucketName, - this.key, - err => { - const opts = this.request.args[0][0] - assert.deepEqual(opts.aws, { - key: this.settings.filestore.s3.key, - secret: this.settings.filestore.s3.secret, - bucket: this.bucketName - }) - opts.method.should.equal('head') - opts.timeout.should.equal(30 * 1000) - opts.uri.should.equal( - `https://${this.bucketName}.s3.amazonaws.com/${this.key}` - ) - return done() - } - ) + beforeEach(async function() { + exists = 
await S3PersistorManager.promises.checkIfFileExists( + bucket, + key + ) + }) + + it('should get the object header', function() { + expect(S3Client.headObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key + }) + }) + + it('should return that the file exists', function() { + expect(exists).to.equal(true) + }) }) - it('should return true for a 200', function(done) { - this.request.callsArgWith(1, null, { statusCode: 200 }) + describe('when the file does not exist', function() { + let exists - return this.S3PersistorManager.checkIfFileExists( - this.bucketName, - this.key, - (err, exists) => { - exists.should.equal(true) - return done() - } - ) + beforeEach(async function() { + S3Client.headObject = sinon + .stub() + .returns({ promise: sinon.stub().rejects(S3NotFoundError) }) + exists = await S3PersistorManager.promises.checkIfFileExists( + bucket, + key + ) + }) + + it('should get the object header', function() { + expect(S3Client.headObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key + }) + }) + + it('should return that the file does not exist', function() { + expect(exists).to.equal(false) + }) }) - it('should return false for a non 200', function(done) { - this.request.callsArgWith(1, null, { statusCode: 404 }) + describe('when there is an error', function() { + let error - return this.S3PersistorManager.checkIfFileExists( - this.bucketName, - this.key, - (err, exists) => { - exists.should.equal(false) - return done() + beforeEach(async function() { + S3Client.headObject = sinon + .stub() + .returns({ promise: sinon.stub().rejects(genericError) }) + try { + await S3PersistorManager.promises.checkIfFileExists(bucket, key) + } catch (err) { + error = err } - ) - }) + }) - return it('should return the error', function(done) { - this.request.callsArgWith(1, this.error, {}) + it('should generate a ReadError', function() { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) - return this.S3PersistorManager.checkIfFileExists( - this.bucketName, - this.key, - err => { - err.should.equal(this.error) - return done() - } - ) + it('should wrap the upstream ReadError', function() { + expect(error.cause).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should eventually wrap the error', function() { + expect(error.cause.cause).to.equal(genericError) + }) }) }) - - return describe('directorySize', () => - it('should sum directory files size', function(done) { - const data = { Contents: [{ Size: 1024 }, { Size: 2048 }] } - this.knoxClient.list.callsArgWith(1, null, data) - return this.S3PersistorManager.directorySize( - this.bucketName, - this.key, - (err, totalSize) => { - totalSize.should.equal(3072) - return done() - } - ) - })) }) From a6d9d34fe3b2758b7791878cce5df75ea215fdf2 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Sat, 4 Jan 2020 17:00:04 +0000 Subject: [PATCH 385/555] Configure PersistorManager to use the new S3 manager for 'aws-sdk' --- services/filestore/app/js/PersistorManager.js | 2 -- services/filestore/test/unit/js/PersistorManagerTests.js | 8 ++++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/js/PersistorManager.js b/services/filestore/app/js/PersistorManager.js index 8124d66101..cca0cf0f36 100644 --- a/services/filestore/app/js/PersistorManager.js +++ b/services/filestore/app/js/PersistorManager.js @@ -13,8 +13,6 @@ if (!settings.filestore.backend) { switch (settings.filestore.backend) { case 'aws-sdk': - module.exports = require('./AWSSDKPersistorManager') - break case 's3': module.exports = 
require('./S3PersistorManager') break diff --git a/services/filestore/test/unit/js/PersistorManagerTests.js b/services/filestore/test/unit/js/PersistorManagerTests.js index d8fd887265..0ecbb22078 100644 --- a/services/filestore/test/unit/js/PersistorManagerTests.js +++ b/services/filestore/test/unit/js/PersistorManagerTests.js @@ -43,6 +43,14 @@ describe('PersistorManager', function() { expect(PersistorManager.wrappedMethod()).to.equal('S3PersistorManager') }) + it("should implement the S3 wrapped method when 'aws-sdk' is configured", function() { + settings.filestore.backend = 'aws-sdk' + PersistorManager = SandboxedModule.require(modulePath, { requires }) + + expect(PersistorManager).to.respondTo('wrappedMethod') + expect(PersistorManager.wrappedMethod()).to.equal('S3PersistorManager') + }) + it('should implement the FS wrapped method when FS is configured', function() { settings.filestore.backend = 'fs' PersistorManager = SandboxedModule.require(modulePath, { requires }) From b642b3cf3a418daa25300c1e796e5004537346c3 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 6 Jan 2020 15:35:40 +0000 Subject: [PATCH 386/555] Rename incorrectly-named 'getDirectorySize' method --- services/filestore/app/js/S3PersistorManager.js | 6 +++--- .../filestore/test/unit/js/S3PersistorManagerTests.js | 8 +++----- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3PersistorManager.js index 5f8bb34c5d..d0729b80b9 100644 --- a/services/filestore/app/js/S3PersistorManager.js +++ b/services/filestore/app/js/S3PersistorManager.js @@ -23,7 +23,7 @@ module.exports = { deleteFile: callbackify(deleteFile), copyFile: callbackify(copyFile), checkIfFileExists: callbackify(checkIfFileExists), - getDirectorySize: callbackify(getDirectorySize), + directorySize: callbackify(directorySize), promises: { sendFile, sendStream, @@ -33,7 +33,7 @@ module.exports = { deleteFile, copyFile, checkIfFileExists, - getDirectorySize + directorySize } } @@ -209,7 +209,7 @@ async function checkIfFileExists(bucketName, key) { } } -async function getDirectorySize(bucketName, key) { +async function directorySize(bucketName, key) { try { const response = await _client .listObjects({ Bucket: bucketName, Prefix: key }) diff --git a/services/filestore/test/unit/js/S3PersistorManagerTests.js b/services/filestore/test/unit/js/S3PersistorManagerTests.js index 3caf2db73d..2faa8b52c7 100644 --- a/services/filestore/test/unit/js/S3PersistorManagerTests.js +++ b/services/filestore/test/unit/js/S3PersistorManagerTests.js @@ -186,7 +186,6 @@ describe('S3PersistorManagerTests', function() { S3ReadStream.on.withArgs('error').yields(S3NotFoundError) try { stream = await S3PersistorManager.promises.getFileStream(bucket, key) - console.log(stream) } catch (err) { error = err } @@ -217,7 +216,6 @@ describe('S3PersistorManagerTests', function() { S3ReadStream.on.withArgs('error').yields(genericError) try { stream = await S3PersistorManager.promises.getFileStream(bucket, key) - console.log(stream) } catch (err) { error = err } @@ -581,7 +579,7 @@ describe('S3PersistorManagerTests', function() { let size beforeEach(async function() { - size = await S3PersistorManager.promises.getDirectorySize(bucket, key) + size = await S3PersistorManager.promises.directorySize(bucket, key) }) it('should list the objects in the directory', function() { @@ -603,7 +601,7 @@ describe('S3PersistorManagerTests', function() { S3Client.listObjects = sinon .stub() .returns({ promise: 
sinon.stub().resolves({ Contents: [] }) }) - size = await S3PersistorManager.promises.getDirectorySize(bucket, key) + size = await S3PersistorManager.promises.directorySize(bucket, key) }) it('should list the objects in the directory', function() { @@ -626,7 +624,7 @@ describe('S3PersistorManagerTests', function() { .stub() .returns({ promise: sinon.stub().rejects(genericError) }) try { - await S3PersistorManager.promises.getDirectorySize(bucket, key) + await S3PersistorManager.promises.directorySize(bucket, key) } catch (err) { error = err } From ffd002dc76341d6ff9494a83796093410b37c20e Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 6 Jan 2020 15:47:46 +0000 Subject: [PATCH 387/555] Add explanatory comment --- services/filestore/app/js/FileController.js | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index 726438a557..eef441436c 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -62,6 +62,7 @@ function getFile(req, res, next) { logger.log({ key, bucket, format, style }, 'sending file to response') + // pass 'next' as a callback to 'pipeline' to receive any errors pipeline(fileStream, res, next) }) } From 2bb2caf7b3fa8aac1a6148ddbb23f8b4c12d0efc Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 7 Jan 2020 09:46:53 +0000 Subject: [PATCH 388/555] Clean up settings tests --- .../filestore/test/unit/js/SettingsTests.js | 32 ++++++------------- 1 file changed, 9 insertions(+), 23 deletions(-) diff --git a/services/filestore/test/unit/js/SettingsTests.js b/services/filestore/test/unit/js/SettingsTests.js index 472c6d1179..4563449fde 100644 --- a/services/filestore/test/unit/js/SettingsTests.js +++ b/services/filestore/test/unit/js/SettingsTests.js @@ -1,33 +1,19 @@ -/* eslint-disable - camelcase, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
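// Editor's sketch, not part of the original patch: the cleaned-up test later in this
// hunk feeds bucket-specific credentials to settings-sharelatex through the
// S3_BUCKET_CREDENTIALS environment variable, which holds a JSON map of bucket name to
// auth_key/auth_secret. A minimal example of setting it before the settings module is
// loaded; the bucket name and credential values here are illustrative only:
process.env.S3_BUCKET_CREDENTIALS = JSON.stringify({
  sl_user_files: {
    auth_key: 'example-access-key',
    auth_secret: 'example-secret'
  }
})
const settings = require('settings-sharelatex')
// settings.filestore.s3BucketCreds now deep-equals the object serialised above
// End of editor's sketch.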
-/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const { assert } = require('chai') -const sinon = require('sinon') const chai = require('chai') -const should = chai.should() const { expect } = chai -const modulePath = '../../../app/js/BucketController.js' -describe('Settings', () => - describe('s3', () => - it('should use JSONified env var if present', function(done) { - const s3_settings = { +describe('Settings', function() { + describe('s3', function() { + it('should use JSONified env var if present', function() { + const s3Settings = { bucket1: { auth_key: 'bucket1_key', auth_secret: 'bucket1_secret' } } - process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3_settings) + process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3Settings) const settings = require('settings-sharelatex') - expect(settings.filestore.s3BucketCreds).to.deep.equal(s3_settings) - return done() - }))) + expect(settings.filestore.s3BucketCreds).to.deep.equal(s3Settings) + }) + }) +}) From 6cc5d94f13bb8c5328579e8226171f2c85addc38 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 7 Jan 2020 10:24:46 +0000 Subject: [PATCH 389/555] Re-add bucket-specific credentials support for S3 --- services/filestore/app/js/Errors.js | 4 +- .../filestore/app/js/S3PersistorManager.js | 77 +++++++++--- .../test/unit/js/S3PersistorManagerTests.js | 117 +++++++++++++++--- 3 files changed, 166 insertions(+), 32 deletions(-) diff --git a/services/filestore/app/js/Errors.js b/services/filestore/app/js/Errors.js index 65af6dc056..06091a13ba 100644 --- a/services/filestore/app/js/Errors.js +++ b/services/filestore/app/js/Errors.js @@ -23,6 +23,7 @@ class ReadError extends BackwardCompatibleError {} class HealthCheckError extends BackwardCompatibleError {} class ConversionsDisabledError extends BackwardCompatibleError {} class ConversionError extends BackwardCompatibleError {} +class SettingsError extends BackwardCompatibleError {} class FailedCommandError extends OError { constructor(command, code, stdout, stderr) { @@ -46,5 +47,6 @@ module.exports = { WriteError, ReadError, ConversionError, - HealthCheckError + HealthCheckError, + SettingsError } diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3PersistorManager.js index d0729b80b9..36c49d35bd 100644 --- a/services/filestore/app/js/S3PersistorManager.js +++ b/services/filestore/app/js/S3PersistorManager.js @@ -12,7 +12,12 @@ const fs = require('fs') const S3 = require('aws-sdk/clients/s3') const { URL } = require('url') const { callbackify } = require('util') -const { WriteError, ReadError, NotFoundError } = require('./Errors') +const { + WriteError, + ReadError, + NotFoundError, + SettingsError +} = require('./Errors') module.exports = { sendFile: callbackify(sendFile), @@ -37,8 +42,6 @@ module.exports = { } } -const _client = new S3(_defaultOptions()) - async function sendFile(bucketName, key, fsPath) { let readStream try { @@ -61,7 +64,7 @@ async function sendStream(bucketName, key, readStream) { metrics.count('s3.egress', meteredStream.bytes) }) - const response = await _client + const response = await _client(bucketName) .upload({ Bucket: bucketName, Key: key, @@ -92,7 +95,9 @@ async function getFileStream(bucketName, key, opts) { } return new Promise((resolve, reject) => { - const stream = _client.getObject(params).createReadStream() + const stream = _client(bucketName) + 
.getObject(params) + .createReadStream() const meteredStream = meter() meteredStream.on('finish', () => { @@ -115,7 +120,7 @@ async function deleteDirectory(bucketName, key) { let response try { - response = await _client + response = await _client(bucketName) .listObjects({ Bucket: bucketName, Prefix: key }) .promise() } catch (err) { @@ -130,7 +135,7 @@ async function deleteDirectory(bucketName, key) { const objects = response.Contents.map(item => ({ Key: item.Key })) if (objects.length) { try { - await _client + await _client(bucketName) .deleteObjects({ Bucket: bucketName, Delete: { @@ -152,7 +157,7 @@ async function deleteDirectory(bucketName, key) { async function getFileSize(bucketName, key) { try { - const response = await _client + const response = await _client(bucketName) .headObject({ Bucket: bucketName, Key: key }) .promise() return response.ContentLength @@ -168,7 +173,9 @@ async function getFileSize(bucketName, key) { async function deleteFile(bucketName, key) { try { - await _client.deleteObject({ Bucket: bucketName, Key: key }).promise() + await _client(bucketName) + .deleteObject({ Bucket: bucketName, Key: key }) + .promise() } catch (err) { throw _wrapError( err, @@ -186,7 +193,9 @@ async function copyFile(bucketName, sourceKey, destKey) { CopySource: `${bucketName}/${sourceKey}` } try { - await _client.copyObject(params).promise() + await _client(bucketName) + .copyObject(params) + .promise() } catch (err) { throw _wrapError(err, 'failed to copy file in S3', params, WriteError) } @@ -211,7 +220,7 @@ async function checkIfFileExists(bucketName, key) { async function directorySize(bucketName, key) { try { - const response = await _client + const response = await _client(bucketName) .listObjects({ Bucket: bucketName, Prefix: key }) .promise() @@ -240,9 +249,49 @@ function _wrapError(error, message, params, ErrorType) { } } -function _defaultOptions() { - const options = { - credentials: { +const _clients = {} + +function _client(bucket) { + if (_clients[bucket]) { + return _clients[bucket] + } + + if ( + settings.filestore.s3.s3BucketCreds && + settings.filestore.s3.s3BucketCreds[bucket] + ) { + _clients[bucket] = new S3( + _clientOptions(settings.filestore.s3.s3BucketCreds[bucket]) + ) + return _clients[bucket] + } + + // no specific credentials for the bucket + if (_clients.default) { + return _clients.default + } + + if (settings.filestore.s3.key) { + _clients.default = new S3(_clientOptions()) + return _clients.default + } + + throw new SettingsError({ + message: 'no bucket-specific or default credentials provided', + info: { bucket } + }) +} + +function _clientOptions(bucketCredentials) { + const options = {} + + if (bucketCredentials) { + options.credentials = { + accessKeyId: bucketCredentials.auth_key, + secretAccessKey: bucketCredentials.auth_secret + } + } else { + options.credentials = { accessKeyId: settings.filestore.s3.key, secretAccessKey: settings.filestore.s3.secret } diff --git a/services/filestore/test/unit/js/S3PersistorManagerTests.js b/services/filestore/test/unit/js/S3PersistorManagerTests.js index 2faa8b52c7..bdac7c8232 100644 --- a/services/filestore/test/unit/js/S3PersistorManagerTests.js +++ b/services/filestore/test/unit/js/S3PersistorManagerTests.js @@ -7,16 +7,12 @@ const SandboxedModule = require('sandboxed-module') const Errors = require('../../../app/js/Errors') describe('S3PersistorManagerTests', function() { - const settings = { - filestore: { - backend: 's3', - s3: { - secret: 'secret', - key: 'this_key' - }, - stores: { - user_files: 
'sl_user_files' - } + const defaultS3Key = 'frog' + const defaultS3Secret = 'prince' + const defaultS3Credentials = { + credentials: { + accessKeyId: defaultS3Key, + secretAccessKey: defaultS3Secret } } const filename = '/wombat/potato.tex' @@ -42,9 +38,23 @@ describe('S3PersistorManagerTests', function() { S3ReadStream, S3NotFoundError, FileNotFoundError, - EmptyPromise + EmptyPromise, + settings beforeEach(function() { + settings = { + filestore: { + backend: 's3', + s3: { + secret: defaultS3Secret, + key: defaultS3Key + }, + stores: { + user_files: 'sl_user_files' + } + } + } + EmptyPromise = { promise: sinon.stub().resolves() } @@ -131,12 +141,7 @@ describe('S3PersistorManagerTests', function() { }) it('sets the AWS client up with credentials from settings', function() { - expect(S3).to.have.been.calledWith({ - credentials: { - accessKeyId: settings.filestore.s3.key, - secretAccessKey: settings.filestore.s3.secret - } - }) + expect(S3).to.have.been.calledWith(defaultS3Credentials) }) it('fetches the right key from the right bucket', function() { @@ -178,6 +183,84 @@ describe('S3PersistorManagerTests', function() { }) }) + describe('when there are alternative credentials', function() { + let stream + const alternativeSecret = 'giraffe' + const alternativeKey = 'hippo' + const alternativeS3Credentials = { + credentials: { + accessKeyId: alternativeKey, + secretAccessKey: alternativeSecret + } + } + + beforeEach(async function() { + settings.filestore.s3.s3BucketCreds = {} + settings.filestore.s3.s3BucketCreds[bucket] = { + auth_key: alternativeKey, + auth_secret: alternativeSecret + } + + stream = await S3PersistorManager.promises.getFileStream(bucket, key) + }) + + it('returns a stream', function() { + expect(stream).to.equal('s3Stream') + }) + + it('sets the AWS client up with the alternative credentials', function() { + expect(S3).to.have.been.calledWith(alternativeS3Credentials) + }) + + it('fetches the right key from the right bucket', function() { + expect(S3Client.getObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key + }) + }) + + it('caches the credentials', async function() { + stream = await S3PersistorManager.promises.getFileStream(bucket, key) + + expect(S3).to.have.been.calledOnceWith(alternativeS3Credentials) + }) + + it('uses the default credentials for an unknown bucket', async function() { + stream = await S3PersistorManager.promises.getFileStream( + 'anotherBucket', + key + ) + + expect(S3).to.have.been.calledTwice + expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials) + expect(S3.secondCall).to.have.been.calledWith(defaultS3Credentials) + }) + + it('caches the default credentials', async function() { + stream = await S3PersistorManager.promises.getFileStream( + 'anotherBucket', + key + ) + stream = await S3PersistorManager.promises.getFileStream( + 'anotherBucket', + key + ) + + expect(S3).to.have.been.calledTwice + expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials) + expect(S3.secondCall).to.have.been.calledWith(defaultS3Credentials) + }) + + it('throws an error if there are no credentials for the bucket', async function() { + delete settings.filestore.s3.key + delete settings.filestore.s3.secret + + await expect( + S3PersistorManager.promises.getFileStream('anotherBucket', key) + ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.SettingsError) + }) + }) + describe("when the file doesn't exist", function() { let error, stream From 80d41cf51bf7d3ed7f7202793c41454fc31b6b82 Mon Sep 17 00:00:00 2001 From: Simon 
Detheridge Date: Tue, 7 Jan 2020 15:05:51 +0000 Subject: [PATCH 390/555] Move bucket-specific file endpoint into FileController --- services/filestore/app.js | 7 +- services/filestore/app/js/BucketController.js | 48 --------- services/filestore/app/js/KeyBuilder.js | 7 ++ .../filestore/app/js/S3PersistorManager.js | 5 + .../test/acceptance/js/FilestoreTests.js | 46 +++++++- .../test/unit/js/BucketControllerTests.js | 100 ------------------ 6 files changed, 62 insertions(+), 151 deletions(-) delete mode 100644 services/filestore/app/js/BucketController.js delete mode 100644 services/filestore/test/unit/js/BucketControllerTests.js diff --git a/services/filestore/app.js b/services/filestore/app.js index 232c5b24bc..9256cb0029 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -9,7 +9,6 @@ const express = require('express') const bodyParser = require('body-parser') const fileController = require('./app/js/FileController') -const bucketController = require('./app/js/BucketController') const keyBuilder = require('./app/js/KeyBuilder') const healthCheckController = require('./app/js/HealthCheckController') @@ -114,7 +113,11 @@ app.get( fileController.directorySize ) -app.get('/bucket/:bucket/key/*', bucketController.getFile) +app.get( + '/bucket/:bucket/key/*', + keyBuilder.bucketFileKeyMiddleware, + fileController.getFile +) app.get('/heapdump', (req, res, next) => require('heapdump').writeSnapshot( diff --git a/services/filestore/app/js/BucketController.js b/services/filestore/app/js/BucketController.js deleted file mode 100644 index 46f69679aa..0000000000 --- a/services/filestore/app/js/BucketController.js +++ /dev/null @@ -1,48 +0,0 @@ -/* eslint-disable - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -let BucketController -const settings = require('settings-sharelatex') -const logger = require('logger-sharelatex') -const FileHandler = require('./FileHandler') -const metrics = require('metrics-sharelatex') -const Errors = require('./Errors') - -module.exports = BucketController = { - getFile(req, res) { - const { bucket } = req.params - const key = req.params[0] - const credentials = - settings.filestore.s3BucketCreds != null - ? 
settings.filestore.s3BucketCreds[bucket] - : undefined - const options = { - key, - bucket, - credentials - } - metrics.inc(`${bucket}.getFile`) - logger.log({ key, bucket }, 'receiving request to get file from bucket') - return FileHandler.getFile(bucket, key, options, function(err, fileStream) { - if (err != null) { - logger.err({ err, key, bucket }, 'problem getting file from bucket') - if (err instanceof Errors.NotFoundError) { - return res.send(404) - } else { - return res.send(500) - } - } else { - logger.log({ key, bucket }, 'sending bucket file to response') - return fileStream.pipe(res) - } - }) - } -} diff --git a/services/filestore/app/js/KeyBuilder.js b/services/filestore/app/js/KeyBuilder.js index 8de7c0be2a..66cf563014 100644 --- a/services/filestore/app/js/KeyBuilder.js +++ b/services/filestore/app/js/KeyBuilder.js @@ -6,6 +6,7 @@ module.exports = { userFileKeyMiddleware, publicFileKeyMiddleware, publicProjectKeyMiddleware, + bucketFileKeyMiddleware, templateFileKeyMiddleware } @@ -48,6 +49,12 @@ function publicFileKeyMiddleware(req, res, next) { next() } +function bucketFileKeyMiddleware(req, res, next) { + req.bucket = req.params.bucket + req.key = req.params[0] + next() +} + function templateFileKeyMiddleware(req, res, next) { const { template_id: templateId, diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3PersistorManager.js index 36c49d35bd..00ed46379b 100644 --- a/services/filestore/app/js/S3PersistorManager.js +++ b/services/filestore/app/js/S3PersistorManager.js @@ -303,5 +303,10 @@ function _clientOptions(bucketCredentials) { options.sslEnabled = endpoint.protocol === 'https' } + // path-style access is only used for acceptance tests + if (settings.filestore.s3.pathStyle) { + options.s3ForcePathStyle = true + } + return options } diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 9260b1bd62..d7dfbce57c 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -7,6 +7,7 @@ const FilestoreApp = require('./FilestoreApp') const rp = require('request-promise-native').defaults({ resolveWithFullResponse: true }) +const S3 = require('aws-sdk/clients/s3') const Stream = require('stream') const request = require('request') const { promisify } = require('util') @@ -43,7 +44,8 @@ if (process.env.AWS_ACCESS_KEY_ID) { s3: { key: process.env.AWS_ACCESS_KEY_ID, secret: process.env.AWS_SECRET_ACCESS_KEY, - endpoint: process.env.AWS_S3_ENDPOINT + endpoint: process.env.AWS_S3_ENDPOINT, + pathStyle: true }, stores: { user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, @@ -288,6 +290,48 @@ describe('Filestore', function() { }) }) + if (backend === 'S3Persistor') { + describe('with a file in a specific bucket', function() { + let constantFileContents, fileId, fileUrl, bucketName + + beforeEach(async function() { + constantFileContents = `This is a file in a different S3 bucket ${Math.random()}` + fileId = Math.random().toString() + bucketName = Math.random().toString() + fileUrl = `${filestoreUrl}/bucket/${bucketName}/key/${fileId}` + + const s3ClientSettings = { + credentials: { + accessKeyId: 'fake', + secretAccessKey: 'fake' + }, + endpoint: process.env.AWS_S3_ENDPOINT, + sslEnabled: false, + s3ForcePathStyle: true + } + + const s3 = new S3(s3ClientSettings) + await s3 + .createBucket({ + Bucket: bucketName + }) + .promise() + await s3 + .upload({ + Bucket: bucketName, + Key: fileId, 
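
The path-style flag above is what makes randomly named buckets work against a single local endpoint: the AWS SDK defaults to virtual-hosted-style addressing, where the bucket name becomes part of the hostname, which a local S3 stand-in reached through one hostname generally cannot serve. A minimal sketch of the two client styles, assuming an illustrative local endpoint of http://localhost:9090 (the real test takes its endpoint from AWS_S3_ENDPOINT and the config above; the values here are made up):

    const S3 = require('aws-sdk/clients/s3')

    // Path-style client: objects are addressed as
    // http://localhost:9090/<bucket>/<key>, so any bucket name works
    // against one local endpoint.
    const pathStyleClient = new S3({
      credentials: { accessKeyId: 'fake', secretAccessKey: 'fake' },
      endpoint: 'http://localhost:9090', // assumed local test endpoint
      sslEnabled: false,
      s3ForcePathStyle: true
    })

    // Default virtual-hosted-style client: objects are addressed as
    // http://<bucket>.localhost:9090/<key>, which would need wildcard DNS
    // for every randomly generated bucket name.
    const hostStyleClient = new S3({
      credentials: { accessKeyId: 'fake', secretAccessKey: 'fake' },
      endpoint: 'http://localhost:9090',
      sslEnabled: false
    })
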
+ Body: constantFileContents + }) + .promise() + }) + + it('should get the file from the specified bucket', async function() { + const response = await rp.get(fileUrl) + expect(response.body).to.equal(constantFileContents) + }) + }) + } + describe('with a pdf file', function() { let fileId, fileUrl, localFileSize const localFileReadPath = Path.resolve( diff --git a/services/filestore/test/unit/js/BucketControllerTests.js b/services/filestore/test/unit/js/BucketControllerTests.js deleted file mode 100644 index ef74b3f6c0..0000000000 --- a/services/filestore/test/unit/js/BucketControllerTests.js +++ /dev/null @@ -1,100 +0,0 @@ -/* eslint-disable - no-return-assign, - no-unused-vars, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. -/* - * decaffeinate suggestions: - * DS102: Remove unnecessary code created because of implicit returns - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ -const { assert } = require('chai') -const sinon = require('sinon') -const chai = require('chai') -const should = chai.should() -const { expect } = chai -const modulePath = '../../../app/js/BucketController.js' -const SandboxedModule = require('sandboxed-module') - -describe('BucketController', function() { - beforeEach(function() { - this.PersistorManager = { - sendStream: sinon.stub(), - copyFile: sinon.stub(), - deleteFile: sinon.stub() - } - - this.settings = { - s3: { - buckets: { - user_files: 'user_files' - } - }, - filestore: { - backend: 's3', - s3: { - secret: 'secret', - key: 'this_key' - } - } - } - - this.FileHandler = { - getFile: sinon.stub(), - deleteFile: sinon.stub(), - insertFile: sinon.stub(), - getDirectorySize: sinon.stub() - } - this.LocalFileWriter = {} - this.controller = SandboxedModule.require(modulePath, { - requires: { - './LocalFileWriter': this.LocalFileWriter, - './FileHandler': this.FileHandler, - './PersistorManager': this.PersistorManager, - 'settings-sharelatex': this.settings, - 'metrics-sharelatex': { - inc() {} - }, - 'logger-sharelatex': { - log() {}, - err() {} - } - } - }) - this.project_id = 'project_id' - this.file_id = 'file_id' - this.bucket = 'user_files' - this.key = `${this.project_id}/${this.file_id}` - this.req = { - query: {}, - params: { - bucket: this.bucket, - 0: this.key - }, - headers: {} - } - this.res = { setHeader() {} } - return (this.fileStream = {}) - }) - - return describe('getFile', function() { - it('should pipe the stream', function(done) { - this.FileHandler.getFile.callsArgWith(3, null, this.fileStream) - this.fileStream.pipe = res => { - res.should.equal(this.res) - return done() - } - return this.controller.getFile(this.req, this.res) - }) - - return it('should send a 500 if there is a problem', function(done) { - this.FileHandler.getFile.callsArgWith(3, 'error') - this.res.send = code => { - code.should.equal(500) - return done() - } - return this.controller.getFile(this.req, this.res) - }) - }) -}) From 3bf51cac67a5675352fd051e6378e6a3209310f9 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 8 Jan 2020 09:17:30 +0000 Subject: [PATCH 391/555] Improve naming on internal '_client' method and use Map over object --- .../filestore/app/js/S3PersistorManager.js | 33 ++++++++++--------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3PersistorManager.js index 00ed46379b..4ccc1642a4 100644 --- a/services/filestore/app/js/S3PersistorManager.js +++ 
b/services/filestore/app/js/S3PersistorManager.js @@ -64,7 +64,7 @@ async function sendStream(bucketName, key, readStream) { metrics.count('s3.egress', meteredStream.bytes) }) - const response = await _client(bucketName) + const response = await _getClientForBucket(bucketName) .upload({ Bucket: bucketName, Key: key, @@ -95,7 +95,7 @@ async function getFileStream(bucketName, key, opts) { } return new Promise((resolve, reject) => { - const stream = _client(bucketName) + const stream = _getClientForBucket(bucketName) .getObject(params) .createReadStream() @@ -120,7 +120,7 @@ async function deleteDirectory(bucketName, key) { let response try { - response = await _client(bucketName) + response = await _getClientForBucket(bucketName) .listObjects({ Bucket: bucketName, Prefix: key }) .promise() } catch (err) { @@ -135,7 +135,7 @@ async function deleteDirectory(bucketName, key) { const objects = response.Contents.map(item => ({ Key: item.Key })) if (objects.length) { try { - await _client(bucketName) + await _getClientForBucket(bucketName) .deleteObjects({ Bucket: bucketName, Delete: { @@ -157,7 +157,7 @@ async function deleteDirectory(bucketName, key) { async function getFileSize(bucketName, key) { try { - const response = await _client(bucketName) + const response = await _getClientForBucket(bucketName) .headObject({ Bucket: bucketName, Key: key }) .promise() return response.ContentLength @@ -173,7 +173,7 @@ async function getFileSize(bucketName, key) { async function deleteFile(bucketName, key) { try { - await _client(bucketName) + await _getClientForBucket(bucketName) .deleteObject({ Bucket: bucketName, Key: key }) .promise() } catch (err) { @@ -193,7 +193,7 @@ async function copyFile(bucketName, sourceKey, destKey) { CopySource: `${bucketName}/${sourceKey}` } try { - await _client(bucketName) + await _getClientForBucket(bucketName) .copyObject(params) .promise() } catch (err) { @@ -220,7 +220,7 @@ async function checkIfFileExists(bucketName, key) { async function directorySize(bucketName, key) { try { - const response = await _client(bucketName) + const response = await _getClientForBucket(bucketName) .listObjects({ Bucket: bucketName, Prefix: key }) .promise() @@ -249,9 +249,10 @@ function _wrapError(error, message, params, ErrorType) { } } -const _clients = {} +const _clients = new Map() +let _defaultClient -function _client(bucket) { +function _getClientForBucket(bucket) { if (_clients[bucket]) { return _clients[bucket] } @@ -261,19 +262,19 @@ function _client(bucket) { settings.filestore.s3.s3BucketCreds[bucket] ) { _clients[bucket] = new S3( - _clientOptions(settings.filestore.s3.s3BucketCreds[bucket]) + _buildClientOptions(settings.filestore.s3.s3BucketCreds[bucket]) ) return _clients[bucket] } // no specific credentials for the bucket - if (_clients.default) { - return _clients.default + if (_defaultClient) { + return _defaultClient } if (settings.filestore.s3.key) { - _clients.default = new S3(_clientOptions()) - return _clients.default + _defaultClient = new S3(_buildClientOptions()) + return _defaultClient } throw new SettingsError({ @@ -282,7 +283,7 @@ function _client(bucket) { }) } -function _clientOptions(bucketCredentials) { +function _buildClientOptions(bucketCredentials) { const options = {} if (bucketCredentials) { From d8470d4a59b36866072cdc9fe05840f06572deed Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 8 Jan 2020 11:20:44 +0000 Subject: [PATCH 392/555] Use correct object path for s3BucketCreds --- services/filestore/app/js/S3PersistorManager.js | 6 +++--- 
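
A side note on the `const _clients = new Map()` change above: the lookups and assignments still use bracket syntax (`_clients[bucket]`), which on a Map attaches ordinary object properties rather than creating entries, so the cache behaves exactly like the plain object it replaced. A small sketch of what full use of the Map API would look like, purely as an illustration and not what this patch does:

    const clients = new Map()

    // buildClient is a hypothetical factory, e.g. bucket => new S3(options)
    function getClientForBucket(bucket, buildClient) {
      // has/get/set operate on real Map entries; clients[bucket] would
      // bypass them and attach a plain property to the Map object instead.
      if (!clients.has(bucket)) {
        clients.set(bucket, buildClient(bucket))
      }
      return clients.get(bucket)
    }
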
services/filestore/test/unit/js/S3PersistorManagerTests.js | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3PersistorManager.js index 4ccc1642a4..5fb7040b24 100644 --- a/services/filestore/app/js/S3PersistorManager.js +++ b/services/filestore/app/js/S3PersistorManager.js @@ -258,11 +258,11 @@ function _getClientForBucket(bucket) { } if ( - settings.filestore.s3.s3BucketCreds && - settings.filestore.s3.s3BucketCreds[bucket] + settings.filestore.s3BucketCreds && + settings.filestore.s3BucketCreds[bucket] ) { _clients[bucket] = new S3( - _buildClientOptions(settings.filestore.s3.s3BucketCreds[bucket]) + _buildClientOptions(settings.filestore.s3BucketCreds[bucket]) ) return _clients[bucket] } diff --git a/services/filestore/test/unit/js/S3PersistorManagerTests.js b/services/filestore/test/unit/js/S3PersistorManagerTests.js index bdac7c8232..4e98aa01ff 100644 --- a/services/filestore/test/unit/js/S3PersistorManagerTests.js +++ b/services/filestore/test/unit/js/S3PersistorManagerTests.js @@ -195,8 +195,8 @@ describe('S3PersistorManagerTests', function() { } beforeEach(async function() { - settings.filestore.s3.s3BucketCreds = {} - settings.filestore.s3.s3BucketCreds[bucket] = { + settings.filestore.s3BucketCreds = {} + settings.filestore.s3BucketCreds[bucket] = { auth_key: alternativeKey, auth_secret: alternativeSecret } From e331a3130ce55ece31caf3433e285d082bb3d370 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 9 Jan 2020 14:13:24 +0000 Subject: [PATCH 393/555] Handle AccessDenied and stream-premature-close errors These errors tend to occur as part of normal operation and should not generate `logger.err` messages --- services/filestore/app/js/FileController.js | 12 +++++-- .../filestore/app/js/S3PersistorManager.js | 4 ++- .../test/unit/js/S3PersistorManagerTests.js | 34 +++++++++++++++++++ 3 files changed, 47 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index eef441436c..a2de68b6df 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -62,8 +62,16 @@ function getFile(req, res, next) { logger.log({ key, bucket, format, style }, 'sending file to response') - // pass 'next' as a callback to 'pipeline' to receive any errors - pipeline(fileStream, res, next) + pipeline(fileStream, res, err => { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + logger.err( + new Errors.ReadError({ + message: 'error transferring stream', + info: { bucket, key, format, style } + }).withCause(err) + ) + } + }) }) } diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3PersistorManager.js index 5fb7040b24..2b65880180 100644 --- a/services/filestore/app/js/S3PersistorManager.js +++ b/services/filestore/app/js/S3PersistorManager.js @@ -236,7 +236,9 @@ async function directorySize(bucketName, key) { } function _wrapError(error, message, params, ErrorType) { - if (['NoSuchKey', 'NotFound', 'ENOENT'].includes(error.code)) { + if ( + ['NoSuchKey', 'NotFound', 'AccessDenied', 'ENOENT'].includes(error.code) + ) { return new NotFoundError({ message: 'no such file', info: params diff --git a/services/filestore/test/unit/js/S3PersistorManagerTests.js b/services/filestore/test/unit/js/S3PersistorManagerTests.js index 4e98aa01ff..2c85e353b7 100644 --- a/services/filestore/test/unit/js/S3PersistorManagerTests.js +++ 
b/services/filestore/test/unit/js/S3PersistorManagerTests.js @@ -37,6 +37,7 @@ describe('S3PersistorManagerTests', function() { S3Client, S3ReadStream, S3NotFoundError, + S3AccessDeniedError, FileNotFoundError, EmptyPromise, settings @@ -84,6 +85,9 @@ describe('S3PersistorManagerTests', function() { S3NotFoundError = new Error('not found') S3NotFoundError.code = 'NoSuchKey' + S3AccessDeniedError = new Error('access denied') + S3AccessDeniedError.code = 'AccessDenied' + S3ReadStream = { on: sinon.stub(), pipe: sinon.stub().returns('s3Stream'), @@ -291,6 +295,36 @@ describe('S3PersistorManagerTests', function() { }) }) + describe('when access to the file is denied', function() { + let error, stream + + beforeEach(async function() { + S3ReadStream.on = sinon.stub() + S3ReadStream.on.withArgs('error').yields(S3AccessDeniedError) + try { + stream = await S3PersistorManager.promises.getFileStream(bucket, key) + } catch (err) { + error = err + } + }) + + it('does not return a stream', function() { + expect(stream).not.to.exist + }) + + it('throws a NotFoundError', function() { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + + it('wraps the error from S3', function() { + expect(error.cause).to.equal(S3AccessDeniedError) + }) + + it('stores the bucket and key in the error', function() { + expect(error.info).to.deep.equal({ Bucket: bucket, Key: key }) + }) + }) + describe('when S3 encounters an unkown error', function() { let error, stream From f40fbe77aa97b1b931f921f3ae25763149f35905 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 7 Jan 2020 21:19:26 +0000 Subject: [PATCH 394/555] Simplify logging to log only once per http request --- services/filestore/app.js | 6 + services/filestore/app/js/Errors.js | 4 +- .../filestore/app/js/FSPersistorManager.js | 13 --- services/filestore/app/js/FileController.js | 103 ++++++++++-------- services/filestore/app/js/FileConverter.js | 7 -- services/filestore/app/js/FileHandler.js | 82 +++++--------- .../filestore/app/js/HealthCheckController.js | 7 +- services/filestore/app/js/ImageOptimiser.js | 7 -- services/filestore/app/js/LocalFileWriter.js | 6 - services/filestore/app/js/RequestLogger.js | 32 ++++++ .../filestore/app/js/S3PersistorManager.js | 6 +- services/filestore/app/js/SafeExec.js | 35 +++--- .../test/unit/js/FSPersistorManagerTests.js | 4 - .../test/unit/js/FileControllerTests.js | 86 ++++++--------- .../test/unit/js/FileConverterTests.js | 4 - .../test/unit/js/FileHandlerTests.js | 6 +- .../test/unit/js/ImageOptimiserTests.js | 29 +++-- .../filestore/test/unit/js/KeybuilderTests.js | 9 +- .../test/unit/js/LocalFileWriterTests.js | 4 - .../test/unit/js/S3PersistorManagerTests.js | 4 - .../filestore/test/unit/js/SafeExecTests.js | 4 - 21 files changed, 204 insertions(+), 254 deletions(-) create mode 100644 services/filestore/app/js/RequestLogger.js diff --git a/services/filestore/app.js b/services/filestore/app.js index 9256cb0029..3147cd10c1 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -12,6 +12,8 @@ const fileController = require('./app/js/FileController') const keyBuilder = require('./app/js/KeyBuilder') const healthCheckController = require('./app/js/HealthCheckController') +const RequestLogger = require('./app/js/RequestLogger') + const app = express() if (settings.sentry && settings.sentry.dsn) { @@ -27,6 +29,7 @@ if (Metrics.event_loop) { app.use(Metrics.http.monitor(logger)) app.use(function(req, res, next) { Metrics.inc('http-request') + res.logInfo = {} next() }) @@ -137,6 +140,9 @@ 
app.get('/status', function(req, res) { app.get('/health_check', healthCheckController.check) +app.use(RequestLogger.logRequest) +app.use(RequestLogger.logError) + const port = settings.internal.filestore.port || 3009 const host = '0.0.0.0' diff --git a/services/filestore/app/js/Errors.js b/services/filestore/app/js/Errors.js index 06091a13ba..445b666e17 100644 --- a/services/filestore/app/js/Errors.js +++ b/services/filestore/app/js/Errors.js @@ -24,6 +24,7 @@ class HealthCheckError extends BackwardCompatibleError {} class ConversionsDisabledError extends BackwardCompatibleError {} class ConversionError extends BackwardCompatibleError {} class SettingsError extends BackwardCompatibleError {} +class TimeoutError extends BackwardCompatibleError {} class FailedCommandError extends OError { constructor(command, code, stdout, stderr) { @@ -48,5 +49,6 @@ module.exports = { ReadError, ConversionError, HealthCheckError, - SettingsError + SettingsError, + TimeoutError } diff --git a/services/filestore/app/js/FSPersistorManager.js b/services/filestore/app/js/FSPersistorManager.js index 1a9d2b824d..862acb9bcb 100644 --- a/services/filestore/app/js/FSPersistorManager.js +++ b/services/filestore/app/js/FSPersistorManager.js @@ -1,6 +1,5 @@ const fs = require('fs') const glob = require('glob') -const logger = require('logger-sharelatex') const path = require('path') const rimraf = require('rimraf') const Stream = require('stream') @@ -20,7 +19,6 @@ const filterName = key => key.replace(/\//g, '_') async function sendFile(location, target, source) { const filteredTarget = filterName(target) - logger.log({ location, target: filteredTarget, source }, 'sending file') // actually copy the file (instead of moving it) to maintain consistent behaviour // between the different implementations @@ -39,8 +37,6 @@ async function sendFile(location, target, source) { } async function sendStream(location, target, sourceStream) { - logger.log({ location, target }, 'sending file stream') - const fsPath = await LocalFileWriter.writeStream(sourceStream) try { @@ -53,13 +49,10 @@ async function sendStream(location, target, sourceStream) { // opts may be {start: Number, end: Number} async function getFileStream(location, name, opts) { const filteredName = filterName(name) - logger.log({ location, filteredName }, 'getting file') try { opts.fd = await fsOpen(`${location}/${filteredName}`, 'r') } catch (err) { - logger.err({ err, location, filteredName: name }, 'Error reading from file') - throw _wrapError( err, 'failed to open file for streaming', @@ -78,8 +71,6 @@ async function getFileSize(location, filename) { const stat = await fsStat(fullPath) return stat.size } catch (err) { - logger.err({ err, location, filename }, 'failed to stat file') - throw _wrapError( err, 'failed to stat file', @@ -92,7 +83,6 @@ async function getFileSize(location, filename) { async function copyFile(location, fromName, toName) { const filteredFromName = filterName(fromName) const filteredToName = filterName(toName) - logger.log({ location, filteredFromName, filteredToName }, 'copying file') try { const sourceStream = fs.createReadStream(`${location}/${filteredFromName}`) @@ -110,7 +100,6 @@ async function copyFile(location, fromName, toName) { async function deleteFile(location, name) { const filteredName = filterName(name) - logger.log({ location, filteredName }, 'delete file') try { await fsUnlink(`${location}/${filteredName}`) } catch (err) { @@ -127,8 +116,6 @@ async function deleteFile(location, name) { async function 
deleteDirectory(location, name) { const filteredName = filterName(name.replace(/\/$/, '')) - logger.log({ location, filteredName }, 'deleting directory') - try { await rmrf(`${location}/${filteredName}`) } catch (err) { diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index a2de68b6df..a1508a20a4 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -1,5 +1,4 @@ const PersistorManager = require('./PersistorManager') -const logger = require('logger-sharelatex') const FileHandler = require('./FileHandler') const metrics = require('metrics-sharelatex') const parseRange = require('range-parser') @@ -26,18 +25,17 @@ function getFile(req, res, next) { format, style } + metrics.inc('getFile') - logger.log({ key, bucket, format, style }, 'receiving request to get file') + res.logMsg = 'getting file' + res.logInfo = { key, bucket, format, style, cacheWarm: req.query.cacheWarm } if (req.headers.range) { const range = _getRange(req.headers.range) if (range) { options.start = range.start options.end = range.end - logger.log( - { start: range.start, end: range.end }, - 'getting range of bytes from file' - ) + res.logInfo.range = range } } @@ -45,77 +43,88 @@ function getFile(req, res, next) { if (err) { if (err instanceof Errors.NotFoundError) { res.sendStatus(404) + res.logInfo.notFound = true + next() } else { - logger.err({ err, key, bucket, format, style }, 'problem getting file') - res.sendStatus(500) + next(err) } return } if (req.query.cacheWarm) { - logger.log( - { key, bucket, format, style }, - 'request is only for cache warm so not sending stream' - ) - return res.sendStatus(200) + res.sendStatus(200) + return next() } - logger.log({ key, bucket, format, style }, 'sending file to response') - pipeline(fileStream, res, err => { if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { - logger.err( + next( new Errors.ReadError({ message: 'error transferring stream', info: { bucket, key, format, style } }).withCause(err) ) + } else { + next() } }) }) } -function getFileHead(req, res) { +function getFileHead(req, res, next) { const { key, bucket } = req + metrics.inc('getFileSize') - logger.log({ key, bucket }, 'receiving request to get file metadata') + res.logMsg = 'getting file size' + res.logInfo = { key, bucket } + FileHandler.getFileSize(bucket, key, function(err, fileSize) { if (err) { if (err instanceof Errors.NotFoundError) { res.sendStatus(404) + res.logInfo.notFound = true + next() } else { - res.sendStatus(500) + next(err) } return } res.set('Content-Length', fileSize) res.status(200).end() + next() }) } -function insertFile(req, res) { +function insertFile(req, res, next) { metrics.inc('insertFile') const { key, bucket } = req - logger.log({ key, bucket }, 'receiving request to insert file') + + res.logMsg = 'inserting file' + res.logInfo = { key, bucket } + FileHandler.insertFile(bucket, key, req, function(err) { if (err) { - logger.log({ err, key, bucket }, 'error inserting file') - res.sendStatus(500) + next(err) } else { res.sendStatus(200) + next() } }) } -function copyFile(req, res) { +function copyFile(req, res, next) { metrics.inc('copyFile') const { key, bucket } = req const oldProjectId = req.body.source.project_id const oldFileId = req.body.source.file_id - logger.log( - { key, bucket, oldProject_id: oldProjectId, oldFile_id: oldFileId }, - 'receiving request to copy file' - ) + + req.logInfo = { + key, + bucket, + oldProject_id: oldProjectId, + oldFile_id: 
oldFileId + } + req.logMsg = 'copying file' PersistorManager.copyFile( bucket, @@ -125,46 +134,52 @@ function copyFile(req, res) { if (err) { if (err instanceof Errors.NotFoundError) { res.sendStatus(404) + res.logInfo.notFound = true + next() } else { - logger.log( - { err, oldProject_id: oldProjectId, oldFile_id: oldFileId }, - 'something went wrong copying file' - ) - res.sendStatus(500) + next(err) } return } res.sendStatus(200) + next() } ) } -function deleteFile(req, res) { +function deleteFile(req, res, next) { metrics.inc('deleteFile') const { key, bucket } = req - logger.log({ key, bucket }, 'receiving request to delete file') - return FileHandler.deleteFile(bucket, key, function(err) { - if (err != null) { - logger.log({ err, key, bucket }, 'something went wrong deleting file') - return res.sendStatus(500) + + req.logInfo = { key, bucket } + req.logMsg = 'deleting file' + + FileHandler.deleteFile(bucket, key, function(err) { + if (err) { + next(err) } else { - return res.sendStatus(204) + res.sendStatus(204) + next() } }) } -function directorySize(req, res) { +function directorySize(req, res, next) { metrics.inc('projectSize') const { project_id: projectId, bucket } = req - logger.log({ projectId, bucket }, 'receiving request to project size') + + req.logMsg = 'getting project size' + req.logInfo = { projectId, bucket } + FileHandler.getDirectorySize(bucket, projectId, function(err, size) { if (err) { - logger.log({ err, projectId, bucket }, 'error inserting file') - return res.sendStatus(500) + return next(err) } res.json({ 'total bytes': size }) + req.logInfo.size = size + next() }) } diff --git a/services/filestore/app/js/FileConverter.js b/services/filestore/app/js/FileConverter.js index 9338b289fb..5ef42cc493 100644 --- a/services/filestore/app/js/FileConverter.js +++ b/services/filestore/app/js/FileConverter.js @@ -1,5 +1,4 @@ const metrics = require('metrics-sharelatex') -const logger = require('logger-sharelatex') const Settings = require('settings-sharelatex') const { callbackify } = require('util') @@ -69,8 +68,6 @@ async function preview(sourcePath) { } async function _convert(sourcePath, requestedFormat, command) { - logger.log({ sourcePath, requestedFormat }, 'converting file format') - if (!APPROVED_FORMATS.includes(requestedFormat)) { throw new ConversionError({ message: 'invalid format requested', @@ -97,9 +94,5 @@ async function _convert(sourcePath, requestedFormat, command) { } timer.done() - logger.log( - { sourcePath, requestedFormat, destPath }, - 'finished converting file' - ) return destPath } diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index 684a1d2896..3c5b50e693 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -2,17 +2,11 @@ const { promisify } = require('util') const fs = require('fs') const PersistorManager = require('./PersistorManager') const LocalFileWriter = require('./LocalFileWriter') -const logger = require('logger-sharelatex') const FileConverter = require('./FileConverter') const KeyBuilder = require('./KeyBuilder') const async = require('async') const ImageOptimiser = require('./ImageOptimiser') -const { - WriteError, - ReadError, - ConversionError, - NotFoundError -} = require('./Errors') +const { ConversionError } = require('./Errors') module.exports = { insertFile, @@ -33,7 +27,7 @@ function insertFile(bucket, key, stream, callback) { const convertedKey = KeyBuilder.getConvertedFolderKey(key) PersistorManager.deleteDirectory(bucket, 
convertedKey, function(error) { if (error) { - return callback(new WriteError('error inserting file').withCause(error)) + return callback(error) } PersistorManager.sendStream(bucket, key, stream, callback) }) @@ -51,13 +45,9 @@ function deleteFile(bucket, key, callback) { } function getFile(bucket, key, opts, callback) { - // In this call, opts can contain credentials - if (!opts) { - opts = {} - } - logger.log({ bucket, key, opts: _scrubSecrets(opts) }, 'getting file') + opts = opts || {} if (!opts.format && !opts.style) { - _getStandardFile(bucket, key, opts, callback) + PersistorManager.getFileStream(bucket, key, opts, callback) } else { _getConvertedFile(bucket, key, opts, callback) } @@ -68,27 +58,7 @@ function getFileSize(bucket, key, callback) { } function getDirectorySize(bucket, projectId, callback) { - logger.log({ bucket, project_id: projectId }, 'getting project size') - PersistorManager.directorySize(bucket, projectId, function(err, size) { - if (err) { - return callback( - new ReadError('error getting project size').withCause(err) - ) - } - callback(null, size) - }) -} - -function _getStandardFile(bucket, key, opts, callback) { - PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream) { - if (err && !(err instanceof NotFoundError)) { - logger.err( - { bucket, key, opts: _scrubSecrets(opts) }, - 'error getting fileStream' - ) - } - callback(err, fileStream) - }) + PersistorManager.directorySize(bucket, projectId, callback) } function _getConvertedFile(bucket, key, opts, callback) { @@ -124,7 +94,10 @@ function _getConvertedFileAndCache(bucket, key, convertedKey, opts, callback) { if (err) { LocalFileWriter.deleteFile(convertedFsPath, function() {}) return callback( - new ConversionError('failed to convert file').withCause(err) + new ConversionError({ + message: 'failed to convert file', + info: { opts, bucket, key, convertedKey } + }).withCause(err) ) } // Send back the converted file from the local copy to avoid problems @@ -152,26 +125,26 @@ function _convertFile(bucket, originalKey, opts, callback) { _writeFileToDisk(bucket, originalKey, opts, function(err, originalFsPath) { if (err) { return callback( - new ConversionError('unable to write file to disk').withCause(err) + new ConversionError({ + message: 'unable to write file to disk', + info: { bucket, originalKey, opts } + }).withCause(err) ) } const done = function(err, destPath) { if (err) { - logger.err( - { err, bucket, originalKey, opts: _scrubSecrets(opts) }, - 'error converting file' - ) return callback( - new ConversionError('error converting file').withCause(err) + new ConversionError({ + message: 'error converting file', + info: { bucket, originalKey, opts } + }).withCause(err) ) } LocalFileWriter.deleteFile(originalFsPath, function() {}) callback(err, destPath) } - logger.log({ opts }, 'converting file depending on opts') - if (opts.format) { FileConverter.convert(originalFsPath, opts.format, done) } else if (opts.style === 'thumbnail') { @@ -180,11 +153,14 @@ function _convertFile(bucket, originalKey, opts, callback) { FileConverter.preview(originalFsPath, done) } else { callback( - new ConversionError( - `should have specified opts to convert file with ${JSON.stringify( + new ConversionError({ + message: 'invalid file conversion options', + info: { + bucket, + originalKey, opts - )}` - ) + } + }) ) } }) @@ -193,16 +169,8 @@ function _convertFile(bucket, originalKey, opts, callback) { function _writeFileToDisk(bucket, key, opts, callback) { PersistorManager.getFileStream(bucket, key, opts, 
function(err, fileStream) { if (err) { - return callback( - new ReadError('unable to get read stream for file').withCause(err) - ) + return callback(err) } LocalFileWriter.writeStream(fileStream, key, callback) }) } - -function _scrubSecrets(opts) { - const safe = Object.assign({}, opts) - delete safe.credentials - return safe -} diff --git a/services/filestore/app/js/HealthCheckController.js b/services/filestore/app/js/HealthCheckController.js index 5e12469ad3..a52d02a444 100644 --- a/services/filestore/app/js/HealthCheckController.js +++ b/services/filestore/app/js/HealthCheckController.js @@ -1,6 +1,5 @@ const fs = require('fs-extra') const path = require('path') -const logger = require('logger-sharelatex') const Settings = require('settings-sharelatex') const streamBuffers = require('stream-buffers') const { promisify } = require('util') @@ -60,13 +59,11 @@ async function checkFileConvert() { } module.exports = { - check(req, res) { - logger.log({}, 'performing health check') + check(req, res, next) { Promise.all([checkCanGetFiles(), checkFileConvert()]) .then(() => res.sendStatus(200)) .catch(err => { - logger.err({ err }, 'Health check: error running') - res.sendStatus(500) + next(err) }) } } diff --git a/services/filestore/app/js/ImageOptimiser.js b/services/filestore/app/js/ImageOptimiser.js index 555e6334bd..9c2ee95c5a 100644 --- a/services/filestore/app/js/ImageOptimiser.js +++ b/services/filestore/app/js/ImageOptimiser.js @@ -12,8 +12,6 @@ module.exports = { async function compressPng(localPath, callback) { const timer = new metrics.Timer('compressPng') - logger.log({ localPath }, 'optimising png path') - const args = ['optipng', localPath] const opts = { timeout: 30 * 1000, @@ -23,7 +21,6 @@ async function compressPng(localPath, callback) { try { await safeExec(args, opts) timer.done() - logger.log({ localPath }, 'finished compressing png') } catch (err) { if (err.code === 'SIGKILL') { logger.warn( @@ -31,10 +28,6 @@ async function compressPng(localPath, callback) { 'optimiser timeout reached' ) } else { - logger.err( - { err, stderr: err.stderr, localPath }, - 'something went wrong compressing png' - ) throw err } } diff --git a/services/filestore/app/js/LocalFileWriter.js b/services/filestore/app/js/LocalFileWriter.js index 22957e15d1..7af282a558 100644 --- a/services/filestore/app/js/LocalFileWriter.js +++ b/services/filestore/app/js/LocalFileWriter.js @@ -3,7 +3,6 @@ const uuid = require('node-uuid') const path = require('path') const Stream = require('stream') const { callbackify, promisify } = require('util') -const logger = require('logger-sharelatex') const metrics = require('metrics-sharelatex') const Settings = require('settings-sharelatex') const { WriteError } = require('./Errors') @@ -23,18 +22,14 @@ async function writeStream(stream, key) { const timer = new metrics.Timer('writingFile') const fsPath = _getPath(key) - logger.log({ fsPath }, 'writing file locally') - const writeStream = fs.createWriteStream(fsPath) try { await pipeline(stream, writeStream) timer.done() - logger.log({ fsPath }, 'finished writing file locally') return fsPath } catch (err) { await deleteFile(fsPath) - logger.err({ err, fsPath }, 'problem writing file locally') throw new WriteError({ message: 'problem writing file locally', info: { err, fsPath } @@ -46,7 +41,6 @@ async function deleteFile(fsPath) { if (!fsPath) { return } - logger.log({ fsPath }, 'removing local temp file') try { await promisify(fs.unlink)(fsPath) } catch (err) { diff --git a/services/filestore/app/js/RequestLogger.js 
b/services/filestore/app/js/RequestLogger.js new file mode 100644 index 0000000000..5d395097e8 --- /dev/null +++ b/services/filestore/app/js/RequestLogger.js @@ -0,0 +1,32 @@ +const logger = require('logger-sharelatex') + +module.exports = { + logRequest, + logError +} + +function logRequest(req, res) { + // response has already been sent, but we log what happened here + logger.log( + { + info: res.logInfo, + url: req.originalUrl, + params: req.params + }, + res.logMsg || 'HTTP request' + ) +} + +function logError(err, req, res, next) { + logger.err( + { + err, + info: res.logInfo, + url: req.originalUrl, + params: req.params, + msg: res.logMsg + }, + err.message + ) + next(err) // use the standard error handler to send the response +} diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3PersistorManager.js index 2b65880180..52cadfbfbd 100644 --- a/services/filestore/app/js/S3PersistorManager.js +++ b/services/filestore/app/js/S3PersistorManager.js @@ -4,7 +4,6 @@ http.globalAgent.maxSockets = 300 https.globalAgent.maxSockets = 300 const settings = require('settings-sharelatex') -const logger = require('logger-sharelatex') const metrics = require('metrics-sharelatex') const meter = require('stream-meter') @@ -64,15 +63,13 @@ async function sendStream(bucketName, key, readStream) { metrics.count('s3.egress', meteredStream.bytes) }) - const response = await _getClientForBucket(bucketName) + await _getClientForBucket(bucketName) .upload({ Bucket: bucketName, Key: key, Body: readStream.pipe(meteredStream) }) .promise() - - logger.log({ response, bucketName, key }, 'data uploaded to s3') } catch (err) { throw _wrapError( err, @@ -116,7 +113,6 @@ async function getFileStream(bucketName, key, opts) { } async function deleteDirectory(bucketName, key) { - logger.log({ key, bucketName }, 'deleting directory') let response try { diff --git a/services/filestore/app/js/SafeExec.js b/services/filestore/app/js/SafeExec.js index 5f079fa474..5ed0f18425 100644 --- a/services/filestore/app/js/SafeExec.js +++ b/services/filestore/app/js/SafeExec.js @@ -1,5 +1,4 @@ const _ = require('underscore') -const logger = require('logger-sharelatex') const childProcess = require('child_process') const Settings = require('settings-sharelatex') const { ConversionsDisabledError, FailedCommandError } = require('./Errors') @@ -29,20 +28,6 @@ function safeExec(command, options, callback) { let killTimer - if (options.timeout) { - killTimer = setTimeout(function() { - try { - // use negative process id to kill process group - process.kill(-child.pid, options.killSignal || 'SIGTERM') - } catch (error) { - logger.log( - { process: child.pid, kill_error: error }, - 'error killing process' - ) - } - }, options.timeout) - } - const cleanup = _.once(function(err) { if (killTimer) { clearTimeout(killTimer) @@ -50,6 +35,26 @@ function safeExec(command, options, callback) { callback(err, stdout, stderr) }) + if (options.timeout) { + killTimer = setTimeout(function() { + try { + // use negative process id to kill process group + process.kill(-child.pid, options.killSignal || 'SIGTERM') + } catch (error) { + cleanup( + new FailedCommandError({ + message: 'failed to kill process after timeout', + info: { + command, + options, + pid: child.pid + } + }) + ) + } + }, options.timeout) + } + child.on('close', function(code, signal) { if (code || signal) { return cleanup( diff --git a/services/filestore/test/unit/js/FSPersistorManagerTests.js b/services/filestore/test/unit/js/FSPersistorManagerTests.js 
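
The `logError(err, req, res, next)` handler added above leans on an Express convention: a middleware function is treated as an error handler only when it declares exactly four parameters, and calling `next(err)` inside it hands the error to the next error handler in the stack, ultimately Express's default one, which writes the response. A self-contained sketch of that mechanism, with a made-up route and messages:

    const express = require('express')
    const app = express()

    app.get('/boom', (req, res, next) => next(new Error('it broke')))

    // Four parameters mark this as error-handling middleware; dropping the
    // unused `next` would turn it into a normal handler that never sees errors.
    app.use(function logError(err, req, res, next) {
      console.error('request failed:', err.message)
      next(err) // hand off to Express's default error handler for the response
    })

    app.listen(3000)
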
index 6847f5b8ef..3b3b4bf417 100644 --- a/services/filestore/test/unit/js/FSPersistorManagerTests.js +++ b/services/filestore/test/unit/js/FSPersistorManagerTests.js @@ -44,10 +44,6 @@ describe('FSPersistorManagerTests', function() { FSPersistorManager = SandboxedModule.require(modulePath, { requires: { './LocalFileWriter': LocalFileWriter, - 'logger-sharelatex': { - log() {}, - err() {} - }, './Errors': Errors, fs, glob, diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js index 00e3fd1505..c80938663d 100644 --- a/services/filestore/test/unit/js/FileControllerTests.js +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -12,6 +12,7 @@ describe('FileController', function() { FileController, req, res, + next, stream const settings = { s3: { @@ -26,6 +27,7 @@ describe('FileController', function() { const fileId = 'file_id' const bucket = 'user_files' const key = `${projectId}/${fileId}` + const error = new Error('incorrect utensil') beforeEach(function() { PersistorManager = { @@ -57,10 +59,6 @@ describe('FileController', function() { 'settings-sharelatex': settings, 'metrics-sharelatex': { inc() {} - }, - 'logger-sharelatex': { - log() {}, - err() {} } }, globals: { console } @@ -82,11 +80,13 @@ describe('FileController', function() { sendStatus: sinon.stub().returnsThis(), status: sinon.stub().returnsThis() } + + next = sinon.stub() }) describe('getFile', function() { it('should pipe the stream', function() { - FileController.getFile(req, res) + FileController.getFile(req, res, next) expect(stream.pipeline).to.have.been.calledWith(fileStream, res) }) @@ -96,16 +96,13 @@ describe('FileController', function() { statusCode.should.equal(200) done() } - FileController.getFile(req, res) + FileController.getFile(req, res, next) }) - it('should send a 500 if there is a problem', function(done) { - FileHandler.getFile.yields('error') - res.sendStatus = code => { - code.should.equal(500) - done() - } - FileController.getFile(req, res) + it('should send an error if there is a problem', function() { + FileHandler.getFile.yields(error) + FileController.getFile(req, res, next) + expect(next).to.have.been.calledWith(error) }) describe('with a range header', function() { @@ -125,7 +122,7 @@ describe('FileController', function() { expectedOptions.start = 0 expectedOptions.end = 8 - FileController.getFile(req, res) + FileController.getFile(req, res, next) expect(FileHandler.getFile).to.have.been.calledWith( bucket, key, @@ -135,7 +132,7 @@ describe('FileController', function() { it('should ignore an invalid range header', function() { req.headers.range = 'potato' - FileController.getFile(req, res) + FileController.getFile(req, res, next) expect(FileHandler.getFile).to.have.been.calledWith( bucket, key, @@ -145,7 +142,7 @@ describe('FileController', function() { it("should ignore any type other than 'bytes'", function() { req.headers.range = 'wombats=0-8' - FileController.getFile(req, res) + FileController.getFile(req, res, next) expect(FileHandler.getFile).to.have.been.calledWith( bucket, key, @@ -163,7 +160,7 @@ describe('FileController', function() { done() } - FileController.getFileHead(req, res) + FileController.getFileHead(req, res, next) }) it('should return a 404 is the file is not found', function(done) { @@ -174,18 +171,14 @@ describe('FileController', function() { done() } - FileController.getFileHead(req, res) + FileController.getFileHead(req, res, next) }) - it('should return a 500 on internal errors', 
function(done) { - FileHandler.getFileSize.yields(new Error()) + it('should send an error on internal errors', function() { + FileHandler.getFileSize.yields(error) - res.sendStatus = code => { - expect(code).to.equal(500) - done() - } - - FileController.getFileHead(req, res) + FileController.getFileHead(req, res, next) + expect(next).to.have.been.calledWith(error) }) }) @@ -196,7 +189,7 @@ describe('FileController', function() { expect(code).to.equal(200) done() } - FileController.insertFile(req, res) + FileController.insertFile(req, res, next) }) }) @@ -224,7 +217,7 @@ describe('FileController', function() { ) done() } - FileController.copyFile(req, res) + FileController.copyFile(req, res, next) }) it('should send a 404 if the original file was not found', function(done) { @@ -233,16 +226,13 @@ describe('FileController', function() { code.should.equal(404) done() } - FileController.copyFile(req, res) + FileController.copyFile(req, res, next) }) - it('should send a 500 if there was an error', function(done) { - PersistorManager.copyFile.yields('error') - res.sendStatus = code => { - code.should.equal(500) - done() - } - FileController.copyFile(req, res) + it('should send an error if there was an error', function() { + PersistorManager.copyFile.yields(error) + FileController.copyFile(req, res, next) + expect(next).to.have.been.calledWith(error) }) }) @@ -253,16 +243,13 @@ describe('FileController', function() { expect(FileHandler.deleteFile).to.have.been.calledWith(bucket, key) done() } - FileController.deleteFile(req, res) + FileController.deleteFile(req, res, next) }) - it('should send a 500 if there was an error', function(done) { - FileHandler.deleteFile.yields('error') - res.sendStatus = code => { - code.should.equal(500) - done() - } - FileController.deleteFile(req, res) + it('should send a 500 if there was an error', function() { + FileHandler.deleteFile.yields(error) + FileController.deleteFile(req, res, next) + expect(next).to.have.been.calledWith(error) }) }) @@ -276,13 +263,10 @@ describe('FileController', function() { }) }) - it('should send a 500 if there was an error', function(done) { - FileHandler.getDirectorySize.callsArgWith(2, 'error') - res.sendStatus = code => { - code.should.equal(500) - done() - } - FileController.directorySize(req, res) + it('should send a 500 if there was an error', function() { + FileHandler.getDirectorySize.yields(error) + FileController.directorySize(req, res, next) + expect(next).to.have.been.calledWith(error) }) }) }) diff --git a/services/filestore/test/unit/js/FileConverterTests.js b/services/filestore/test/unit/js/FileConverterTests.js index 7a7ba12ee2..72d6413417 100644 --- a/services/filestore/test/unit/js/FileConverterTests.js +++ b/services/filestore/test/unit/js/FileConverterTests.js @@ -25,10 +25,6 @@ describe('FileConverter', function() { FileConverter = SandboxedModule.require(modulePath, { requires: { './SafeExec': SafeExec, - 'logger-sharelatex': { - log() {}, - err() {} - }, 'metrics-sharelatex': { inc: sinon.stub(), Timer: sinon.stub().returns({ done: sinon.stub() }) diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index 671e5c41ea..771ff998eb 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -67,11 +67,7 @@ describe('FileHandler', function() { './FileConverter': FileConverter, './KeyBuilder': KeyBuilder, './ImageOptimiser': ImageOptimiser, - fs: fs, - 'logger-sharelatex': { - log() {}, 
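
The controller tests above change shape to match the new error flow: rather than asserting that a 500 was written, they stub `next` and assert that it received the error, since the response is now produced further down the middleware chain. A stand-alone version of that pattern, using a hypothetical controller and assuming sinon-chai is wired in (which the `calledWith` assertions above imply):

    const sinon = require('sinon')
    const chai = require('chai')
    chai.use(require('sinon-chai'))
    const { expect } = chai

    // Hypothetical controller following the same convention: delegate
    // failures to next(err) instead of sending a 500 itself.
    function getThing(handler, req, res, next) {
      handler.getThing(req.key, err => (err ? next(err) : res.sendStatus(200)))
    }

    describe('getThing', function() {
      it('passes errors to next', function() {
        const error = new Error('incorrect utensil')
        const handler = { getThing: sinon.stub().yields(error) }
        const next = sinon.stub()

        getThing(handler, { key: 'wombat' }, { sendStatus: sinon.stub() }, next)

        expect(next).to.have.been.calledWith(error)
      })
    })
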
- err() {} - } + fs: fs }, globals: { console } }) diff --git a/services/filestore/test/unit/js/ImageOptimiserTests.js b/services/filestore/test/unit/js/ImageOptimiserTests.js index 7940767fdf..e4bc967345 100644 --- a/services/filestore/test/unit/js/ImageOptimiserTests.js +++ b/services/filestore/test/unit/js/ImageOptimiserTests.js @@ -6,21 +6,20 @@ const { FailedCommandError } = require('../../../app/js/Errors') const SandboxedModule = require('sandboxed-module') describe('ImageOptimiser', function() { - let ImageOptimiser, SafeExec + let ImageOptimiser, SafeExec, logger const sourcePath = '/wombat/potato.eps' beforeEach(function() { SafeExec = { promises: sinon.stub().resolves() } + logger = { + warn: sinon.stub() + } ImageOptimiser = SandboxedModule.require(modulePath, { requires: { './SafeExec': SafeExec, - 'logger-sharelatex': { - log() {}, - err() {}, - warn() {} - } + 'logger-sharelatex': logger } }) }) @@ -47,13 +46,23 @@ describe('ImageOptimiser', function() { }) describe('when optimiser is sigkilled', function() { - it('should not produce an error', function(done) { - const error = new FailedCommandError('', 'SIGKILL', '', '') - SafeExec.promises.rejects(error) + const expectedError = new FailedCommandError('', 'SIGKILL', '', '') + let error + + beforeEach(function(done) { + SafeExec.promises.rejects(expectedError) ImageOptimiser.compressPng(sourcePath, err => { - expect(err).not.to.exist + error = err done() }) }) + + it('should not produce an error', function() { + expect(error).not.to.exist + }) + + it('should log a warning', function() { + expect(logger.warn).to.have.been.calledOnce + }) }) }) diff --git a/services/filestore/test/unit/js/KeybuilderTests.js b/services/filestore/test/unit/js/KeybuilderTests.js index 5271e892ed..9dcb38f74f 100644 --- a/services/filestore/test/unit/js/KeybuilderTests.js +++ b/services/filestore/test/unit/js/KeybuilderTests.js @@ -7,14 +7,7 @@ describe('LocalFileWriter', function() { const key = 'wombat/potato' beforeEach(function() { - KeyBuilder = SandboxedModule.require(modulePath, { - requires: { - 'logger-sharelatex': { - log() {}, - err() {} - } - } - }) + KeyBuilder = SandboxedModule.require(modulePath) }) describe('cachedKey', function() { diff --git a/services/filestore/test/unit/js/LocalFileWriterTests.js b/services/filestore/test/unit/js/LocalFileWriterTests.js index ad4d73bce6..5f5158f28a 100644 --- a/services/filestore/test/unit/js/LocalFileWriterTests.js +++ b/services/filestore/test/unit/js/LocalFileWriterTests.js @@ -26,10 +26,6 @@ describe('LocalFileWriter', function() { requires: { fs, stream, - 'logger-sharelatex': { - log() {}, - err() {} - }, 'settings-sharelatex': settings, 'metrics-sharelatex': { inc: sinon.stub(), diff --git a/services/filestore/test/unit/js/S3PersistorManagerTests.js b/services/filestore/test/unit/js/S3PersistorManagerTests.js index 2c85e353b7..daeac66d3f 100644 --- a/services/filestore/test/unit/js/S3PersistorManagerTests.js +++ b/services/filestore/test/unit/js/S3PersistorManagerTests.js @@ -122,10 +122,6 @@ describe('S3PersistorManagerTests', function() { './Errors': Errors, fs: Fs, 'stream-meter': Meter, - 'logger-sharelatex': { - log() {}, - err() {} - }, 'metrics-sharelatex': Metrics }, globals: { console } diff --git a/services/filestore/test/unit/js/SafeExecTests.js b/services/filestore/test/unit/js/SafeExecTests.js index 077964ead7..1092be00be 100644 --- a/services/filestore/test/unit/js/SafeExecTests.js +++ b/services/filestore/test/unit/js/SafeExecTests.js @@ -13,10 +13,6 @@ describe('SafeExec', 
function() { safeExec = SandboxedModule.require(modulePath, { requires: { - 'logger-sharelatex': { - log() {}, - err() {} - }, 'settings-sharelatex': settings } }) From 9615a06e0fecd795d58b3f9ef7c978e0cd444beb Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Fri, 10 Jan 2020 17:08:47 +0000 Subject: [PATCH 395/555] Improve logging middleware to add info methods to request object --- services/filestore/app.js | 7 +- services/filestore/app/js/FileController.js | 52 +++++----- services/filestore/app/js/RequestLogger.js | 99 ++++++++++++++----- .../test/unit/js/FileControllerTests.js | 5 +- 4 files changed, 104 insertions(+), 59 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index 3147cd10c1..024ceb6834 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -26,7 +26,9 @@ if (Metrics.event_loop) { Metrics.event_loop.monitor(logger) } -app.use(Metrics.http.monitor(logger)) +app.use(RequestLogger.middleware) +app.use(RequestLogger.errorHandler) + app.use(function(req, res, next) { Metrics.inc('http-request') res.logInfo = {} @@ -140,9 +142,6 @@ app.get('/status', function(req, res) { app.get('/health_check', healthCheckController.check) -app.use(RequestLogger.logRequest) -app.use(RequestLogger.logError) - const port = settings.internal.filestore.port || 3009 const host = '0.0.0.0' diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index a1508a20a4..c84ffa17d3 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -27,15 +27,21 @@ function getFile(req, res, next) { } metrics.inc('getFile') - res.logMsg = 'getting file' - res.logInfo = { key, bucket, format, style, cacheWarm: req.query.cacheWarm } + req.setLogMessage('getting file') + req.addLogFields({ + key, + bucket, + format, + style, + cacheWarm: req.query.cacheWarm + }) if (req.headers.range) { const range = _getRange(req.headers.range) if (range) { options.start = range.start options.end = range.end - res.logInfo.range = range + req.addLogField('range', range) } } @@ -43,8 +49,6 @@ function getFile(req, res, next) { if (err) { if (err instanceof Errors.NotFoundError) { res.sendStatus(404) - res.logInfo.notFound = true - next() } else { next(err) } @@ -52,8 +56,7 @@ function getFile(req, res, next) { } if (req.query.cacheWarm) { - res.sendStatus(200) - return next() + return res.sendStatus(200).end() } pipeline(fileStream, res, err => { @@ -64,8 +67,6 @@ function getFile(req, res, next) { info: { bucket, key, format, style } }).withCause(err) ) - } else { - next() } }) }) @@ -75,15 +76,13 @@ function getFileHead(req, res, next) { const { key, bucket } = req metrics.inc('getFileSize') - res.logMsg = 'getting file size' - res.logInfo = { key, bucket } + req.setLogMessage('getting file size') + req.addLogFields({ key, bucket }) FileHandler.getFileSize(bucket, key, function(err, fileSize) { if (err) { if (err instanceof Errors.NotFoundError) { res.sendStatus(404) - res.logInfo.notFound = true - next() } else { next(err) } @@ -91,7 +90,6 @@ function getFileHead(req, res, next) { } res.set('Content-Length', fileSize) res.status(200).end() - next() }) } @@ -99,15 +97,14 @@ function insertFile(req, res, next) { metrics.inc('insertFile') const { key, bucket } = req - res.logMsg = 'inserting file' - res.logInfo = { key, bucket } + req.setLogMessage('inserting file') + req.addLogFields({ key, bucket }) FileHandler.insertFile(bucket, key, req, function(err) { if (err) { next(err) } else { 
res.sendStatus(200) - next() } }) } @@ -118,13 +115,13 @@ function copyFile(req, res, next) { const oldProjectId = req.body.source.project_id const oldFileId = req.body.source.file_id - req.logInfo = { + req.addLogFields({ key, bucket, oldProject_id: oldProjectId, oldFile_id: oldFileId - } - req.logMsg = 'copying file' + }) + req.setLogMessage('copying file') PersistorManager.copyFile( bucket, @@ -134,8 +131,6 @@ function copyFile(req, res, next) { if (err) { if (err instanceof Errors.NotFoundError) { res.sendStatus(404) - res.logInfo.notFound = true - next() } else { next(err) } @@ -143,7 +138,6 @@ function copyFile(req, res, next) { } res.sendStatus(200) - next() } ) } @@ -152,15 +146,14 @@ function deleteFile(req, res, next) { metrics.inc('deleteFile') const { key, bucket } = req - req.logInfo = { key, bucket } - req.logMsg = 'deleting file' + req.addLogFields({ key, bucket }) + req.setLogMessage('deleting file') FileHandler.deleteFile(bucket, key, function(err) { if (err) { next(err) } else { res.sendStatus(204) - next() } }) } @@ -169,8 +162,8 @@ function directorySize(req, res, next) { metrics.inc('projectSize') const { project_id: projectId, bucket } = req - req.logMsg = 'getting project size' - req.logInfo = { projectId, bucket } + req.setLogMessage('getting project size') + req.addLogFields({ projectId, bucket }) FileHandler.getDirectorySize(bucket, projectId, function(err, size) { if (err) { @@ -178,8 +171,7 @@ function directorySize(req, res, next) { } res.json({ 'total bytes': size }) - req.logInfo.size = size - next() + req.addLogField('size', size) }) } diff --git a/services/filestore/app/js/RequestLogger.js b/services/filestore/app/js/RequestLogger.js index 5d395097e8..1ae11020ce 100644 --- a/services/filestore/app/js/RequestLogger.js +++ b/services/filestore/app/js/RequestLogger.js @@ -1,32 +1,83 @@ const logger = require('logger-sharelatex') +const metrics = require('metrics-sharelatex') module.exports = { - logRequest, - logError + errorHandler, + middleware } -function logRequest(req, res) { - // response has already been sent, but we log what happened here - logger.log( - { - info: res.logInfo, - url: req.originalUrl, - params: req.params - }, - res.logMsg || 'HTTP request' - ) +function errorHandler(err, req, res, next) { + req._logInfo.set('error', err) + res + .send(err.message) + .status(500) + .end() } -function logError(err, req, res, next) { - logger.err( - { - err, - info: res.logInfo, - url: req.originalUrl, - params: req.params, - msg: res.logMsg - }, - err.message - ) - next(err) // use the standard error handler to send the response +function middleware(req, res, next) { + const startTime = new Date() + + // methods to allow the setting of additional information to be logged for the request + req._logInfo = {} + req._logMessage = 'http request' + req.addLogField = function(field, value) { + req._logInfo[field] = value + } + req.addLogFields = function(fields) { + Object.assign(req._logInfo, fields) + } + req.setLogMessage = function(message) { + req._logMessage = message + } + + // override the 'end' method to log and record metrics + const end = res.end + res.end = function() { + // apply the standard request 'end' method before logging and metrics + end.apply(this, arguments) + + const responseTime = new Date() - startTime + + const routePath = req.route && req.route.path.toString() + + if (routePath) { + metrics.timing('http_request', responseTime, null, { + method: req.method, + status_code: res.statusCode, + path: routePath + .replace(/\//g, '_') + 
.replace(/:/g, '') + .slice(1) + }) + } + + const level = res.statusCode >= 500 ? 'err' : 'log' + logger[level]( + { + req: { + url: req.originalUrl || req.url, + route: routePath, + method: req.method, + referrer: req.headers.referer || req.headers.referrer, + 'remote-addr': + req.ip || + (req.socket && req.socket.remoteAddress) || + (req.socket && + req.socket.socket && + req.socket.socket.remoteAddress), + 'user-agent': req.headers['user-agent'], + 'content-length': req.headers['content-length'] + }, + res: { + 'content-length': res._headers['content-length'], + statusCode: res.statusCode, + 'response-time': responseTime + }, + info: req._logInfo + }, + req._logMessage + ) + } + + next() } diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js index c80938663d..579e0a59fa 100644 --- a/services/filestore/test/unit/js/FileControllerTests.js +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -72,7 +72,10 @@ describe('FileController', function() { project_id: projectId, file_id: fileId }, - headers: {} + headers: {}, + setLogMessage: sinon.stub(), + addLogField: sinon.stub(), + addLogFields: sinon.stub() } res = { From 28fb998719bede5a78588c7b171af0f8e6cd36b3 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 14 Jan 2020 12:02:39 +0000 Subject: [PATCH 396/555] Refactor request logger into a class --- services/filestore/app.js | 6 +- services/filestore/app/js/FileController.js | 28 ++-- services/filestore/app/js/RequestLogger.js | 149 +++++++++--------- .../test/unit/js/FileControllerTests.js | 7 +- 4 files changed, 99 insertions(+), 91 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index 024ceb6834..91804e186f 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -16,6 +16,9 @@ const RequestLogger = require('./app/js/RequestLogger') const app = express() +const requestLogger = new RequestLogger() +requestLogger.attach(app) + if (settings.sentry && settings.sentry.dsn) { logger.initializeErrorReporting(settings.sentry.dsn) } @@ -26,9 +29,6 @@ if (Metrics.event_loop) { Metrics.event_loop.monitor(logger) } -app.use(RequestLogger.middleware) -app.use(RequestLogger.errorHandler) - app.use(function(req, res, next) { Metrics.inc('http-request') res.logInfo = {} diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index c84ffa17d3..930434dc9d 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -27,8 +27,8 @@ function getFile(req, res, next) { } metrics.inc('getFile') - req.setLogMessage('getting file') - req.addLogFields({ + req.requestLogger.setMessage('getting file') + req.requestLogger.addFields({ key, bucket, format, @@ -41,7 +41,7 @@ function getFile(req, res, next) { if (range) { options.start = range.start options.end = range.end - req.addLogField('range', range) + req.requestLogger.addFields({ range }) } } @@ -76,8 +76,8 @@ function getFileHead(req, res, next) { const { key, bucket } = req metrics.inc('getFileSize') - req.setLogMessage('getting file size') - req.addLogFields({ key, bucket }) + req.requestLogger.setMessage('getting file size') + req.requestLogger.addFields({ key, bucket }) FileHandler.getFileSize(bucket, key, function(err, fileSize) { if (err) { @@ -97,8 +97,8 @@ function insertFile(req, res, next) { metrics.inc('insertFile') const { key, bucket } = req - req.setLogMessage('inserting file') - req.addLogFields({ key, bucket }) + 
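
The middleware above times each request by wrapping `res.end`, so the log line and the timing metric are emitted once the response completes. For comparison, the same timing can be taken without monkey-patching by listening for the response's 'finish' event; a sketch of that alternative, shown only for contrast and not what this patch does:

    function timingMiddleware(req, res, next) {
      const startTime = Date.now()
      // 'finish' fires once the response has been fully handed off, which is
      // close enough to the res.end wrapper for timing and logging purposes.
      res.on('finish', () => {
        const responseTime = Date.now() - startTime
        console.log(
          `${req.method} ${req.originalUrl} ${res.statusCode} ${responseTime}ms`
        )
      })
      next()
    }
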
req.requestLogger.setMessage('inserting file') + req.requestLogger.addFields({ key, bucket }) FileHandler.insertFile(bucket, key, req, function(err) { if (err) { @@ -115,13 +115,13 @@ function copyFile(req, res, next) { const oldProjectId = req.body.source.project_id const oldFileId = req.body.source.file_id - req.addLogFields({ + req.requestLogger.addFields({ key, bucket, oldProject_id: oldProjectId, oldFile_id: oldFileId }) - req.setLogMessage('copying file') + req.requestLogger.setMessage('copying file') PersistorManager.copyFile( bucket, @@ -146,8 +146,8 @@ function deleteFile(req, res, next) { metrics.inc('deleteFile') const { key, bucket } = req - req.addLogFields({ key, bucket }) - req.setLogMessage('deleting file') + req.requestLogger.addFields({ key, bucket }) + req.requestLogger.setMessage('deleting file') FileHandler.deleteFile(bucket, key, function(err) { if (err) { @@ -162,8 +162,8 @@ function directorySize(req, res, next) { metrics.inc('projectSize') const { project_id: projectId, bucket } = req - req.setLogMessage('getting project size') - req.addLogFields({ projectId, bucket }) + req.requestLogger.setMessage('getting project size') + req.requestLogger.addFields({ projectId, bucket }) FileHandler.getDirectorySize(bucket, projectId, function(err, size) { if (err) { @@ -171,7 +171,7 @@ function directorySize(req, res, next) { } res.json({ 'total bytes': size }) - req.addLogField('size', size) + req.requestLogger.addFields({ size }) }) } diff --git a/services/filestore/app/js/RequestLogger.js b/services/filestore/app/js/RequestLogger.js index 1ae11020ce..e8870cda2e 100644 --- a/services/filestore/app/js/RequestLogger.js +++ b/services/filestore/app/js/RequestLogger.js @@ -1,83 +1,90 @@ const logger = require('logger-sharelatex') const metrics = require('metrics-sharelatex') -module.exports = { - errorHandler, - middleware -} - -function errorHandler(err, req, res, next) { - req._logInfo.set('error', err) - res - .send(err.message) - .status(500) - .end() -} - -function middleware(req, res, next) { - const startTime = new Date() - - // methods to allow the setting of additional information to be logged for the request - req._logInfo = {} - req._logMessage = 'http request' - req.addLogField = function(field, value) { - req._logInfo[field] = value - } - req.addLogFields = function(fields) { - Object.assign(req._logInfo, fields) - } - req.setLogMessage = function(message) { - req._logMessage = message +class RequestLogger { + constructor() { + this.errorHandler = this.errorHandler.bind(this) + this.middleware = this.middleware.bind(this) + this._logInfo = {} + this._logMessage = 'http request' } - // override the 'end' method to log and record metrics - const end = res.end - res.end = function() { - // apply the standard request 'end' method before logging and metrics - end.apply(this, arguments) + attach(app) { + app.use(this.middleware) + app.use(this.errorHandler) + } - const responseTime = new Date() - startTime + errorHandler(err, req, res, next) { + this._logInfo.error = err + res + .send(err.message) + .status(500) + .end() + } - const routePath = req.route && req.route.path.toString() + addFields(fields) { + Object.assign(this._logInfo, fields) + } - if (routePath) { - metrics.timing('http_request', responseTime, null, { - method: req.method, - status_code: res.statusCode, - path: routePath - .replace(/\//g, '_') - .replace(/:/g, '') - .slice(1) - }) + setMessage(message) { + this._logMessage = message + } + + middleware(req, res, next) { + const startTime = new Date() + 
req.requestLogger = this + + // override the 'end' method to log and record metrics + const end = res.end + res.end = function() { + // apply the standard request 'end' method before logging and metrics + end.apply(this, arguments) + + const responseTime = new Date() - startTime + + const routePath = req.route && req.route.path.toString() + + if (routePath) { + metrics.timing('http_request', responseTime, null, { + method: req.method, + status_code: res.statusCode, + path: routePath + .replace(/\//g, '_') + .replace(/:/g, '') + .slice(1) + }) + } + + const level = res.statusCode >= 500 ? 'err' : 'log' + logger[level]( + { + req: { + url: req.originalUrl || req.url, + route: routePath, + method: req.method, + referrer: req.headers.referer || req.headers.referrer, + 'remote-addr': + req.ip || + (req.socket && req.socket.remoteAddress) || + (req.socket && + req.socket.socket && + req.socket.socket.remoteAddress), + 'user-agent': req.headers['user-agent'], + 'content-length': req.headers['content-length'] + }, + res: { + 'content-length': res._headers['content-length'], + statusCode: res.statusCode, + 'response-time': responseTime + }, + info: this._logInfo + }, + this._logMessage + ) } - const level = res.statusCode >= 500 ? 'err' : 'log' - logger[level]( - { - req: { - url: req.originalUrl || req.url, - route: routePath, - method: req.method, - referrer: req.headers.referer || req.headers.referrer, - 'remote-addr': - req.ip || - (req.socket && req.socket.remoteAddress) || - (req.socket && - req.socket.socket && - req.socket.socket.remoteAddress), - 'user-agent': req.headers['user-agent'], - 'content-length': req.headers['content-length'] - }, - res: { - 'content-length': res._headers['content-length'], - statusCode: res.statusCode, - 'response-time': responseTime - }, - info: req._logInfo - }, - req._logMessage - ) + next() } - - next() } + +module.exports = RequestLogger diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js index 579e0a59fa..2d1411ea27 100644 --- a/services/filestore/test/unit/js/FileControllerTests.js +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -73,9 +73,10 @@ describe('FileController', function() { file_id: fileId }, headers: {}, - setLogMessage: sinon.stub(), - addLogField: sinon.stub(), - addLogFields: sinon.stub() + requestLogger: { + setMessage: sinon.stub(), + addFields: sinon.stub() + } } res = { From 035364b62e5f5bc4c73a901ea49e15574d8a1ab4 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 14 Jan 2020 17:13:12 +0000 Subject: [PATCH 397/555] Fix RequestLogger object lifecycle --- services/filestore/app.js | 3 +- services/filestore/app/js/RequestLogger.js | 36 ++++++++++------------ 2 files changed, 18 insertions(+), 21 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index 91804e186f..9b78de99ad 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -16,8 +16,7 @@ const RequestLogger = require('./app/js/RequestLogger') const app = express() -const requestLogger = new RequestLogger() -requestLogger.attach(app) +RequestLogger.attach(app) if (settings.sentry && settings.sentry.dsn) { logger.initializeErrorReporting(settings.sentry.dsn) diff --git a/services/filestore/app/js/RequestLogger.js b/services/filestore/app/js/RequestLogger.js index e8870cda2e..89622f118c 100644 --- a/services/filestore/app/js/RequestLogger.js +++ b/services/filestore/app/js/RequestLogger.js @@ -3,25 +3,10 @@ const metrics = 
require('metrics-sharelatex') class RequestLogger { constructor() { - this.errorHandler = this.errorHandler.bind(this) - this.middleware = this.middleware.bind(this) this._logInfo = {} this._logMessage = 'http request' } - attach(app) { - app.use(this.middleware) - app.use(this.errorHandler) - } - - errorHandler(err, req, res, next) { - this._logInfo.error = err - res - .send(err.message) - .status(500) - .end() - } - addFields(fields) { Object.assign(this._logInfo, fields) } @@ -30,9 +15,22 @@ class RequestLogger { this._logMessage = message } - middleware(req, res, next) { + static attach(app) { + app.use(RequestLogger.middleware) + app.use(RequestLogger.errorHandler) + } + + static errorHandler(err, req, res, next) { + this._logInfo.error = err + res + .send(err.message) + .status(500) + .end() + } + + static middleware(req, res, next) { const startTime = new Date() - req.requestLogger = this + req.requestLogger = new RequestLogger() // override the 'end' method to log and record metrics const end = res.end @@ -77,9 +75,9 @@ class RequestLogger { statusCode: res.statusCode, 'response-time': responseTime }, - info: this._logInfo + info: req.requestLogger._logInfo }, - this._logMessage + req.requestLogger._logMessage ) } From 997434c930662cb09e26eb7dbc347f21b6bfb6a5 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 14 Jan 2020 20:55:33 +0000 Subject: [PATCH 398/555] Remove redundant 'res.logInfo' --- services/filestore/app.js | 1 - 1 file changed, 1 deletion(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index 9b78de99ad..84182c3dcf 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -30,7 +30,6 @@ if (Metrics.event_loop) { app.use(function(req, res, next) { Metrics.inc('http-request') - res.logInfo = {} next() }) From c1d81dc1ea5c58424d184321d9c8b341e5caf372 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Fri, 31 Jan 2020 15:45:26 +0100 Subject: [PATCH 399/555] [misc] install a maintained ghostscript version and delete package lists also do not install vim and bail out in case any command exited with a non zero exit code. 
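A usage sketch for the request-scoped logger introduced in PATCH 396 and PATCH 397 above, before this patch's install_deps.sh changes: attach installs the static middleware and error handler, the middleware puts a fresh RequestLogger instance on every request, and route handlers then tag their own log line through setMessage and addFields. The snippet below is illustrative only and assumes an Express app; the route, field names and port are hypothetical and not taken from the patches.

// Sketch only: wiring the static RequestLogger API from the patches above.
const express = require('express')
const RequestLogger = require('./app/js/RequestLogger')

const app = express()
// installs RequestLogger.middleware and RequestLogger.errorHandler, as app.js does
RequestLogger.attach(app)

app.get('/project/:project_id/size', (req, res) => {
  // the middleware created a fresh instance for this request only, so these
  // fields cannot leak into another concurrent request's log line
  req.requestLogger.setMessage('getting project size')
  req.requestLogger.addFields({ projectId: req.params.project_id })
  res.json({ 'total bytes': 0 }) // placeholder response for the sketch
})

app.listen(3000) // port chosen arbitrarily for the sketch
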
--- services/filestore/install_deps.sh | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/services/filestore/install_deps.sh b/services/filestore/install_deps.sh index 2bf275b1cb..75f6ff4f86 100755 --- a/services/filestore/install_deps.sh +++ b/services/filestore/install_deps.sh @@ -1,15 +1,14 @@ #!/bin/sh + +set -ex + apt-get update -apt-get install vim imagemagick optipng --yes +apt-get install ghostscript imagemagick optipng --yes + +rm -rf /var/lib/apt/lists/* -wget -q https://s3.amazonaws.com/sl-public-dev-assets/ghostscript-9.15.tar.gz -O /tmp/ghostscript-9.15.tar.gz -cd /tmp -tar -xvf /tmp/ghostscript-9.15.tar.gz -cd /tmp/ghostscript-9.15 && ./configure && make && make install -npm rebuild mkdir /app/user_files/ /app/uploads/ /app/template_files/ chown -R node:node /app/user_files chown -R node:node /app/uploads chown -R node:node /app/template_files -ls -al /app \ No newline at end of file From ac4361d56032aff3c476c3d498f5d8af98df83c1 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Fri, 31 Jan 2020 15:49:50 +0100 Subject: [PATCH 400/555] [misc] dockerignore: hide the local data dirs --- services/filestore/.dockerignore | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/services/filestore/.dockerignore b/services/filestore/.dockerignore index ba1c3442de..96b359ff8a 100644 --- a/services/filestore/.dockerignore +++ b/services/filestore/.dockerignore @@ -5,3 +5,7 @@ gitrev .npm .nvmrc nodemon.json + +uploads/ +user_files/ +template_files/ From 14ba1ea1c1a5364cb6c39484f8a0d59a79b92ddd Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 7 Feb 2020 14:16:31 +0000 Subject: [PATCH 401/555] update to node 10.19.0 --- services/filestore/.nvmrc | 2 +- services/filestore/Dockerfile | 4 ++-- services/filestore/buildscript.txt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/filestore/.nvmrc b/services/filestore/.nvmrc index 73bffb0393..5b7269c0a9 100644 --- a/services/filestore/.nvmrc +++ b/services/filestore/.nvmrc @@ -1 +1 @@ -10.17.0 +10.19.0 diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index 4cdf40264f..fe07a75be2 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -3,7 +3,7 @@ # https://github.com/sharelatex/sharelatex-dev-environment # Version: 1.3 -FROM node:10.17.0 as app +FROM node:10.19.0 as app WORKDIR /app @@ -16,7 +16,7 @@ COPY . 
/app -FROM node:10.17.0 +FROM node:10.19.0 COPY --from=app /app /app diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index 7e00306b70..6205f63f56 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -3,7 +3,7 @@ filestore --dependencies=s3 --acceptance-creds= --script-version=1.3 ---node-version=10.17.0 +--node-version=10.19.0 --language=es --docker-repos=gcr.io/overleaf-ops --public-repo=True From 1341af04f594b36b452c6b9c2df53d397fce0e85 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 10 Feb 2020 17:10:42 +0100 Subject: [PATCH 402/555] [misc] update the build scripts to 1.3.5 --- services/filestore/.dockerignore | 1 - services/filestore/.eslintrc | 15 +++------------ services/filestore/.prettierrc | 2 +- services/filestore/Dockerfile | 15 +++++++++------ services/filestore/Makefile | 7 ++++++- services/filestore/buildscript.txt | 15 ++++++++------- services/filestore/docker-compose.ci.yml | 13 +++++++------ services/filestore/docker-compose.yml | 21 +++++++++++++-------- services/filestore/install_deps.sh | 5 ----- 9 files changed, 47 insertions(+), 47 deletions(-) diff --git a/services/filestore/.dockerignore b/services/filestore/.dockerignore index 96b359ff8a..cb8e2f9135 100644 --- a/services/filestore/.dockerignore +++ b/services/filestore/.dockerignore @@ -5,7 +5,6 @@ gitrev .npm .nvmrc nodemon.json - uploads/ user_files/ template_files/ diff --git a/services/filestore/.eslintrc b/services/filestore/.eslintrc index 6bd234a2dc..42a4b5cace 100644 --- a/services/filestore/.eslintrc +++ b/services/filestore/.eslintrc @@ -1,7 +1,7 @@ // this file was auto-generated, do not edit it directly. // instead run bin/update_build_scripts from // https://github.com/sharelatex/sharelatex-dev-environment -// Version: 1.3 +// Version: 1.3.5 { "extends": [ "standard", @@ -28,7 +28,7 @@ "overrides": [ { // Test specific rules - "files": ["**/test/*/src/**/*.js"], + "files": ["test/**/*.js"], "globals": { "expect": true }, @@ -53,18 +53,9 @@ "mocha/prefer-arrow-callback": "error" } }, - { - // Frontend test specific rules - "files": ["**/test/frontend/**/*.js"], - "globals": { - "expect": true, - "define": true, - "$": true - } - }, { // Backend specific rules - "files": ["**/app/src/**/*.js"], + "files": ["app/**/*.js", "app.js", "index.js"], "rules": { // don't allow console.log in backend code "no-console": "error" diff --git a/services/filestore/.prettierrc b/services/filestore/.prettierrc index ddf06f31b6..5845b82113 100644 --- a/services/filestore/.prettierrc +++ b/services/filestore/.prettierrc @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3 +# Version: 1.3.5 { "semi": false, "singleQuote": true diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index fe07a75be2..c4a7b37f9a 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -1,11 +1,15 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3 +# Version: 1.3.5 -FROM node:10.19.0 as app +FROM node:10.19.0 as base WORKDIR /app +COPY install_deps.sh /app +RUN chmod 0755 ./install_deps.sh && ./install_deps.sh + +FROM base as app #wildcard as some files may not be in all repos COPY package*.json npm-shrink*.json /app/ @@ -16,12 +20,11 @@ COPY . 
/app -FROM node:10.19.0 +FROM base COPY --from=app /app /app - -WORKDIR /app -RUN chmod 0755 ./install_deps.sh && ./install_deps.sh +RUN mkdir -p uploads user_files template_files \ +&& chown node:node uploads user_files template_files USER node CMD ["node", "--expose-gc", "app.js"] diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 67bec94e5a..86514a2121 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3 +# Version: 1.3.5 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) @@ -33,9 +33,14 @@ test_unit: test_acceptance: test_clean test_acceptance_pre_run test_acceptance_run +test_acceptance_debug: test_clean test_acceptance_pre_run test_acceptance_run_debug + test_acceptance_run: @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance +test_acceptance_run_debug: + @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk + test_clean: $(DOCKER_COMPOSE) down -v -t 0 diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index 6205f63f56..75478ce00e 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -1,10 +1,11 @@ filestore ---env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_BUCKET_NAME=fake_user_files,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake_template_files,AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake_public_files,AWS_S3_ENDPOINT=http://s3:9090,AWS_ACCESS_KEY_ID=fake,AWS_SECRET_ACCESS_KEY=fake ---dependencies=s3 ---acceptance-creds= ---script-version=1.3 ---node-version=10.19.0 ---language=es ---docker-repos=gcr.io/overleaf-ops --public-repo=True +--language=es +--env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_BUCKET_NAME=fake_user_files,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake_template_files,AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake_public_files +--node-version=10.19.0 +--acceptance-creds= +--dependencies=s3 +--docker-repos=gcr.io/overleaf-ops --env-pass-through= +--data-dirs=uploads,user_files,template_files +--script-version=1.3.5 diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index ae9b43ee25..38ee4d81f4 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,9 +1,9 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3 +# Version: 1.3.5 -version: "2.1" +version: "2.3" services: test_unit: @@ -22,6 +22,10 @@ services: REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres + AWS_S3_ENDPOINT: http://s3:9090 + AWS_S3_PATH_STYLE: 'true' + AWS_ACCESS_KEY_ID: fake + AWS_SECRET_ACCESS_KEY: fake MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test ENABLE_CONVERSIONS: "true" @@ -29,9 +33,6 @@ services: AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files - AWS_S3_ENDPOINT: http://s3:9090 - AWS_ACCESS_KEY_ID: fake - AWS_SECRET_ACCESS_KEY: fake depends_on: s3: condition: service_healthy @@ -49,6 +50,6 @@ services: s3: image: adobe/s3mock environment: - - initialBuckets=fake_user_files,fake_template_files,fake_public_files + - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket healthcheck: test: ["CMD", "curl", "-f", "http://localhost:9090"] diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 792d70800b..40984ea078 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,13 +1,15 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3 +# Version: 1.3.5 -version: "2.1" +version: "2.3" services: test_unit: - build: . + build: + context: . + target: base volumes: - .:/app working_dir: /app @@ -18,7 +20,9 @@ services: user: node test_acceptance: - build: . + build: + context: . + target: base volumes: - .:/app working_dir: /app @@ -27,6 +31,10 @@ services: REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres + AWS_S3_ENDPOINT: http://s3:9090 + AWS_S3_PATH_STYLE: 'true' + AWS_ACCESS_KEY_ID: fake + AWS_SECRET_ACCESS_KEY: fake MOCHA_GREP: ${MOCHA_GREP} LOG_LEVEL: ERROR NODE_ENV: test @@ -35,9 +43,6 @@ services: AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files - AWS_S3_ENDPOINT: http://s3:9090 - AWS_ACCESS_KEY_ID: fake - AWS_SECRET_ACCESS_KEY: fake user: node depends_on: s3: @@ -47,6 +52,6 @@ services: s3: image: adobe/s3mock environment: - - initialBuckets=fake_user_files,fake_template_files,fake_public_files + - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket healthcheck: test: ["CMD", "curl", "-f", "http://localhost:9090"] diff --git a/services/filestore/install_deps.sh b/services/filestore/install_deps.sh index 75f6ff4f86..105e3a0bff 100755 --- a/services/filestore/install_deps.sh +++ b/services/filestore/install_deps.sh @@ -7,8 +7,3 @@ apt-get update apt-get install ghostscript imagemagick optipng --yes rm -rf /var/lib/apt/lists/* - -mkdir /app/user_files/ /app/uploads/ /app/template_files/ -chown -R node:node /app/user_files -chown -R node:node /app/uploads -chown -R node:node /app/template_files From e3613e52db683417d8e123454ae9883430642c27 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 12 Feb 2020 10:34:56 +0000 Subject: [PATCH 403/555] Fix log info assignment in error handler --- services/filestore/app/js/RequestLogger.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app/js/RequestLogger.js b/services/filestore/app/js/RequestLogger.js index 89622f118c..fd72d85d17 100644 --- 
a/services/filestore/app/js/RequestLogger.js +++ b/services/filestore/app/js/RequestLogger.js @@ -21,7 +21,7 @@ class RequestLogger { } static errorHandler(err, req, res, next) { - this._logInfo.error = err + req.requestLogger._logInfo.error = err res .send(err.message) .status(500) From a2e1591e93cd1605563c8f91d10625017b597662 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Wed, 12 Feb 2020 12:37:00 +0000 Subject: [PATCH 404/555] remove unused .travis.yml file --- services/filestore/.travis.yml | 15 --------------- 1 file changed, 15 deletions(-) delete mode 100644 services/filestore/.travis.yml diff --git a/services/filestore/.travis.yml b/services/filestore/.travis.yml deleted file mode 100644 index c9b4f1cf1a..0000000000 --- a/services/filestore/.travis.yml +++ /dev/null @@ -1,15 +0,0 @@ -language: node_js - -node_js: - - "0.10" - -before_install: - - npm install -g grunt-cli - -install: - - npm install - - grunt install - -script: - - grunt test:unit - From f877f51775c56949970811dbc7ef1650b50c7477 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 14 Jan 2020 12:18:56 +0000 Subject: [PATCH 405/555] Rename *PersistorManager to *Persistor --- .../{FSPersistorManager.js => FSPersistor.js} | 0 services/filestore/app/js/PersistorManager.js | 4 +- .../{S3PersistorManager.js => S3Persistor.js} | 0 ...torManagerTests.js => FSPersistorTests.js} | 89 +++++++------------ .../test/unit/js/PersistorManagerTests.js | 24 +++-- ...torManagerTests.js => S3PersistorTests.js} | 89 ++++++++----------- 6 files changed, 80 insertions(+), 126 deletions(-) rename services/filestore/app/js/{FSPersistorManager.js => FSPersistor.js} (100%) rename services/filestore/app/js/{S3PersistorManager.js => S3Persistor.js} (100%) rename services/filestore/test/unit/js/{FSPersistorManagerTests.js => FSPersistorTests.js} (75%) rename services/filestore/test/unit/js/{S3PersistorManagerTests.js => S3PersistorTests.js} (87%) diff --git a/services/filestore/app/js/FSPersistorManager.js b/services/filestore/app/js/FSPersistor.js similarity index 100% rename from services/filestore/app/js/FSPersistorManager.js rename to services/filestore/app/js/FSPersistor.js diff --git a/services/filestore/app/js/PersistorManager.js b/services/filestore/app/js/PersistorManager.js index cca0cf0f36..bd944a03f7 100644 --- a/services/filestore/app/js/PersistorManager.js +++ b/services/filestore/app/js/PersistorManager.js @@ -14,10 +14,10 @@ if (!settings.filestore.backend) { switch (settings.filestore.backend) { case 'aws-sdk': case 's3': - module.exports = require('./S3PersistorManager') + module.exports = require('./S3Persistor') break case 'fs': - module.exports = require('./FSPersistorManager') + module.exports = require('./FSPersistor') break default: throw new Error(`unknown filestore backend: ${settings.filestore.backend}`) diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3Persistor.js similarity index 100% rename from services/filestore/app/js/S3PersistorManager.js rename to services/filestore/app/js/S3Persistor.js diff --git a/services/filestore/test/unit/js/FSPersistorManagerTests.js b/services/filestore/test/unit/js/FSPersistorTests.js similarity index 75% rename from services/filestore/test/unit/js/FSPersistorManagerTests.js rename to services/filestore/test/unit/js/FSPersistorTests.js index 3b3b4bf417..ba343c548c 100644 --- a/services/filestore/test/unit/js/FSPersistorManagerTests.js +++ b/services/filestore/test/unit/js/FSPersistorTests.js @@ -7,9 +7,9 @@ const Errors = 
require('../../../app/js/Errors') chai.use(require('sinon-chai')) chai.use(require('chai-as-promised')) -const modulePath = '../../../app/js/FSPersistorManager.js' +const modulePath = '../../../app/js/FSPersistor.js' -describe('FSPersistorManagerTests', function() { +describe('FSPersistorTests', function() { const stat = { size: 4, isFile: sinon.stub().returns(true) } const fd = 1234 const readStream = 'readStream' @@ -22,7 +22,7 @@ describe('FSPersistorManagerTests', function() { const files = ['animals/wombat.tex', 'vegetables/potato.tex'] const globs = [`${location}/${files[0]}`, `${location}/${files[1]}`] const filteredFilenames = ['animals_wombat.tex', 'vegetables_potato.tex'] - let fs, rimraf, stream, LocalFileWriter, FSPersistorManager, glob + let fs, rimraf, stream, LocalFileWriter, FSPersistor, glob beforeEach(function() { fs = { @@ -41,7 +41,7 @@ describe('FSPersistorManagerTests', function() { deleteFile: sinon.stub().resolves() } } - FSPersistorManager = SandboxedModule.require(modulePath, { + FSPersistor = SandboxedModule.require(modulePath, { requires: { './LocalFileWriter': LocalFileWriter, './Errors': Errors, @@ -57,7 +57,7 @@ describe('FSPersistorManagerTests', function() { describe('sendFile', function() { const localFilesystemPath = '/path/to/local/file' it('should copy the file', async function() { - await FSPersistorManager.promises.sendFile( + await FSPersistor.promises.sendFile( location, files[0], localFilesystemPath @@ -72,33 +72,21 @@ describe('FSPersistorManagerTests', function() { it('should return an error if the file cannot be stored', async function() { stream.pipeline.yields(error) await expect( - FSPersistorManager.promises.sendFile( - location, - files[0], - localFilesystemPath - ) + FSPersistor.promises.sendFile(location, files[0], localFilesystemPath) ).to.eventually.be.rejected.and.have.property('cause', error) }) }) describe('sendStream', function() { it('should send the stream to LocalFileWriter', async function() { - await FSPersistorManager.promises.sendStream( - location, - files[0], - remoteStream - ) + await FSPersistor.promises.sendStream(location, files[0], remoteStream) expect(LocalFileWriter.promises.writeStream).to.have.been.calledWith( remoteStream ) }) it('should delete the temporary file', async function() { - await FSPersistorManager.promises.sendStream( - location, - files[0], - remoteStream - ) + await FSPersistor.promises.sendStream(location, files[0], remoteStream) expect(LocalFileWriter.promises.deleteFile).to.have.been.calledWith( tempFile ) @@ -107,30 +95,26 @@ describe('FSPersistorManagerTests', function() { it('should return the error from LocalFileWriter', async function() { LocalFileWriter.promises.writeStream.rejects(error) await expect( - FSPersistorManager.promises.sendStream(location, files[0], remoteStream) + FSPersistor.promises.sendStream(location, files[0], remoteStream) ).to.eventually.be.rejectedWith(error) }) it('should send the temporary file to the filestore', async function() { - await FSPersistorManager.promises.sendStream( - location, - files[0], - remoteStream - ) + await FSPersistor.promises.sendStream(location, files[0], remoteStream) expect(fs.createReadStream).to.have.been.calledWith(tempFile) }) }) describe('getFileStream', function() { it('should use correct file location', async function() { - await FSPersistorManager.promises.getFileStream(location, files[0], {}) + await FSPersistor.promises.getFileStream(location, files[0], {}) expect(fs.open).to.have.been.calledWith( 
`${location}/${filteredFilenames[0]}` ) }) it('should pass the options to createReadStream', async function() { - await FSPersistorManager.promises.getFileStream(location, files[0], { + await FSPersistor.promises.getFileStream(location, files[0], { start: 0, end: 8 }) @@ -146,18 +130,14 @@ describe('FSPersistorManagerTests', function() { err.code = 'ENOENT' fs.open.yields(err) - await expect( - FSPersistorManager.promises.getFileStream(location, files[0], {}) - ) + await expect(FSPersistor.promises.getFileStream(location, files[0], {})) .to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) .and.have.property('cause', err) }) it('should wrap any other error', async function() { fs.open.yields(error) - await expect( - FSPersistorManager.promises.getFileStream(location, files[0], {}) - ) + await expect(FSPersistor.promises.getFileStream(location, files[0], {})) .to.eventually.be.rejectedWith('failed to open file for streaming') .and.be.an.instanceOf(Errors.ReadError) .and.have.property('cause', error) @@ -181,18 +161,18 @@ describe('FSPersistorManagerTests', function() { it('should return the file size', async function() { expect( - await FSPersistorManager.promises.getFileSize(location, files[0]) + await FSPersistor.promises.getFileSize(location, files[0]) ).to.equal(size) }) it('should throw a NotFoundError if the file does not exist', async function() { await expect( - FSPersistorManager.promises.getFileSize(location, badFilename) + FSPersistor.promises.getFileSize(location, badFilename) ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) }) it('should wrap any other error', async function() { - await expect(FSPersistorManager.promises.getFileSize(location, 'raccoon')) + await expect(FSPersistor.promises.getFileSize(location, 'raccoon')) .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) .and.have.property('cause', error) }) @@ -200,28 +180,28 @@ describe('FSPersistorManagerTests', function() { describe('copyFile', function() { it('Should open the source for reading', async function() { - await FSPersistorManager.promises.copyFile(location, files[0], files[1]) + await FSPersistor.promises.copyFile(location, files[0], files[1]) expect(fs.createReadStream).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) }) it('Should open the target for writing', async function() { - await FSPersistorManager.promises.copyFile(location, files[0], files[1]) + await FSPersistor.promises.copyFile(location, files[0], files[1]) expect(fs.createWriteStream).to.have.been.calledWith( `${location}/${filteredFilenames[1]}` ) }) it('Should pipe the source to the target', async function() { - await FSPersistorManager.promises.copyFile(location, files[0], files[1]) + await FSPersistor.promises.copyFile(location, files[0], files[1]) expect(stream.pipeline).to.have.been.calledWith(readStream, writeStream) }) }) describe('deleteFile', function() { it('Should call unlink with correct options', async function() { - await FSPersistorManager.promises.deleteFile(location, files[0]) + await FSPersistor.promises.deleteFile(location, files[0]) expect(fs.unlink).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) @@ -230,14 +210,14 @@ describe('FSPersistorManagerTests', function() { it('Should propagate the error', async function() { fs.unlink.yields(error) await expect( - FSPersistorManager.promises.deleteFile(location, files[0]) + FSPersistor.promises.deleteFile(location, files[0]) ).to.eventually.be.rejected.and.have.property('cause', error) }) }) 
describe('deleteDirectory', function() { it('Should call rmdir(rimraf) with correct options', async function() { - await FSPersistorManager.promises.deleteDirectory(location, files[0]) + await FSPersistor.promises.deleteDirectory(location, files[0]) expect(rimraf).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) @@ -246,7 +226,7 @@ describe('FSPersistorManagerTests', function() { it('Should propagate the error', async function() { rimraf.yields(error) await expect( - FSPersistorManager.promises.deleteDirectory(location, files[0]) + FSPersistor.promises.deleteDirectory(location, files[0]) ).to.eventually.be.rejected.and.have.property('cause', error) }) }) @@ -266,7 +246,7 @@ describe('FSPersistorManagerTests', function() { }) it('Should call stat with correct options', async function() { - await FSPersistorManager.promises.checkIfFileExists(location, files[0]) + await FSPersistor.promises.checkIfFileExists(location, files[0]) expect(fs.stat).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) @@ -274,23 +254,18 @@ describe('FSPersistorManagerTests', function() { it('Should return true for existing files', async function() { expect( - await FSPersistorManager.promises.checkIfFileExists(location, files[0]) + await FSPersistor.promises.checkIfFileExists(location, files[0]) ).to.equal(true) }) it('Should return false for non-existing files', async function() { expect( - await FSPersistorManager.promises.checkIfFileExists( - location, - badFilename - ) + await FSPersistor.promises.checkIfFileExists(location, badFilename) ).to.equal(false) }) it('should wrap the error if there is a problem', async function() { - await expect( - FSPersistorManager.promises.checkIfFileExists(location, 'llama') - ) + await expect(FSPersistor.promises.checkIfFileExists(location, 'llama')) .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) .and.have.property('cause', error) }) @@ -299,9 +274,7 @@ describe('FSPersistorManagerTests', function() { describe('directorySize', function() { it('should wrap the error', async function() { glob.yields(error) - await expect( - FSPersistorManager.promises.directorySize(location, files[0]) - ) + await expect(FSPersistor.promises.directorySize(location, files[0])) .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) .and.include({ cause: error }) .and.have.property('info') @@ -309,7 +282,7 @@ describe('FSPersistorManagerTests', function() { }) it('should filter the directory name', async function() { - await FSPersistorManager.promises.directorySize(location, files[0]) + await FSPersistor.promises.directorySize(location, files[0]) expect(glob).to.have.been.calledWith( `${location}/${filteredFilenames[0]}_*` ) @@ -317,7 +290,7 @@ describe('FSPersistorManagerTests', function() { it('should sum directory files size', async function() { expect( - await FSPersistorManager.promises.directorySize(location, files[0]) + await FSPersistor.promises.directorySize(location, files[0]) ).to.equal(stat.size * files.length) }) }) diff --git a/services/filestore/test/unit/js/PersistorManagerTests.js b/services/filestore/test/unit/js/PersistorManagerTests.js index 0ecbb22078..cdc9de0f92 100644 --- a/services/filestore/test/unit/js/PersistorManagerTests.js +++ b/services/filestore/test/unit/js/PersistorManagerTests.js @@ -6,18 +6,14 @@ const SandboxedModule = require('sandboxed-module') const modulePath = '../../../app/js/PersistorManager.js' describe('PersistorManager', function() { - let PersistorManager, - FSPersistorManager, - 
S3PersistorManager, - settings, - requires + let PersistorManager, FSPersistor, S3Persistor, settings, requires beforeEach(function() { - FSPersistorManager = { - wrappedMethod: sinon.stub().returns('FSPersistorManager') + FSPersistor = { + wrappedMethod: sinon.stub().returns('FSPersistor') } - S3PersistorManager = { - wrappedMethod: sinon.stub().returns('S3PersistorManager') + S3Persistor = { + wrappedMethod: sinon.stub().returns('S3Persistor') } settings = { @@ -25,8 +21,8 @@ describe('PersistorManager', function() { } requires = { - './S3PersistorManager': S3PersistorManager, - './FSPersistorManager': FSPersistorManager, + './S3Persistor': S3Persistor, + './FSPersistor': FSPersistor, 'settings-sharelatex': settings, 'logger-sharelatex': { log() {}, @@ -40,7 +36,7 @@ describe('PersistorManager', function() { PersistorManager = SandboxedModule.require(modulePath, { requires }) expect(PersistorManager).to.respondTo('wrappedMethod') - expect(PersistorManager.wrappedMethod()).to.equal('S3PersistorManager') + expect(PersistorManager.wrappedMethod()).to.equal('S3Persistor') }) it("should implement the S3 wrapped method when 'aws-sdk' is configured", function() { @@ -48,7 +44,7 @@ describe('PersistorManager', function() { PersistorManager = SandboxedModule.require(modulePath, { requires }) expect(PersistorManager).to.respondTo('wrappedMethod') - expect(PersistorManager.wrappedMethod()).to.equal('S3PersistorManager') + expect(PersistorManager.wrappedMethod()).to.equal('S3Persistor') }) it('should implement the FS wrapped method when FS is configured', function() { @@ -56,7 +52,7 @@ describe('PersistorManager', function() { PersistorManager = SandboxedModule.require(modulePath, { requires }) expect(PersistorManager).to.respondTo('wrappedMethod') - expect(PersistorManager.wrappedMethod()).to.equal('FSPersistorManager') + expect(PersistorManager.wrappedMethod()).to.equal('FSPersistor') }) it('should throw an error when the backend is not configured', function() { diff --git a/services/filestore/test/unit/js/S3PersistorManagerTests.js b/services/filestore/test/unit/js/S3PersistorTests.js similarity index 87% rename from services/filestore/test/unit/js/S3PersistorManagerTests.js rename to services/filestore/test/unit/js/S3PersistorTests.js index daeac66d3f..7a945b4d19 100644 --- a/services/filestore/test/unit/js/S3PersistorManagerTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -1,12 +1,12 @@ const sinon = require('sinon') const chai = require('chai') const { expect } = chai -const modulePath = '../../../app/js/S3PersistorManager.js' +const modulePath = '../../../app/js/S3Persistor.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../app/js/Errors') -describe('S3PersistorManagerTests', function() { +describe('S3PersistorTests', function() { const defaultS3Key = 'frog' const defaultS3Secret = 'prince' const defaultS3Credentials = { @@ -33,7 +33,7 @@ describe('S3PersistorManagerTests', function() { Meter, MeteredStream, ReadStream, - S3PersistorManager, + S3Persistor, S3Client, S3ReadStream, S3NotFoundError, @@ -115,7 +115,7 @@ describe('S3PersistorManagerTests', function() { } S3 = sinon.stub().returns(S3Client) - S3PersistorManager = SandboxedModule.require(modulePath, { + S3Persistor = SandboxedModule.require(modulePath, { requires: { 'aws-sdk/clients/s3': S3, 'settings-sharelatex': settings, @@ -133,7 +133,7 @@ describe('S3PersistorManagerTests', function() { let stream beforeEach(async function() { - stream = await 
S3PersistorManager.promises.getFileStream(bucket, key) + stream = await S3Persistor.promises.getFileStream(bucket, key) }) it('returns a stream', function() { @@ -164,7 +164,7 @@ describe('S3PersistorManagerTests', function() { let stream beforeEach(async function() { - stream = await S3PersistorManager.promises.getFileStream(bucket, key, { + stream = await S3Persistor.promises.getFileStream(bucket, key, { start: 5, end: 10 }) @@ -201,7 +201,7 @@ describe('S3PersistorManagerTests', function() { auth_secret: alternativeSecret } - stream = await S3PersistorManager.promises.getFileStream(bucket, key) + stream = await S3Persistor.promises.getFileStream(bucket, key) }) it('returns a stream', function() { @@ -220,16 +220,13 @@ describe('S3PersistorManagerTests', function() { }) it('caches the credentials', async function() { - stream = await S3PersistorManager.promises.getFileStream(bucket, key) + stream = await S3Persistor.promises.getFileStream(bucket, key) expect(S3).to.have.been.calledOnceWith(alternativeS3Credentials) }) it('uses the default credentials for an unknown bucket', async function() { - stream = await S3PersistorManager.promises.getFileStream( - 'anotherBucket', - key - ) + stream = await S3Persistor.promises.getFileStream('anotherBucket', key) expect(S3).to.have.been.calledTwice expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials) @@ -237,14 +234,8 @@ describe('S3PersistorManagerTests', function() { }) it('caches the default credentials', async function() { - stream = await S3PersistorManager.promises.getFileStream( - 'anotherBucket', - key - ) - stream = await S3PersistorManager.promises.getFileStream( - 'anotherBucket', - key - ) + stream = await S3Persistor.promises.getFileStream('anotherBucket', key) + stream = await S3Persistor.promises.getFileStream('anotherBucket', key) expect(S3).to.have.been.calledTwice expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials) @@ -256,7 +247,7 @@ describe('S3PersistorManagerTests', function() { delete settings.filestore.s3.secret await expect( - S3PersistorManager.promises.getFileStream('anotherBucket', key) + S3Persistor.promises.getFileStream('anotherBucket', key) ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.SettingsError) }) }) @@ -268,7 +259,7 @@ describe('S3PersistorManagerTests', function() { S3ReadStream.on = sinon.stub() S3ReadStream.on.withArgs('error').yields(S3NotFoundError) try { - stream = await S3PersistorManager.promises.getFileStream(bucket, key) + stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { error = err } @@ -298,7 +289,7 @@ describe('S3PersistorManagerTests', function() { S3ReadStream.on = sinon.stub() S3ReadStream.on.withArgs('error').yields(S3AccessDeniedError) try { - stream = await S3PersistorManager.promises.getFileStream(bucket, key) + stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { error = err } @@ -328,7 +319,7 @@ describe('S3PersistorManagerTests', function() { S3ReadStream.on = sinon.stub() S3ReadStream.on.withArgs('error').yields(genericError) try { - stream = await S3PersistorManager.promises.getFileStream(bucket, key) + stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { error = err } @@ -357,7 +348,7 @@ describe('S3PersistorManagerTests', function() { let size beforeEach(async function() { - size = await S3PersistorManager.promises.getFileSize(bucket, key) + size = await S3Persistor.promises.getFileSize(bucket, key) }) it('should return the object size', function() { @@ 
-380,7 +371,7 @@ describe('S3PersistorManagerTests', function() { promise: sinon.stub().rejects(S3NotFoundError) }) try { - await S3PersistorManager.promises.getFileSize(bucket, key) + await S3Persistor.promises.getFileSize(bucket, key) } catch (err) { error = err } @@ -403,7 +394,7 @@ describe('S3PersistorManagerTests', function() { promise: sinon.stub().rejects(genericError) }) try { - await S3PersistorManager.promises.getFileSize(bucket, key) + await S3Persistor.promises.getFileSize(bucket, key) } catch (err) { error = err } @@ -422,7 +413,7 @@ describe('S3PersistorManagerTests', function() { describe('sendStream', function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3PersistorManager.promises.sendStream(bucket, key, ReadStream) + return S3Persistor.promises.sendStream(bucket, key, ReadStream) }) it('should upload the stream', function() { @@ -449,7 +440,7 @@ describe('S3PersistorManagerTests', function() { promise: sinon.stub().rejects(genericError) }) try { - await S3PersistorManager.promises.sendStream(bucket, key, ReadStream) + await S3Persistor.promises.sendStream(bucket, key, ReadStream) } catch (err) { error = err } @@ -464,7 +455,7 @@ describe('S3PersistorManagerTests', function() { describe('sendFile', function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3PersistorManager.promises.sendFile(bucket, key, filename) + return S3Persistor.promises.sendFile(bucket, key, filename) }) it('should create a read stream for the file', function() { @@ -486,7 +477,7 @@ describe('S3PersistorManagerTests', function() { beforeEach(async function() { Fs.createReadStream = sinon.stub().throws(FileNotFoundError) try { - await S3PersistorManager.promises.sendFile(bucket, key, filename) + await S3Persistor.promises.sendFile(bucket, key, filename) } catch (err) { error = err } @@ -507,7 +498,7 @@ describe('S3PersistorManagerTests', function() { beforeEach(async function() { Fs.createReadStream = sinon.stub().throws(genericError) try { - await S3PersistorManager.promises.sendFile(bucket, key, filename) + await S3Persistor.promises.sendFile(bucket, key, filename) } catch (err) { error = err } @@ -526,7 +517,7 @@ describe('S3PersistorManagerTests', function() { describe('copyFile', function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3PersistorManager.promises.copyFile(bucket, key, destKey) + return S3Persistor.promises.copyFile(bucket, key, destKey) }) it('should copy the object', function() { @@ -546,7 +537,7 @@ describe('S3PersistorManagerTests', function() { promise: sinon.stub().rejects(S3NotFoundError) }) try { - await S3PersistorManager.promises.copyFile(bucket, key, destKey) + await S3Persistor.promises.copyFile(bucket, key, destKey) } catch (err) { error = err } @@ -561,7 +552,7 @@ describe('S3PersistorManagerTests', function() { describe('deleteFile', function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3PersistorManager.promises.deleteFile(bucket, key) + return S3Persistor.promises.deleteFile(bucket, key) }) it('should delete the object', function() { @@ -580,7 +571,7 @@ describe('S3PersistorManagerTests', function() { promise: sinon.stub().rejects(S3NotFoundError) }) try { - await S3PersistorManager.promises.deleteFile(bucket, key) + await S3Persistor.promises.deleteFile(bucket, key) } catch (err) { error = err } @@ -595,7 +586,7 @@ describe('S3PersistorManagerTests', function() { describe('deleteDirectory', 
function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3PersistorManager.promises.deleteDirectory(bucket, key) + return S3Persistor.promises.deleteDirectory(bucket, key) }) it('should list the objects in the directory', function() { @@ -621,7 +612,7 @@ describe('S3PersistorManagerTests', function() { S3Client.listObjects = sinon .stub() .returns({ promise: sinon.stub().resolves({ Contents: [] }) }) - return S3PersistorManager.promises.deleteDirectory(bucket, key) + return S3Persistor.promises.deleteDirectory(bucket, key) }) it('should list the objects in the directory', function() { @@ -644,7 +635,7 @@ describe('S3PersistorManagerTests', function() { .stub() .returns({ promise: sinon.stub().rejects(genericError) }) try { - await S3PersistorManager.promises.deleteDirectory(bucket, key) + await S3Persistor.promises.deleteDirectory(bucket, key) } catch (err) { error = err } @@ -671,7 +662,7 @@ describe('S3PersistorManagerTests', function() { .stub() .returns({ promise: sinon.stub().rejects(genericError) }) try { - await S3PersistorManager.promises.deleteDirectory(bucket, key) + await S3Persistor.promises.deleteDirectory(bucket, key) } catch (err) { error = err } @@ -692,7 +683,7 @@ describe('S3PersistorManagerTests', function() { let size beforeEach(async function() { - size = await S3PersistorManager.promises.directorySize(bucket, key) + size = await S3Persistor.promises.directorySize(bucket, key) }) it('should list the objects in the directory', function() { @@ -714,7 +705,7 @@ describe('S3PersistorManagerTests', function() { S3Client.listObjects = sinon .stub() .returns({ promise: sinon.stub().resolves({ Contents: [] }) }) - size = await S3PersistorManager.promises.directorySize(bucket, key) + size = await S3Persistor.promises.directorySize(bucket, key) }) it('should list the objects in the directory', function() { @@ -737,7 +728,7 @@ describe('S3PersistorManagerTests', function() { .stub() .returns({ promise: sinon.stub().rejects(genericError) }) try { - await S3PersistorManager.promises.directorySize(bucket, key) + await S3Persistor.promises.directorySize(bucket, key) } catch (err) { error = err } @@ -758,10 +749,7 @@ describe('S3PersistorManagerTests', function() { let exists beforeEach(async function() { - exists = await S3PersistorManager.promises.checkIfFileExists( - bucket, - key - ) + exists = await S3Persistor.promises.checkIfFileExists(bucket, key) }) it('should get the object header', function() { @@ -783,10 +771,7 @@ describe('S3PersistorManagerTests', function() { S3Client.headObject = sinon .stub() .returns({ promise: sinon.stub().rejects(S3NotFoundError) }) - exists = await S3PersistorManager.promises.checkIfFileExists( - bucket, - key - ) + exists = await S3Persistor.promises.checkIfFileExists(bucket, key) }) it('should get the object header', function() { @@ -809,7 +794,7 @@ describe('S3PersistorManagerTests', function() { .stub() .returns({ promise: sinon.stub().rejects(genericError) }) try { - await S3PersistorManager.promises.checkIfFileExists(bucket, key) + await S3Persistor.promises.checkIfFileExists(bucket, key) } catch (err) { error = err } From 2625e03a31d8bb766dde35aee40aa98f48f61c87 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 16 Jan 2020 16:25:12 +0000 Subject: [PATCH 406/555] Add MigrationPersistor for sending 404 requests to a fallback persistor --- services/filestore/app/js/FSPersistor.js | 7 +- .../filestore/app/js/MigrationPersistor.js | 113 +++++ services/filestore/app/js/PersistorManager.js | 35 
+- services/filestore/app/js/S3Persistor.js | 7 +- .../filestore/config/settings.defaults.coffee | 61 +-- services/filestore/npm-shrinkwrap.json | 6 + services/filestore/package.json | 3 +- .../test/acceptance/js/FilestoreApp.js | 1 + .../test/acceptance/js/FilestoreTests.js | 405 ++++++++++++++- .../test/unit/js/MigrationPersistorTests.js | 463 ++++++++++++++++++ 10 files changed, 1038 insertions(+), 63 deletions(-) create mode 100644 services/filestore/app/js/MigrationPersistor.js create mode 100644 services/filestore/test/unit/js/MigrationPersistorTests.js diff --git a/services/filestore/app/js/FSPersistor.js b/services/filestore/app/js/FSPersistor.js index 862acb9bcb..2ba65f06d2 100644 --- a/services/filestore/app/js/FSPersistor.js +++ b/services/filestore/app/js/FSPersistor.js @@ -103,12 +103,17 @@ async function deleteFile(location, name) { try { await fsUnlink(`${location}/${filteredName}`) } catch (err) { - throw _wrapError( + const wrappedError = _wrapError( err, 'failed to delete file', { location, filteredName }, WriteError ) + if (!(wrappedError instanceof NotFoundError)) { + // S3 doesn't give us a 404 when a file wasn't there to be deleted, so we + // should be consistent here as well + throw wrappedError + } } } diff --git a/services/filestore/app/js/MigrationPersistor.js b/services/filestore/app/js/MigrationPersistor.js new file mode 100644 index 0000000000..9f7a834f31 --- /dev/null +++ b/services/filestore/app/js/MigrationPersistor.js @@ -0,0 +1,113 @@ +const metrics = require('metrics-sharelatex') +const Settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') +const { callbackify } = require('util') +const { NotFoundError } = require('./Errors') + +// Persistor that wraps two other persistors. Talks to the 'primary' by default, +// but will fall back to an older persistor in the case of a not-found error. +// If `Settings.filestore.fallback.copyOnMiss` is set, this will copy files from the fallback +// to the primary, in the event that they are missing. +// +// It is unlikely that the bucket/location name will be the same on the fallback +// as the primary. The bucket names should be overridden in `Settings.filestore.fallback.buckets` +// e.g. 
+// Settings.filestore.fallback.buckets = { +// myBucketOnS3: 'myBucketOnGCS' +// }s + +module.exports = function(primary, fallback) { + function _wrapMethodOnBothPersistors(method) { + return async function(bucket, key, ...moreArgs) { + const fallbackBucket = _getFallbackBucket(bucket) + + await Promise.all([ + primary.promises[method](bucket, key, ...moreArgs), + fallback.promises[method](fallbackBucket, key, ...moreArgs) + ]) + } + } + + async function copyFileWithFallback(bucket, sourceKey, destKey) { + try { + return await primary.promises.copyFile(bucket, sourceKey, destKey) + } catch (err) { + if (err instanceof NotFoundError) { + const fallbackBucket = _getFallbackBucket(bucket) + return _copyFileFromFallback(fallbackBucket, bucket, sourceKey, destKey) + } + } + } + + function _getFallbackBucket(bucket) { + return ( + Settings.filestore.fallback.buckets && + Settings.filestore.fallback.buckets[bucket] + ) + } + + function _wrapFallbackMethod(method, enableCopy = true) { + return async function(bucket, key, ...moreArgs) { + try { + return await primary.promises[method](bucket, key, ...moreArgs) + } catch (err) { + if (err instanceof NotFoundError) { + const fallbackBucket = _getFallbackBucket(bucket) + if (Settings.filestore.fallback.copyOnMiss && enableCopy) { + // run in background + _copyFileFromFallback(fallbackBucket, bucket, key, key).catch( + err => { + logger.warn({ err }, 'failed to copy file from fallback') + } + ) + } + return fallback.promises[method](fallbackBucket, key, ...moreArgs) + } + throw err + } + } + } + + async function _copyFileFromFallback( + sourceBucket, + destBucket, + sourceKey, + destKey + ) { + const sourceStream = await fallback.promises.getFileStream( + sourceBucket, + sourceKey, + {} + ) + + await primary.promises.sendStream(destBucket, destKey, sourceStream) + metrics.inc('fallback.copy') + } + + return { + primaryPersistor: primary, + fallbackPersistor: fallback, + sendFile: primary.sendFile, + sendStream: primary.sendStream, + getFileStream: callbackify(_wrapFallbackMethod('getFileStream')), + deleteDirectory: callbackify( + _wrapMethodOnBothPersistors('deleteDirectory') + ), + getFileSize: callbackify(_wrapFallbackMethod('getFileSize')), + deleteFile: callbackify(_wrapMethodOnBothPersistors('deleteFile')), + copyFile: callbackify(copyFileWithFallback), + checkIfFileExists: callbackify(_wrapFallbackMethod('checkIfFileExists')), + directorySize: callbackify(_wrapFallbackMethod('directorySize', false)), + promises: { + sendFile: primary.promises.sendFile, + sendStream: primary.promises.sendStream, + getFileStream: _wrapFallbackMethod('getFileStream'), + deleteDirectory: _wrapMethodOnBothPersistors('deleteDirectory'), + getFileSize: _wrapFallbackMethod('getFileSize'), + deleteFile: _wrapMethodOnBothPersistors('deleteFile'), + copyFile: copyFileWithFallback, + checkIfFileExists: _wrapFallbackMethod('checkIfFileExists'), + directorySize: _wrapFallbackMethod('directorySize', false) + } + } +} diff --git a/services/filestore/app/js/PersistorManager.js b/services/filestore/app/js/PersistorManager.js index bd944a03f7..32f6cd41f8 100644 --- a/services/filestore/app/js/PersistorManager.js +++ b/services/filestore/app/js/PersistorManager.js @@ -3,7 +3,8 @@ const logger = require('logger-sharelatex') logger.log( { - backend: settings.filestore.backend + backend: settings.filestore.backend, + fallback: settings.filestore.fallback && settings.filestore.fallback.backend }, 'Loading backend' ) @@ -11,14 +12,26 @@ if (!settings.filestore.backend) { throw new 
Error('no backend specified - config incomplete') } -switch (settings.filestore.backend) { - case 'aws-sdk': - case 's3': - module.exports = require('./S3Persistor') - break - case 'fs': - module.exports = require('./FSPersistor') - break - default: - throw new Error(`unknown filestore backend: ${settings.filestore.backend}`) +function getPersistor(backend) { + switch (backend) { + case 'aws-sdk': + case 's3': + return require('./S3Persistor') + case 'fs': + return require('./FSPersistor') + default: + throw new Error(`unknown filestore backend: ${backend}`) + } } + +let persistor = getPersistor(settings.filestore.backend) + +if (settings.filestore.fallback && settings.filestore.fallback.backend) { + const migrationPersistor = require('./MigrationPersistor') + persistor = migrationPersistor( + persistor, + getPersistor(settings.filestore.fallback.backend) + ) +} + +module.exports = persistor diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index 52cadfbfbd..6d22823401 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -173,6 +173,7 @@ async function deleteFile(bucketName, key) { .deleteObject({ Bucket: bucketName, Key: key }) .promise() } catch (err) { + // s3 does not give us a NotFoundError here throw _wrapError( err, 'failed to delete file in S3', @@ -232,8 +233,12 @@ async function directorySize(bucketName, key) { } function _wrapError(error, message, params, ErrorType) { + // the AWS client can return one of 'NoSuchKey', 'NotFound' or 404 (integer) + // when something is not found, depending on the endpoint if ( - ['NoSuchKey', 'NotFound', 'AccessDenied', 'ENOENT'].includes(error.code) + ['NoSuchKey', 'NotFound', 404, 'AccessDenied', 'ENOENT'].includes( + error.code + ) ) { return new NotFoundError({ message: 'no such file', diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 206f932a76..a4a2df2d24 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -7,6 +7,19 @@ if process.env['AWS_KEY'] && !process.env['AWS_ACCESS_KEY_ID'] if process.env['AWS_SECRET'] && !process.env['AWS_SECRET_ACCESS_KEY'] process.env['AWS_SECRET_ACCESS_KEY'] = process.env['AWS_SECRET'] +# pre-backend setting, fall back to old behaviour +unless process.env['BACKEND']? + if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? + process.env['BACKEND'] = "s3" + process.env['USER_FILES_BUCKET_NAME'] = process.env['AWS_S3_USER_FILES_BUCKET_NAME'] + process.env['TEMPLATE_FILES_BUCKET_NAME'] = process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME'] + process.env['PUBLIC_FILES_BUCKET_NAME'] = process.env['AWS_S3_PUBLIC_FILES_BUCKET_NAME'] + else + process.env['BACKEND'] = "fs" + process.env['USER_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../user_files") + process.env['TEMPLATE_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../public_files") + process.env['PUBLIC_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../template_files") + settings = internal: filestore: @@ -18,38 +31,28 @@ settings = # Choices are # s3 - Amazon S3 # fs - local filesystem - if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? - backend: "s3" - s3: + backend: process.env['BACKEND'] + + s3: + if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? 
key: process.env['AWS_ACCESS_KEY_ID'] secret: process.env['AWS_SECRET_ACCESS_KEY'] endpoint: process.env['AWS_S3_ENDPOINT'] - stores: - user_files: process.env['AWS_S3_USER_FILES_BUCKET_NAME'] - template_files: process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME'] - public_files: process.env['AWS_S3_PUBLIC_FILES_BUCKET_NAME'] - # if you are using S3, then fill in your S3 details below, - # or use env var with the same structure. - # s3: - # key: "" # default - # secret: "" # default - # - # s3BucketCreds: - # bucketname1: # secrets for bucketname1 - # auth_key: "" - # auth_secret: "" - # bucketname2: # secrets for bucketname2... - s3BucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS']? - else - backend: "fs" - stores: - # - # For local filesystem this is the directory to store the files in. - # Must contain full path, e.g. "/var/lib/sharelatex/data". - # This path must exist, not be tmpfs and be writable to by the user sharelatex is run as. - user_files: Path.resolve(__dirname + "/../user_files") - public_files: Path.resolve(__dirname + "/../public_files") - template_files: Path.resolve(__dirname + "/../template_files") + + stores: + user_files: process.env['USER_FILES_BUCKET_NAME'] + template_files: process.env['TEMPLATE_FILES_BUCKET_NAME'] + public_files: process.env['PUBLIC_FILES_BUCKET_NAME'] + + s3BucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS']? + + fallback: + if process.env['FALLBACK_BACKEND']? + backend: process.env['FALLBACK_BACKEND'] + # mapping of bucket names on the fallback, to bucket names on the primary. + # e.g. { myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } + buckets: JSON.parse process.env['FALLBACK_BUCKET_MAPPING'] if process.env['FALLBACK_BUCKET_MAPPING']? 
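      # Illustrative sketch only (the bucket name and path below are hypothetical,
      # not values used elsewhere in this repository): with an S3 primary and a
      # filesystem fallback, the whole fallback block can be driven from the
      # environment, e.g.
      #   FALLBACK_BACKEND=fs
      #   FALLBACK_BUCKET_MAPPING='{"myS3UserFilesBucketName": "/var/lib/sharelatex/user_files"}'
      #   COPY_ON_MISS=true
      # The mapping is keyed by the primary bucket name (which is how
      # MigrationPersistor looks entries up), and FALLBACK_BUCKET_MAPPING must
      # contain valid JSON because it is passed straight to JSON.parse above.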
+ copyOnMiss: if process.env['COPY_ON_MISS'] == 'true' then true else false path: uploadFolder: Path.resolve(__dirname + "/../uploads") diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index 8d78271caa..b343d6ad2c 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -5055,6 +5055,12 @@ "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=" }, + "streamifier": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/streamifier/-/streamifier-0.1.1.tgz", + "integrity": "sha1-l+mNj6TRBdYqJpHR3AfoINuN/E8=", + "dev": true + }, "string-width": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 14e35cd8a2..303393bd56 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -68,6 +68,7 @@ "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "2.0.3", "sinon": "7.1.1", - "sinon-chai": "^3.3.0" + "sinon-chai": "^3.3.0", + "streamifier": "^0.1.1" } } diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 718d53bcf8..20564e2d40 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -56,6 +56,7 @@ class FilestoreApp { } this.initing = false + this.persistor = require('../../../app/js/PersistorManager') } async waitForInit() { diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index d7dfbce57c..5a0de3abd8 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -11,6 +11,7 @@ const S3 = require('aws-sdk/clients/s3') const Stream = require('stream') const request = require('request') const { promisify } = require('util') +const streamifier = require('streamifier') chai.use(require('chai-as-promised')) const fsWriteFile = promisify(fs.writeFile) @@ -25,6 +26,19 @@ async function getMetric(filestoreUrl, metric) { return parseInt(found ? found[1] : 0) || 0 } +if (!process.env.AWS_ACCESS_KEY_ID) { + throw new Error('please provide credentials for the AWS S3 test server') +} + +function streamToString(stream) { + const chunks = [] + return new Promise((resolve, reject) => { + stream.on('data', chunk => chunks.push(chunk)) + stream.on('error', reject) + stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8'))) + }) +} + // store settings for multiple backends, so that we can test each one. 
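// A rough sketch of the environment that the S3-backed configurations below
// assume; every value here is hypothetical and only the variable names are
// taken from this file (e.g. when pointing the suite at a local S3-compatible
// test server):
//   AWS_ACCESS_KEY_ID=fake
//   AWS_SECRET_ACCESS_KEY=fake
//   AWS_S3_ENDPOINT=http://localhost:4569
//   AWS_S3_USER_FILES_BUCKET_NAME=fake-user-files
//   AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake-template-files
//   AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake-public-files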
// fs will always be available - add others if they are configured const BackendSettings = { @@ -35,11 +49,8 @@ const BackendSettings = { public_files: Path.resolve(__dirname, '../../../public_files'), template_files: Path.resolve(__dirname, '../../../template_files') } - } -} - -if (process.env.AWS_ACCESS_KEY_ID) { - BackendSettings.S3Persistor = { + }, + S3Persistor: { backend: 's3', s3: { key: process.env.AWS_ACCESS_KEY_ID, @@ -52,6 +63,62 @@ if (process.env.AWS_ACCESS_KEY_ID) { template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME, public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME } + }, + FallbackS3ToFSPersistor: { + backend: 's3', + s3: { + key: process.env.AWS_ACCESS_KEY_ID, + secret: process.env.AWS_SECRET_ACCESS_KEY, + endpoint: process.env.AWS_S3_ENDPOINT, + pathStyle: true + }, + stores: { + user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, + template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME, + public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME + }, + fallback: { + backend: 'fs', + buckets: { + [process.env.AWS_S3_USER_FILES_BUCKET_NAME]: Path.resolve( + __dirname, + '../../../user_files' + ), + [process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME]: Path.resolve( + __dirname, + '../../../public_files' + ), + [process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME]: Path.resolve( + __dirname, + '../../../template_files' + ) + } + } + }, + FallbackFSToS3Persistor: { + backend: 'fs', + s3: { + key: process.env.AWS_ACCESS_KEY_ID, + secret: process.env.AWS_SECRET_ACCESS_KEY, + endpoint: process.env.AWS_S3_ENDPOINT, + pathStyle: true + }, + stores: { + user_files: Path.resolve(__dirname, '../../../user_files'), + public_files: Path.resolve(__dirname, '../../../public_files'), + template_files: Path.resolve(__dirname, '../../../template_files') + }, + fallback: { + backend: 's3', + buckets: { + [Path.resolve(__dirname, '../../../user_files')]: process.env + .AWS_S3_USER_FILES_BUCKET_NAME, + [Path.resolve(__dirname, '../../../public_files')]: process.env + .AWS_S3_TEMPLATE_FILES_BUCKET_NAME, + [Path.resolve(__dirname, '../../../template_files')]: process.env + .AWS_S3_PUBLIC_FILES_BUCKET_NAME + } + } } } @@ -100,23 +167,21 @@ describe('Filestore', function() { }) describe('with a file on the server', function() { - let fileId, fileUrl + let fileId, fileUrl, constantFileContent const localFileReadPath = '/tmp/filestore_acceptance_tests_file_read.txt' - const constantFileContent = [ - 'hello world', - `line 2 goes here ${Math.random()}`, - 'there are 3 lines in all' - ].join('\n') - - before(async function() { - await fsWriteFile(localFileReadPath, constantFileContent) - }) beforeEach(async function() { fileId = Math.random() fileUrl = `${filestoreUrl}/project/acceptance_tests/file/${directoryName}%2F${fileId}` + constantFileContent = [ + 'hello world', + `line 2 goes here ${Math.random()}`, + 'there are 3 lines in all' + ].join('\n') + + await fsWriteFile(localFileReadPath, constantFileContent) const writeStream = request.post(fileUrl) const readStream = fs.createReadStream(localFileReadPath) @@ -177,7 +242,7 @@ describe('Filestore', function() { }) it('should be able to copy files', async function() { - const newProjectID = 'acceptance_tests_copyied_project' + const newProjectID = 'acceptance_tests_copied_project' const newFileId = Math.random() const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}` const opts = { @@ -198,6 +263,18 @@ describe('Filestore', function() { expect(response.body).to.equal(constantFileContent) 
}) + it('should be able to overwrite the file', async function() { + const newContent = `here is some different content, ${Math.random()}` + const writeStream = request.post(fileUrl) + const readStream = streamifier.createReadStream(newContent) + // hack to consume the result to ensure the http request has been fully processed + const resultStream = fs.createWriteStream('/dev/null') + await pipeline(readStream, writeStream, resultStream) + + const response = await rp.get(fileUrl) + expect(response.body).to.equal(newContent) + }) + if (backend === 'S3Persistor') { it('should record an egress metric for the upload', async function() { const metric = await getMetric(filestoreUrl, 's3_egress') @@ -292,10 +369,10 @@ describe('Filestore', function() { if (backend === 'S3Persistor') { describe('with a file in a specific bucket', function() { - let constantFileContents, fileId, fileUrl, bucketName + let constantFileContent, fileId, fileUrl, bucketName beforeEach(async function() { - constantFileContents = `This is a file in a different S3 bucket ${Math.random()}` + constantFileContent = `This is a file in a different S3 bucket ${Math.random()}` fileId = Math.random().toString() bucketName = Math.random().toString() fileUrl = `${filestoreUrl}/bucket/${bucketName}/key/${fileId}` @@ -320,14 +397,302 @@ describe('Filestore', function() { .upload({ Bucket: bucketName, Key: fileId, - Body: constantFileContents + Body: constantFileContent }) .promise() }) it('should get the file from the specified bucket', async function() { const response = await rp.get(fileUrl) - expect(response.body).to.equal(constantFileContents) + expect(response.body).to.equal(constantFileContent) + }) + }) + } + + if (BackendSettings[backend].fallback) { + describe('with a fallback', function() { + async function uploadStringToPersistor( + persistor, + bucket, + key, + content + ) { + const fileStream = streamifier.createReadStream(content) + await persistor.promises.sendStream(bucket, key, fileStream) + } + + async function getStringFromPersistor(persistor, bucket, key) { + const stream = await persistor.promises.getFileStream( + bucket, + key, + {} + ) + return streamToString(stream) + } + + async function expectPersistorToHaveFile( + persistor, + bucket, + key, + content + ) { + const foundContent = await getStringFromPersistor( + persistor, + bucket, + key + ) + expect(foundContent).to.equal(content) + } + + async function expectPersistorNotToHaveFile(persistor, bucket, key) { + await expect( + getStringFromPersistor(persistor, bucket, key) + ).to.eventually.have.been.rejected.with.property( + 'name', + 'NotFoundError' + ) + } + + let constantFileContent, + fileId, + fileKey, + fileUrl, + bucket, + fallbackBucket + const projectId = 'acceptance_tests' + + beforeEach(function() { + constantFileContent = `This is yet more file content ${Math.random()}` + fileId = Math.random().toString() + fileKey = `${projectId}/${directoryName}/${fileId}` + fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` + + bucket = Settings.filestore.stores.user_files + fallbackBucket = Settings.filestore.fallback.buckets[bucket] + }) + + describe('with a file in the fallback bucket', function() { + beforeEach(async function() { + await uploadStringToPersistor( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + it('should not find file in the primary', async function() { + await expectPersistorNotToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey + ) + }) + + 
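          // A sketch of why these tests can inspect the persistors directly,
          // assuming the PersistorManager wiring added earlier in this patch:
          // when Settings.filestore.fallback.backend is set, app.persistor is
          // roughly equivalent to
          //
          //   const migrationPersistor = require('../../../app/js/MigrationPersistor')
          //   const persistor = migrationPersistor(
          //     getPersistor(Settings.filestore.backend),          // exposed as primaryPersistor
          //     getPersistor(Settings.filestore.fallback.backend)  // exposed as fallbackPersistor
          //   )
          //
          // which is why app.persistor.primaryPersistor and
          // app.persistor.fallbackPersistor are available to the helpers above.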
it('should find the file in the fallback', async function() { + await expectPersistorToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + it('should fetch the file', async function() { + const res = await rp.get(fileUrl) + expect(res.body).to.equal(constantFileContent) + }) + + it('should not copy the file to the primary', async function() { + await rp.get(fileUrl) + + await expectPersistorNotToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey + ) + }) + + describe('when copyOnMiss is enabled', function() { + beforeEach(function() { + Settings.filestore.fallback.copyOnMiss = true + }) + + it('copies the file to the primary', async function() { + await rp.get(fileUrl) + // wait for the file to copy in the background + await promisify(setTimeout)(1000) + + await expectPersistorToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey, + constantFileContent + ) + }) + }) + + describe('when copying a file', function() { + let newFileId, newFileUrl, newFileKey + const newProjectID = 'acceptance_tests_copied_project' + + beforeEach(async function() { + newFileId = Math.random() + newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}` + newFileKey = `${newProjectID}/${directoryName}/${newFileId}` + + const opts = { + method: 'put', + uri: newFileUrl, + json: { + source: { + project_id: 'acceptance_tests', + file_id: `${directoryName}/${fileId}` + } + } + } + + const response = await rp(opts) + expect(response.statusCode).to.equal(200) + }) + + it('should leave the old file in the old bucket', async function() { + await expectPersistorToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + it('should not create a new file in the old bucket', async function() { + await expectPersistorNotToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + newFileKey + ) + }) + + it('should not copy the old file to the new bucket', async function() { + await expectPersistorNotToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey + ) + }) + + it('should create a new file in the new bucket', async function() { + await expectPersistorToHaveFile( + app.persistor.primaryPersistor, + bucket, + newFileKey, + constantFileContent + ) + }) + }) + }) + + describe('when sending a file', function() { + beforeEach(async function() { + const writeStream = request.post(fileUrl) + const readStream = streamifier.createReadStream( + constantFileContent + ) + // hack to consume the result to ensure the http request has been fully processed + const resultStream = fs.createWriteStream('/dev/null') + await pipeline(readStream, writeStream, resultStream) + }) + + it('should store the file on the primary', async function() { + await expectPersistorToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey, + constantFileContent + ) + }) + + it('should not store the file on the fallback', async function() { + await expectPersistorNotToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + `acceptance_tests/${directoryName}/${fileId}` + ) + }) + }) + + describe('when deleting a file', function() { + describe('when the file exists on the primary', function() { + beforeEach(async function() { + await uploadStringToPersistor( + app.persistor.primaryPersistor, + bucket, + fileKey, + constantFileContent + ) + }) + + it('should delete the file', async function() { + const response = await rp.del(fileUrl) + 
expect(response.statusCode).to.equal(204) + await expect( + rp.get(fileUrl) + ).to.eventually.be.rejected.and.have.property('statusCode', 404) + }) + }) + + describe('when the file exists on the fallback', function() { + beforeEach(async function() { + await uploadStringToPersistor( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + it('should delete the file', async function() { + const response = await rp.del(fileUrl) + expect(response.statusCode).to.equal(204) + await expect( + rp.get(fileUrl) + ).to.eventually.be.rejected.and.have.property('statusCode', 404) + }) + }) + + describe('when the file exists on both the primary and the fallback', function() { + beforeEach(async function() { + await uploadStringToPersistor( + app.persistor.primaryPersistor, + bucket, + fileKey, + constantFileContent + ) + await uploadStringToPersistor( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + it('should delete the files', async function() { + const response = await rp.del(fileUrl) + expect(response.statusCode).to.equal(204) + await expect( + rp.get(fileUrl) + ).to.eventually.be.rejected.and.have.property('statusCode', 404) + }) + }) + + describe('when the file does not exist', function() { + it('should return return 204', async function() { + // S3 doesn't give us a 404 when the object doesn't exist, so to stay + // consistent we merrily return 204 ourselves here as well + const response = await rp.del(fileUrl) + expect(response.statusCode).to.equal(204) + }) + }) }) }) } diff --git a/services/filestore/test/unit/js/MigrationPersistorTests.js b/services/filestore/test/unit/js/MigrationPersistorTests.js new file mode 100644 index 0000000000..1cc8324d46 --- /dev/null +++ b/services/filestore/test/unit/js/MigrationPersistorTests.js @@ -0,0 +1,463 @@ +const sinon = require('sinon') +const chai = require('chai') +const { expect } = chai +const modulePath = '../../../app/js/MigrationPersistor.js' +const SandboxedModule = require('sandboxed-module') + +const Errors = require('../../../app/js/Errors') + +// Not all methods are tested here, but a method with each type of wrapping has +// tests. Specifically, the following wrapping methods are tested here: +// getFileStream: _wrapFallbackMethod +// sendStream: forward-to-primary +// deleteFile: _wrapMethodOnBothPersistors +// copyFile: copyFileWithFallback + +describe('MigrationPersistorTests', function() { + const bucket = 'womBucket' + const fallbackBucket = 'bucKangaroo' + const key = 'monKey' + const destKey = 'donKey' + const genericError = new Error('guru meditation error') + const notFoundError = new Errors.NotFoundError('not found') + const size = 33 + const fileStream = 'fileStream' + + function newPersistor(hasFile) { + return { + promises: { + sendFile: sinon.stub().resolves(), + sendStream: sinon.stub().resolves(), + getFileStream: hasFile + ? sinon.stub().resolves(fileStream) + : sinon.stub().rejects(notFoundError), + deleteDirectory: sinon.stub().resolves(), + getFileSize: hasFile + ? sinon.stub().resolves(size) + : sinon.stub().rejects(notFoundError), + deleteFile: sinon.stub().resolves(), + copyFile: hasFile + ? sinon.stub().resolves() + : sinon.stub().rejects(notFoundError), + checkIfFileExists: sinon.stub().resolves(hasFile), + directorySize: hasFile + ? 
sinon.stub().resolves(size) + : sinon.stub().rejects(notFoundError) + } + } + } + + let Metrics, Settings, Logger, MigrationPersistor + + beforeEach(function() { + Settings = { + filestore: { + fallback: { + buckets: { + [bucket]: fallbackBucket + } + } + } + } + + Metrics = { + inc: sinon.stub() + } + + Logger = { + warn: sinon.stub() + } + + MigrationPersistor = SandboxedModule.require(modulePath, { + requires: { + 'settings-sharelatex': Settings, + './Errors': Errors, + 'metrics-sharelatex': Metrics, + 'logger-sharelatex': Logger + }, + globals: { console } + }) + }) + + describe('getFileStream', function() { + const options = { wombat: 'potato' } + describe('when the primary persistor has the file', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor, response + beforeEach(async function() { + primaryPersistor = newPersistor(true) + fallbackPersistor = newPersistor(false) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + response = await migrationPersistor.promises.getFileStream( + bucket, + key, + options + ) + }) + + it('should return the file stream', function() { + expect(response).to.equal(fileStream) + }) + + it('should fetch the file from the primary persistor, with the correct options', function() { + expect( + primaryPersistor.promises.getFileStream + ).to.have.been.calledWithExactly(bucket, key, options) + }) + + it('should not query the fallback persistor', function() { + expect(fallbackPersistor.promises.getFileStream).not.to.have.been.called + }) + }) + + describe('when the fallback persistor has the file', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor, response + beforeEach(async function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(true) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + response = await migrationPersistor.promises.getFileStream( + bucket, + key, + options + ) + }) + + it('should return the file stream', function() { + expect(response).to.equal(fileStream) + }) + + it('should fetch the file from the primary persistor with the correct options', function() { + expect( + primaryPersistor.promises.getFileStream + ).to.have.been.calledWithExactly(bucket, key, options) + }) + + it('should fetch the file from the fallback persistor with the fallback bucket with the correct options', function() { + expect( + fallbackPersistor.promises.getFileStream + ).to.have.been.calledWithExactly(fallbackBucket, key, options) + }) + + it('should only create one stream', function() { + expect(fallbackPersistor.promises.getFileStream).to.have.been.calledOnce + }) + + it('should not send the file to the primary', function() { + expect(primaryPersistor.promises.sendStream).not.to.have.been.called + }) + }) + + describe('when the file should be copied to the primary', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor + beforeEach(async function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(true) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + Settings.filestore.fallback.copyOnMiss = true + return migrationPersistor.promises.getFileStream(bucket, key, options) + }) + + it('should create two streams', function() { + expect(fallbackPersistor.promises.getFileStream).to.have.been + .calledTwice + }) + + it('should send one of the streams to the primary', function() { + expect( + primaryPersistor.promises.sendStream + 
).to.have.been.calledWithExactly(bucket, key, fileStream) + }) + }) + + describe('when neither persistor has the file', function() { + it('rejects with a NotFoundError', async function() { + const migrationPersistor = MigrationPersistor( + newPersistor(false), + newPersistor(false) + ) + return expect( + migrationPersistor.promises.getFileStream(bucket, key) + ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) + }) + }) + + describe('when the primary persistor throws an unexpected error', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor, error + beforeEach(async function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(true) + primaryPersistor.promises.getFileStream = sinon + .stub() + .rejects(genericError) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + try { + await migrationPersistor.promises.getFileStream(bucket, key, options) + } catch (err) { + error = err + } + }) + + it('rejects with the error', function() { + expect(error).to.equal(genericError) + }) + + it('does not call the fallback', function() { + expect(fallbackPersistor.promises.getFileStream).not.to.have.been.called + }) + }) + + describe('when the fallback persistor throws an unexpected error', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor, error + beforeEach(async function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(false) + fallbackPersistor.promises.getFileStream = sinon + .stub() + .rejects(genericError) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + try { + await migrationPersistor.promises.getFileStream(bucket, key, options) + } catch (err) { + error = err + } + }) + + it('rejects with the error', function() { + expect(error).to.equal(genericError) + }) + + it('should have called the fallback', function() { + expect( + fallbackPersistor.promises.getFileStream + ).to.have.been.calledWith(fallbackBucket, key) + }) + }) + }) + + describe('sendStream', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor + beforeEach(function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(false) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + }) + + describe('when it works', function() { + beforeEach(async function() { + return migrationPersistor.promises.sendStream(bucket, key, fileStream) + }) + + it('should send the file to the primary persistor', function() { + expect( + primaryPersistor.promises.sendStream + ).to.have.been.calledWithExactly(bucket, key, fileStream) + }) + + it('should not send the file to the fallback persistor', function() { + expect(fallbackPersistor.promises.sendStream).not.to.have.been.called + }) + }) + + describe('when the primary persistor throws an error', function() { + it('returns the error', async function() { + primaryPersistor.promises.sendStream.rejects(notFoundError) + return expect( + migrationPersistor.promises.sendStream(bucket, key, fileStream) + ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) + }) + }) + }) + + describe('deleteFile', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor + beforeEach(function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(false) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + }) + + describe('when it works', function() { + 
beforeEach(async function() { + return migrationPersistor.promises.deleteFile(bucket, key) + }) + + it('should delete the file from the primary', function() { + expect( + primaryPersistor.promises.deleteFile + ).to.have.been.calledWithExactly(bucket, key) + }) + + it('should delete the file from the fallback', function() { + expect( + fallbackPersistor.promises.deleteFile + ).to.have.been.calledWithExactly(fallbackBucket, key) + }) + }) + + describe('when the primary persistor throws an error', function() { + let error + beforeEach(async function() { + primaryPersistor.promises.deleteFile.rejects(genericError) + try { + await migrationPersistor.promises.deleteFile(bucket, key) + } catch (err) { + error = err + } + }) + + it('should return the error', function() { + expect(error).to.equal(genericError) + }) + + it('should delete the file from the primary', function() { + expect( + primaryPersistor.promises.deleteFile + ).to.have.been.calledWithExactly(bucket, key) + }) + + it('should delete the file from the fallback', function() { + expect( + fallbackPersistor.promises.deleteFile + ).to.have.been.calledWithExactly(fallbackBucket, key) + }) + }) + + describe('when the fallback persistor throws an error', function() { + let error + beforeEach(async function() { + fallbackPersistor.promises.deleteFile.rejects(genericError) + try { + await migrationPersistor.promises.deleteFile(bucket, key) + } catch (err) { + error = err + } + }) + + it('should return the error', function() { + expect(error).to.equal(genericError) + }) + + it('should delete the file from the primary', function() { + expect( + primaryPersistor.promises.deleteFile + ).to.have.been.calledWithExactly(bucket, key) + }) + + it('should delete the file from the fallback', function() { + expect( + fallbackPersistor.promises.deleteFile + ).to.have.been.calledWithExactly(fallbackBucket, key) + }) + }) + }) + + describe('copyFile', function() { + describe('when the file exists on the primary', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor + beforeEach(async function() { + primaryPersistor = newPersistor(true) + fallbackPersistor = newPersistor(false) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + return migrationPersistor.promises.copyFile(bucket, key, destKey) + }) + + it('should call copyFile to copy the file', function() { + expect( + primaryPersistor.promises.copyFile + ).to.have.been.calledWithExactly(bucket, key, destKey) + }) + + it('should not try to read from the fallback', function() { + expect(fallbackPersistor.promises.getFileStream).not.to.have.been.called + }) + }) + + describe('when the file does not exist on the primary', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor + beforeEach(async function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(true) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + return migrationPersistor.promises.copyFile(bucket, key, destKey) + }) + + it('should call copyFile to copy the file', function() { + expect( + primaryPersistor.promises.copyFile + ).to.have.been.calledWithExactly(bucket, key, destKey) + }) + + it('should fetch the file from the fallback', function() { + expect( + fallbackPersistor.promises.getFileStream + ).not.to.have.been.calledWithExactly(fallbackBucket, key) + }) + + it('should send the file to the primary', function() { + expect( + primaryPersistor.promises.sendStream + 
).to.have.been.calledWithExactly(bucket, destKey, fileStream) + }) + }) + + describe('when the file does not exist on the fallback', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor, error + beforeEach(async function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(false) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + try { + await migrationPersistor.promises.copyFile(bucket, key, destKey) + } catch (err) { + error = err + } + }) + + it('should call copyFile to copy the file', function() { + expect( + primaryPersistor.promises.copyFile + ).to.have.been.calledWithExactly(bucket, key, destKey) + }) + + it('should fetch the file from the fallback', function() { + expect( + fallbackPersistor.promises.getFileStream + ).not.to.have.been.calledWithExactly(fallbackBucket, key) + }) + + it('should return a not-found error', function() { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + }) + }) +}) From b4b7fd226e01921c9f2ffa0c3b3a5979c58f6956 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 27 Jan 2020 11:26:37 +0000 Subject: [PATCH 407/555] Add mechanisms to transfer files with md5-based integrity checks Fix error in settings and tidy up tests Remove unused variable declaration Remove .only from tests and update eslint rules to catch it in future Use to catch errors more safely getting md5 hash Avoid unnecessary call to S3 to get md5 response --- services/filestore/.eslintrc | 3 +- services/filestore/app/js/FSPersistor.js | 41 +++++- .../filestore/app/js/MigrationPersistor.js | 136 ++++++++++++++++-- services/filestore/app/js/S3Persistor.js | 114 ++++++++++++++- .../filestore/config/settings.defaults.coffee | 8 +- services/filestore/npm-shrinkwrap.json | 15 ++ services/filestore/package.json | 1 + .../test/acceptance/js/FilestoreTests.js | 55 +++---- .../test/unit/js/FSPersistorTests.js | 57 +++++++- .../test/unit/js/MigrationPersistorTests.js | 125 +++++++++++----- .../test/unit/js/S3PersistorTests.js | 77 +++++++++- 11 files changed, 535 insertions(+), 97 deletions(-) diff --git a/services/filestore/.eslintrc b/services/filestore/.eslintrc index 42a4b5cace..73103de7f6 100644 --- a/services/filestore/.eslintrc +++ b/services/filestore/.eslintrc @@ -23,7 +23,8 @@ "rules": { // Swap the no-unused-expressions rule with a more chai-friendly one "no-unused-expressions": 0, - "chai-friendly/no-unused-expressions": "error" + "chai-friendly/no-unused-expressions": "error", + "no-console": "error" }, "overrides": [ { diff --git a/services/filestore/app/js/FSPersistor.js b/services/filestore/app/js/FSPersistor.js index 2ba65f06d2..3f54e2d091 100644 --- a/services/filestore/app/js/FSPersistor.js +++ b/services/filestore/app/js/FSPersistor.js @@ -1,6 +1,7 @@ const fs = require('fs') const glob = require('glob') const path = require('path') +const crypto = require('crypto') const rimraf = require('rimraf') const Stream = require('stream') const { promisify, callbackify } = require('util') @@ -36,11 +37,22 @@ async function sendFile(location, target, source) { } } -async function sendStream(location, target, sourceStream) { +async function sendStream(location, target, sourceStream, sourceMd5) { const fsPath = await LocalFileWriter.writeStream(sourceStream) + if (!sourceMd5) { + sourceMd5 = await _getFileMd5HashForPath(fsPath) + } try { await sendFile(location, target, fsPath) + const destMd5 = await getFileMd5Hash(location, target) + if (sourceMd5 !== destMd5) { + await 
LocalFileWriter.deleteFile(`${location}/${filterName(target)}`) + throw new WriteError({ + message: 'md5 hash mismatch', + info: { sourceMd5, destMd5, location, target } + }) + } } finally { await LocalFileWriter.deleteFile(fsPath) } @@ -80,6 +92,31 @@ async function getFileSize(location, filename) { } } +async function getFileMd5Hash(location, filename) { + const fullPath = path.join(location, filterName(filename)) + try { + return await _getFileMd5HashForPath(fullPath) + } catch (err) { + throw new ReadError({ + message: 'unable to get md5 hash from file', + info: { location, filename } + }).withCause(err) + } +} + +async function _getFileMd5HashForPath(fullPath) { + return new Promise((resolve, reject) => { + const readStream = fs.createReadStream(fullPath) + const hash = crypto.createHash('md5') + hash.setEncoding('hex') + readStream.on('end', () => { + hash.end() + resolve(hash.read()) + }) + pipeline(readStream, hash).catch(reject) + }) +} + async function copyFile(location, fromName, toName) { const filteredFromName = filterName(fromName) const filteredToName = filterName(toName) @@ -202,6 +239,7 @@ module.exports = { sendStream: callbackify(sendStream), getFileStream: callbackify(getFileStream), getFileSize: callbackify(getFileSize), + getFileMd5Hash: callbackify(getFileMd5Hash), copyFile: callbackify(copyFile), deleteFile: callbackify(deleteFile), deleteDirectory: callbackify(deleteDirectory), @@ -212,6 +250,7 @@ module.exports = { sendStream, getFileStream, getFileSize, + getFileMd5Hash, copyFile, deleteFile, deleteDirectory, diff --git a/services/filestore/app/js/MigrationPersistor.js b/services/filestore/app/js/MigrationPersistor.js index 9f7a834f31..fdc31368a3 100644 --- a/services/filestore/app/js/MigrationPersistor.js +++ b/services/filestore/app/js/MigrationPersistor.js @@ -1,8 +1,9 @@ const metrics = require('metrics-sharelatex') const Settings = require('settings-sharelatex') const logger = require('logger-sharelatex') +const Minipass = require('minipass') const { callbackify } = require('util') -const { NotFoundError } = require('./Errors') +const { NotFoundError, WriteError } = require('./Errors') // Persistor that wraps two other persistors. Talks to the 'primary' by default, // but will fall back to an older persistor in the case of a not-found error. @@ -14,7 +15,7 @@ const { NotFoundError } = require('./Errors') // e.g. 
// Settings.filestore.fallback.buckets = { // myBucketOnS3: 'myBucketOnGCS' -// }s +// } module.exports = function(primary, fallback) { function _wrapMethodOnBothPersistors(method) { @@ -40,10 +41,7 @@ module.exports = function(primary, fallback) { } function _getFallbackBucket(bucket) { - return ( - Settings.filestore.fallback.buckets && - Settings.filestore.fallback.buckets[bucket] - ) + return Settings.filestore.fallback.buckets[bucket] } function _wrapFallbackMethod(method, enableCopy = true) { @@ -68,20 +66,130 @@ module.exports = function(primary, fallback) { } } - async function _copyFileFromFallback( + async function _getFileStreamAndCopyIfRequired(bucketName, key, opts) { + const shouldCopy = + Settings.filestore.fallback.copyOnMiss && !opts.start && !opts.end + + try { + return await primary.promises.getFileStream(bucketName, key, opts) + } catch (err) { + if (err instanceof NotFoundError) { + const fallbackBucket = _getFallbackBucket(bucketName) + if (shouldCopy) { + return _copyFileFromFallback( + fallbackBucket, + bucketName, + key, + key, + true + ) + } else { + return fallback.promises.getFileStream(fallbackBucket, key, opts) + } + } + throw err + } + } + + async function _copyFromFallbackStreamAndVerify( + stream, sourceBucket, destBucket, sourceKey, destKey ) { + try { + let sourceMd5 + try { + sourceMd5 = await fallback.promises.getFileMd5Hash( + sourceBucket, + sourceKey + ) + } catch (err) { + logger.warn(err, 'error getting md5 hash from fallback persistor') + } + + await primary.promises.sendStream(destBucket, destKey, stream, sourceMd5) + } catch (err) { + let error = err + metrics.inc('fallback.copy.failure') + + try { + await primary.promises.deleteFile(destBucket, destKey) + } catch (err) { + error = new WriteError({ + message: 'unable to clean up destination copy artifact', + info: { + destBucket, + destKey + } + }).withCause(err) + } + + error = new WriteError({ + message: 'unable to copy file to destination persistor', + info: { + sourceBucket, + destBucket, + sourceKey, + destKey + } + }).withCause(error) + + logger.warn({ error }, 'failed to copy file from fallback') + throw error + } + } + + async function _copyFileFromFallback( + sourceBucket, + destBucket, + sourceKey, + destKey, + returnStream = false + ) { + metrics.inc('fallback.copy') const sourceStream = await fallback.promises.getFileStream( sourceBucket, sourceKey, {} ) - await primary.promises.sendStream(destBucket, destKey, sourceStream) - metrics.inc('fallback.copy') + if (!returnStream) { + return _copyFromFallbackStreamAndVerify( + sourceStream, + sourceBucket, + destBucket, + sourceKey, + destKey + ) + } + + const tee = new Minipass() + const clientStream = new Minipass() + const copyStream = new Minipass() + + tee.pipe(clientStream) + tee.pipe(copyStream) + + // copy the file in the background + _copyFromFallbackStreamAndVerify( + copyStream, + sourceBucket, + destBucket, + sourceKey, + destKey + ).catch( + // the error handler in this method will log a metric and a warning, so + // we don't need to do anything extra here, but catching it will prevent + // unhandled promise rejection warnings + () => {} + ) + + // start piping the source stream into the tee after everything is set up, + // otherwise one stream may consume bytes that don't arrive at the other + sourceStream.pipe(tee) + return clientStream } return { @@ -89,7 +197,8 @@ module.exports = function(primary, fallback) { fallbackPersistor: fallback, sendFile: primary.sendFile, sendStream: primary.sendStream, - getFileStream: 
callbackify(_wrapFallbackMethod('getFileStream')), + getFileStream: callbackify(_getFileStreamAndCopyIfRequired), + getFileMd5Hash: callbackify(_wrapFallbackMethod('getFileMd5Hash')), deleteDirectory: callbackify( _wrapMethodOnBothPersistors('deleteDirectory') ), @@ -97,17 +206,18 @@ module.exports = function(primary, fallback) { deleteFile: callbackify(_wrapMethodOnBothPersistors('deleteFile')), copyFile: callbackify(copyFileWithFallback), checkIfFileExists: callbackify(_wrapFallbackMethod('checkIfFileExists')), - directorySize: callbackify(_wrapFallbackMethod('directorySize', false)), + directorySize: callbackify(_wrapFallbackMethod('directorySize')), promises: { sendFile: primary.promises.sendFile, sendStream: primary.promises.sendStream, - getFileStream: _wrapFallbackMethod('getFileStream'), + getFileStream: _getFileStreamAndCopyIfRequired, + getFileMd5Hash: _wrapFallbackMethod('getFileMd5Hash'), deleteDirectory: _wrapMethodOnBothPersistors('deleteDirectory'), getFileSize: _wrapFallbackMethod('getFileSize'), deleteFile: _wrapMethodOnBothPersistors('deleteFile'), copyFile: copyFileWithFallback, checkIfFileExists: _wrapFallbackMethod('checkIfFileExists'), - directorySize: _wrapFallbackMethod('directorySize', false) + directorySize: _wrapFallbackMethod('directorySize') } } } diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index 6d22823401..ef465da25c 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -5,8 +5,11 @@ https.globalAgent.maxSockets = 300 const settings = require('settings-sharelatex') const metrics = require('metrics-sharelatex') +const logger = require('logger-sharelatex') +const Minipass = require('minipass') const meter = require('stream-meter') +const crypto = require('crypto') const fs = require('fs') const S3 = require('aws-sdk/clients/s3') const { URL } = require('url') @@ -22,6 +25,7 @@ module.exports = { sendFile: callbackify(sendFile), sendStream: callbackify(sendStream), getFileStream: callbackify(getFileStream), + getFileMd5Hash: callbackify(getFileMd5Hash), deleteDirectory: callbackify(deleteDirectory), getFileSize: callbackify(getFileSize), deleteFile: callbackify(deleteFile), @@ -32,6 +36,7 @@ module.exports = { sendFile, sendStream, getFileStream, + getFileMd5Hash, deleteDirectory, getFileSize, deleteFile, @@ -41,6 +46,10 @@ module.exports = { } } +function hexToBase64(hex) { + return Buffer.from(hex, 'hex').toString('base64') +} + async function sendFile(bucketName, key, fsPath) { let readStream try { @@ -56,20 +65,79 @@ async function sendFile(bucketName, key, fsPath) { return sendStream(bucketName, key, readStream) } -async function sendStream(bucketName, key, readStream) { +async function sendStream(bucketName, key, readStream, sourceMd5) { try { + // if there is no supplied md5 hash, we calculate the hash as the data passes through + const passthroughStream = new Minipass() + let hashPromise + let b64Hash + + if (sourceMd5) { + b64Hash = hexToBase64(sourceMd5) + } else { + const hash = crypto.createHash('md5') + hash.setEncoding('hex') + passthroughStream.pipe(hash) + hashPromise = new Promise((resolve, reject) => { + passthroughStream.on('end', () => { + hash.end() + resolve(hash.read()) + }) + passthroughStream.on('error', err => { + reject(err) + }) + }) + } + const meteredStream = meter() + passthroughStream.pipe(meteredStream) meteredStream.on('finish', () => { metrics.count('s3.egress', meteredStream.bytes) }) - await _getClientForBucket(bucketName) - 
.upload({ - Bucket: bucketName, - Key: key, - Body: readStream.pipe(meteredStream) - }) + // pipe the readstream through minipass, which can write to both the metered + // stream (which goes on to S3) and the md5 generator if necessary + // - we do this last so that a listener streams does not consume data meant + // for both destinations + readStream.pipe(passthroughStream) + + // if we have an md5 hash, pass this to S3 to verify the upload + const uploadOptions = { + Bucket: bucketName, + Key: key, + Body: meteredStream + } + if (b64Hash) { + uploadOptions.ContentMD5 = b64Hash + } + + const response = await _getClientForBucket(bucketName) + .upload(uploadOptions) .promise() + const destMd5 = _md5FromResponse(response) + + // if we didn't have an md5 hash, compare our computed one with S3's + if (hashPromise) { + sourceMd5 = await hashPromise + + if (sourceMd5 !== destMd5) { + try { + await deleteFile(bucketName, key) + } catch (err) { + logger.warn(err, 'error deleting file for invalid upload') + } + + throw new WriteError({ + message: 'source and destination hashes do not match', + info: { + sourceMd5, + destMd5, + bucketName, + key + } + }) + } + } } catch (err) { throw _wrapError( err, @@ -167,6 +235,23 @@ async function getFileSize(bucketName, key) { } } +async function getFileMd5Hash(bucketName, key) { + try { + const response = await _getClientForBucket(bucketName) + .headObject({ Bucket: bucketName, Key: key }) + .promise() + const md5 = _md5FromResponse(response) + return md5 + } catch (err) { + throw _wrapError( + err, + 'error getting hash of s3 object', + { bucketName, key }, + ReadError + ) + } +} + async function deleteFile(bucketName, key) { try { await _getClientForBucket(bucketName) @@ -314,3 +399,18 @@ function _buildClientOptions(bucketCredentials) { return options } + +function _md5FromResponse(response) { + const md5 = (response.ETag || '').replace(/[ "]/g, '') + if (!md5.match(/^[a-f0-9]{32}$/)) { + throw new ReadError({ + message: 's3 etag not in md5-hash format', + info: { + md5, + eTag: response.ETag + } + }) + } + + return md5 +} diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index a4a2df2d24..bb124ae8e0 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -17,8 +17,8 @@ unless process.env['BACKEND']? else process.env['BACKEND'] = "fs" process.env['USER_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../user_files") - process.env['TEMPLATE_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../public_files") - process.env['PUBLIC_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../template_files") + process.env['TEMPLATE_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../template_files") + process.env['PUBLIC_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../public_files") settings = internal: @@ -51,8 +51,8 @@ settings = backend: process.env['FALLBACK_BACKEND'] # mapping of bucket names on the fallback, to bucket names on the primary. # e.g. { myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } - buckets: JSON.parse process.env['FALLBACK_BUCKET_MAPPING'] if process.env['FALLBACK_BUCKET_MAPPING']? 
- copyOnMiss: if process.env['COPY_ON_MISS'] == 'true' then true else false + buckets: JSON.parse(process.env['FALLBACK_BUCKET_MAPPING'] || '{}') + copyOnMiss: process.env['COPY_ON_MISS'] == 'true' path: uploadFolder: Path.resolve(__dirname + "/../uploads") diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index b343d6ad2c..a4206a94e0 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -3129,6 +3129,21 @@ "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" }, + "minipass": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.1.tgz", + "integrity": "sha512-UFqVihv6PQgwj8/yTGvl9kPz7xIAY+R5z6XYjRInD3Gk3qx6QGSD6zEcpeG4Dy/lQnv1J6zv8ejV90hyYIKf3w==", + "requires": { + "yallist": "^4.0.0" + }, + "dependencies": { + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, "mkdirp": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 303393bd56..2e9cef8aa0 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -31,6 +31,7 @@ "knox": "~0.9.1", "logger-sharelatex": "^1.7.0", "metrics-sharelatex": "^2.2.0", + "minipass": "^3.1.1", "mocha": "5.2.0", "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 5a0de3abd8..1c96445a3a 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -84,11 +84,11 @@ const BackendSettings = { __dirname, '../../../user_files' ), - [process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME]: Path.resolve( + [process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME]: Path.resolve( __dirname, '../../../public_files' ), - [process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME]: Path.resolve( + [process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME]: Path.resolve( __dirname, '../../../template_files' ) @@ -114,9 +114,9 @@ const BackendSettings = { [Path.resolve(__dirname, '../../../user_files')]: process.env .AWS_S3_USER_FILES_BUCKET_NAME, [Path.resolve(__dirname, '../../../public_files')]: process.env - .AWS_S3_TEMPLATE_FILES_BUCKET_NAME, + .AWS_S3_PUBLIC_FILES_BUCKET_NAME, [Path.resolve(__dirname, '../../../template_files')]: process.env - .AWS_S3_PUBLIC_FILES_BUCKET_NAME + .AWS_S3_TEMPLATE_FILES_BUCKET_NAME } } } @@ -130,7 +130,7 @@ describe('Filestore', function() { // redefine the test suite for every available backend Object.keys(BackendSettings).forEach(backend => { describe(backend, function() { - let app, previousEgress, previousIngress + let app, previousEgress, previousIngress, projectId before(async function() { // create the app with the relevant filestore settings @@ -151,6 +151,7 @@ describe('Filestore', function() { getMetric(filestoreUrl, 's3_ingress') ]) } + projectId = `acceptance_tests_${Math.random()}` }) it('should send a 200 for the status endpoint', async function() { @@ -174,7 +175,7 @@ describe('Filestore', function() { beforeEach(async function() { fileId = Math.random() - fileUrl = `${filestoreUrl}/project/acceptance_tests/file/${directoryName}%2F${fileId}` + fileUrl = 
`${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` constantFileContent = [ 'hello world', `line 2 goes here ${Math.random()}`, @@ -242,7 +243,7 @@ describe('Filestore', function() { }) it('should be able to copy files', async function() { - const newProjectID = 'acceptance_tests_copied_project' + const newProjectID = `acceptance_tests_copied_project_${Math.random()}` const newFileId = Math.random() const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}` const opts = { @@ -250,7 +251,7 @@ describe('Filestore', function() { uri: newFileUrl, json: { source: { - project_id: 'acceptance_tests', + project_id: projectId, file_id: `${directoryName}/${fileId}` } } @@ -304,7 +305,7 @@ describe('Filestore', function() { }) describe('with multiple files', function() { - let fileIds, fileUrls, project + let fileIds, fileUrls const directoryName = 'directory' const localFileReadPaths = [ '/tmp/filestore_acceptance_tests_file_read_1.txt', @@ -331,11 +332,10 @@ describe('Filestore', function() { }) beforeEach(async function() { - project = `acceptance_tests_${Math.random()}` fileIds = [Math.random(), Math.random()] fileUrls = [ - `${filestoreUrl}/project/${project}/file/${directoryName}%2F${fileIds[0]}`, - `${filestoreUrl}/project/${project}/file/${directoryName}%2F${fileIds[1]}` + `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileIds[0]}`, + `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileIds[1]}` ] const writeStreams = [ @@ -359,7 +359,7 @@ describe('Filestore', function() { it('should get the directory size', async function() { const response = await rp.get( - `${filestoreUrl}/project/${project}/size` + `${filestoreUrl}/project/${projectId}/size` ) expect(parseInt(JSON.parse(response.body)['total bytes'])).to.equal( constantFileContents[0].length + constantFileContents[1].length @@ -459,7 +459,6 @@ describe('Filestore', function() { fileUrl, bucket, fallbackBucket - const projectId = 'acceptance_tests' beforeEach(function() { constantFileContent = `This is yet more file content ${Math.random()}` @@ -503,14 +502,20 @@ describe('Filestore', function() { expect(res.body).to.equal(constantFileContent) }) - it('should not copy the file to the primary', async function() { - await rp.get(fileUrl) + describe('when copyOnMiss is disabled', function() { + beforeEach(function() { + Settings.filestore.fallback.copyOnMiss = false + }) - await expectPersistorNotToHaveFile( - app.persistor.primaryPersistor, - bucket, - fileKey - ) + it('should not copy the file to the primary', async function() { + await rp.get(fileUrl) + + await expectPersistorNotToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey + ) + }) }) describe('when copyOnMiss is enabled', function() { @@ -534,9 +539,9 @@ describe('Filestore', function() { describe('when copying a file', function() { let newFileId, newFileUrl, newFileKey - const newProjectID = 'acceptance_tests_copied_project' beforeEach(async function() { + const newProjectID = `acceptance_tests_copied_project_${Math.random()}` newFileId = Math.random() newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}` newFileKey = `${newProjectID}/${directoryName}/${newFileId}` @@ -546,7 +551,7 @@ describe('Filestore', function() { uri: newFileUrl, json: { source: { - project_id: 'acceptance_tests', + project_id: projectId, file_id: `${directoryName}/${fileId}` } } @@ -616,7 +621,7 @@ describe('Filestore', function() { await 
expectPersistorNotToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, - `acceptance_tests/${directoryName}/${fileId}` + `${projectId}/${directoryName}/${fileId}` ) }) }) @@ -706,7 +711,7 @@ describe('Filestore', function() { beforeEach(async function() { fileId = Math.random() - fileUrl = `${filestoreUrl}/project/acceptance_tests/file/${directoryName}%2F${fileId}` + fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` const stat = await fsStat(localFileReadPath) localFileSize = stat.size const writeStream = request.post(fileUrl) diff --git a/services/filestore/test/unit/js/FSPersistorTests.js b/services/filestore/test/unit/js/FSPersistorTests.js index ba343c548c..1be8eea3e2 100644 --- a/services/filestore/test/unit/js/FSPersistorTests.js +++ b/services/filestore/test/unit/js/FSPersistorTests.js @@ -12,19 +12,32 @@ const modulePath = '../../../app/js/FSPersistor.js' describe('FSPersistorTests', function() { const stat = { size: 4, isFile: sinon.stub().returns(true) } const fd = 1234 - const readStream = 'readStream' const writeStream = 'writeStream' const remoteStream = 'remoteStream' const tempFile = '/tmp/potato.txt' const location = '/foo' const error = new Error('guru meditation error') + const md5 = 'ffffffff' const files = ['animals/wombat.tex', 'vegetables/potato.tex'] const globs = [`${location}/${files[0]}`, `${location}/${files[1]}`] const filteredFilenames = ['animals_wombat.tex', 'vegetables_potato.tex'] - let fs, rimraf, stream, LocalFileWriter, FSPersistor, glob + let fs, + rimraf, + stream, + LocalFileWriter, + FSPersistor, + glob, + readStream, + crypto, + Hash beforeEach(function() { + readStream = { + name: 'readStream', + on: sinon.stub().yields(), + pipe: sinon.stub() + } fs = { createReadStream: sinon.stub().returns(readStream), createWriteStream: sinon.stub().returns(writeStream), @@ -41,6 +54,14 @@ describe('FSPersistorTests', function() { deleteFile: sinon.stub().resolves() } } + Hash = { + end: sinon.stub(), + read: sinon.stub().returns(md5), + setEncoding: sinon.stub() + } + crypto = { + createHash: sinon.stub().returns(Hash) + } FSPersistor = SandboxedModule.require(modulePath, { requires: { './LocalFileWriter': LocalFileWriter, @@ -48,7 +69,8 @@ describe('FSPersistorTests', function() { fs, glob, rimraf, - stream + stream, + crypto }, globals: { console } }) @@ -103,6 +125,35 @@ describe('FSPersistorTests', function() { await FSPersistor.promises.sendStream(location, files[0], remoteStream) expect(fs.createReadStream).to.have.been.calledWith(tempFile) }) + + describe('when the md5 hash does not match', function() { + it('should return a write error', async function() { + await expect( + FSPersistor.promises.sendStream( + location, + files[0], + remoteStream, + '00000000' + ) + ) + .to.eventually.be.rejected.and.be.an.instanceOf(Errors.WriteError) + .and.have.property('message', 'md5 hash mismatch') + }) + + it('deletes the copied file', async function() { + try { + await FSPersistor.promises.sendStream( + location, + files[0], + remoteStream, + '00000000' + ) + } catch (_) {} + expect(LocalFileWriter.promises.deleteFile).to.have.been.calledWith( + `${location}/${filteredFilenames[0]}` + ) + }) + }) }) describe('getFileStream', function() { diff --git a/services/filestore/test/unit/js/MigrationPersistorTests.js b/services/filestore/test/unit/js/MigrationPersistorTests.js index 1cc8324d46..83159f38ad 100644 --- a/services/filestore/test/unit/js/MigrationPersistorTests.js +++ 
b/services/filestore/test/unit/js/MigrationPersistorTests.js @@ -21,35 +21,53 @@ describe('MigrationPersistorTests', function() { const genericError = new Error('guru meditation error') const notFoundError = new Errors.NotFoundError('not found') const size = 33 - const fileStream = 'fileStream' + const md5 = 'ffffffff' - function newPersistor(hasFile) { - return { - promises: { - sendFile: sinon.stub().resolves(), - sendStream: sinon.stub().resolves(), - getFileStream: hasFile - ? sinon.stub().resolves(fileStream) - : sinon.stub().rejects(notFoundError), - deleteDirectory: sinon.stub().resolves(), - getFileSize: hasFile - ? sinon.stub().resolves(size) - : sinon.stub().rejects(notFoundError), - deleteFile: sinon.stub().resolves(), - copyFile: hasFile - ? sinon.stub().resolves() - : sinon.stub().rejects(notFoundError), - checkIfFileExists: sinon.stub().resolves(hasFile), - directorySize: hasFile - ? sinon.stub().resolves(size) - : sinon.stub().rejects(notFoundError) - } - } - } - - let Metrics, Settings, Logger, MigrationPersistor + let Metrics, + Settings, + Logger, + MigrationPersistor, + Minipass, + fileStream, + newPersistor beforeEach(function() { + fileStream = { + name: 'fileStream', + on: sinon + .stub() + .withArgs('end') + .yields(), + pipe: sinon.stub() + } + + newPersistor = function(hasFile) { + return { + promises: { + sendFile: sinon.stub().resolves(), + sendStream: sinon.stub().resolves(), + getFileStream: hasFile + ? sinon.stub().resolves(fileStream) + : sinon.stub().rejects(notFoundError), + deleteDirectory: sinon.stub().resolves(), + getFileSize: hasFile + ? sinon.stub().resolves(size) + : sinon.stub().rejects(notFoundError), + deleteFile: sinon.stub().resolves(), + copyFile: hasFile + ? sinon.stub().resolves() + : sinon.stub().rejects(notFoundError), + checkIfFileExists: sinon.stub().resolves(hasFile), + directorySize: hasFile + ? sinon.stub().resolves(size) + : sinon.stub().rejects(notFoundError), + getFileMd5Hash: hasFile + ? 
sinon.stub().resolves(md5) + : sinon.stub().rejects(notFoundError) + } + } + } + Settings = { filestore: { fallback: { @@ -68,12 +86,20 @@ describe('MigrationPersistorTests', function() { warn: sinon.stub() } + Minipass = sinon.stub() + Minipass.prototype.on = sinon + .stub() + .withArgs('end') + .yields() + Minipass.prototype.pipe = sinon.stub() + MigrationPersistor = SandboxedModule.require(modulePath, { requires: { 'settings-sharelatex': Settings, './Errors': Errors, 'metrics-sharelatex': Metrics, - 'logger-sharelatex': Logger + 'logger-sharelatex': Logger, + minipass: Minipass }, globals: { console } }) @@ -144,7 +170,7 @@ describe('MigrationPersistorTests', function() { ).to.have.been.calledWithExactly(fallbackBucket, key, options) }) - it('should only create one stream', function() { + it('should create one read stream', function() { expect(fallbackPersistor.promises.getFileStream).to.have.been.calledOnce }) @@ -154,7 +180,10 @@ describe('MigrationPersistorTests', function() { }) describe('when the file should be copied to the primary', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor + let primaryPersistor, + fallbackPersistor, + migrationPersistor, + returnedStream beforeEach(async function() { primaryPersistor = newPersistor(false) fallbackPersistor = newPersistor(true) @@ -163,18 +192,36 @@ describe('MigrationPersistorTests', function() { fallbackPersistor ) Settings.filestore.fallback.copyOnMiss = true - return migrationPersistor.promises.getFileStream(bucket, key, options) + returnedStream = await migrationPersistor.promises.getFileStream( + bucket, + key, + options + ) }) - it('should create two streams', function() { - expect(fallbackPersistor.promises.getFileStream).to.have.been - .calledTwice + it('should create one read stream', function() { + expect(fallbackPersistor.promises.getFileStream).to.have.been.calledOnce }) - it('should send one of the streams to the primary', function() { + it('should get the md5 hash from the source', function() { + expect( + fallbackPersistor.promises.getFileMd5Hash + ).to.have.been.calledWith(fallbackBucket, key) + }) + + it('should send a stream to the primary', function() { expect( primaryPersistor.promises.sendStream - ).to.have.been.calledWithExactly(bucket, key, fileStream) + ).to.have.been.calledWithExactly( + bucket, + key, + sinon.match.instanceOf(Minipass), + md5 + ) + }) + + it('should send a stream to the client', function() { + expect(returnedStream).to.be.an.instanceOf(Minipass) }) }) @@ -420,10 +467,16 @@ describe('MigrationPersistorTests', function() { ).not.to.have.been.calledWithExactly(fallbackBucket, key) }) + it('should get the md5 hash from the source', function() { + expect( + fallbackPersistor.promises.getFileMd5Hash + ).to.have.been.calledWith(fallbackBucket, key) + }) + it('should send the file to the primary', function() { expect( primaryPersistor.promises.sendStream - ).to.have.been.calledWithExactly(bucket, destKey, fileStream) + ).to.have.been.calledWithExactly(bucket, destKey, fileStream, md5) }) }) diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js index 7a945b4d19..4f700c8797 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -26,8 +26,10 @@ describe('S3PersistorTests', function() { { Key: 'hippo', Size: 22 } ] const filesSize = 33 + const md5 = 'ffffffff00000000ffffffff00000000' let Metrics, + Logger, S3, Fs, Meter, @@ -40,7 +42,10 @@ 
describe('S3PersistorTests', function() { S3AccessDeniedError, FileNotFoundError, EmptyPromise, - settings + settings, + Minipass, + Hash, + crypto beforeEach(function() { settings = { @@ -100,7 +105,8 @@ describe('S3PersistorTests', function() { }), headObject: sinon.stub().returns({ promise: sinon.stub().resolves({ - ContentLength: objectSize + ContentLength: objectSize, + ETag: md5 }) }), listObjects: sinon.stub().returns({ @@ -108,21 +114,46 @@ describe('S3PersistorTests', function() { Contents: files }) }), - upload: sinon.stub().returns(EmptyPromise), + upload: sinon + .stub() + .returns({ promise: sinon.stub().resolves({ ETag: `"${md5}"` }) }), copyObject: sinon.stub().returns(EmptyPromise), deleteObject: sinon.stub().returns(EmptyPromise), deleteObjects: sinon.stub().returns(EmptyPromise) } S3 = sinon.stub().returns(S3Client) + Hash = { + end: sinon.stub(), + read: sinon.stub().returns(md5), + setEncoding: sinon.stub() + } + crypto = { + createHash: sinon.stub().returns(Hash) + } + + Minipass = sinon.stub() + Minipass.prototype.on = sinon + .stub() + .withArgs('end') + .yields() + Minipass.prototype.pipe = sinon.stub() + + Logger = { + warn: sinon.stub() + } + S3Persistor = SandboxedModule.require(modulePath, { requires: { 'aws-sdk/clients/s3': S3, 'settings-sharelatex': settings, + 'logger-sharelatex': Logger, './Errors': Errors, fs: Fs, 'stream-meter': Meter, - 'metrics-sharelatex': Metrics + 'metrics-sharelatex': Metrics, + minipass: Minipass, + crypto }, globals: { console } }) @@ -420,17 +451,49 @@ describe('S3PersistorTests', function() { expect(S3Client.upload).to.have.been.calledWith({ Bucket: bucket, Key: key, - Body: 'readStream' + Body: MeteredStream }) }) it('should meter the stream', function() { - expect(ReadStream.pipe).to.have.been.calledWith(MeteredStream) + expect(Minipass.prototype.pipe).to.have.been.calledWith(MeteredStream) }) it('should record an egress metric', function() { expect(Metrics.count).to.have.been.calledWith('s3.egress', objectSize) }) + + it('calculates the md5 hash of the file', function() { + expect(Minipass.prototype.pipe).to.have.been.calledWith(Hash) + }) + }) + + describe('when a hash is supploed', function() { + beforeEach(async function() { + return S3Persistor.promises.sendStream( + bucket, + key, + ReadStream, + 'aaaaaaaabbbbbbbbaaaaaaaabbbbbbbb' + ) + }) + + it('should not calculate the md5 hash of the file', function() { + expect(Minipass.prototype.pipe).not.to.have.been.calledWith(Hash) + }) + + it('sends the hash in base64', function() { + expect(S3Client.upload).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + Body: MeteredStream, + ContentMD5: 'qqqqqru7u7uqqqqqu7u7uw==' + }) + }) + + it('does not fetch the md5 hash of the uploaded file', function() { + expect(S3Client.headObject).not.to.have.been.called + }) }) describe('when the upload fails', function() { @@ -466,7 +529,7 @@ describe('S3PersistorTests', function() { expect(S3Client.upload).to.have.been.calledWith({ Bucket: bucket, Key: key, - Body: 'readStream' + Body: MeteredStream }) }) }) From 5d5d325691cf3c1b4f13294a1bf3003c956ed9a3 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 29 Jan 2020 11:09:20 +0000 Subject: [PATCH 408/555] Preserve all error information when cleanup of copied file fails --- .../filestore/app/js/MigrationPersistor.js | 32 +++++++++---------- 1 file changed, 15 insertions(+), 17 deletions(-) diff --git a/services/filestore/app/js/MigrationPersistor.js b/services/filestore/app/js/MigrationPersistor.js index fdc31368a3..3a4789693b 
100644 --- a/services/filestore/app/js/MigrationPersistor.js +++ b/services/filestore/app/js/MigrationPersistor.js @@ -111,22 +111,7 @@ module.exports = function(primary, fallback) { await primary.promises.sendStream(destBucket, destKey, stream, sourceMd5) } catch (err) { - let error = err - metrics.inc('fallback.copy.failure') - - try { - await primary.promises.deleteFile(destBucket, destKey) - } catch (err) { - error = new WriteError({ - message: 'unable to clean up destination copy artifact', - info: { - destBucket, - destKey - } - }).withCause(err) - } - - error = new WriteError({ + const error = new WriteError({ message: 'unable to copy file to destination persistor', info: { sourceBucket, @@ -134,7 +119,20 @@ module.exports = function(primary, fallback) { sourceKey, destKey } - }).withCause(error) + }).withCause(err) + metrics.inc('fallback.copy.failure') + + try { + await primary.promises.deleteFile(destBucket, destKey) + } catch (err) { + error.info.cleanupError = new WriteError({ + message: 'unable to clean up destination copy artifact', + info: { + destBucket, + destKey + } + }).withCause(err) + } logger.warn({ error }, 'failed to copy file from fallback') throw error From 9e0b378948d12c5bd137fc41d2baafe445a10f53 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 3 Feb 2020 15:55:17 +0000 Subject: [PATCH 409/555] Remove minipass as dependency and refactor to make things clearer --- .../filestore/app/js/MigrationPersistor.js | 185 +++++++++--------- services/filestore/app/js/S3Persistor.js | 20 +- .../test/acceptance/js/FilestoreTests.js | 71 +++++-- .../test/unit/js/MigrationPersistorTests.js | 31 +-- .../test/unit/js/S3PersistorTests.js | 33 ++-- 5 files changed, 194 insertions(+), 146 deletions(-) diff --git a/services/filestore/app/js/MigrationPersistor.js b/services/filestore/app/js/MigrationPersistor.js index 3a4789693b..3ddc762922 100644 --- a/services/filestore/app/js/MigrationPersistor.js +++ b/services/filestore/app/js/MigrationPersistor.js @@ -1,10 +1,12 @@ const metrics = require('metrics-sharelatex') const Settings = require('settings-sharelatex') const logger = require('logger-sharelatex') -const Minipass = require('minipass') -const { callbackify } = require('util') +const Stream = require('stream') +const { callbackify, promisify } = require('util') const { NotFoundError, WriteError } = require('./Errors') +const pipeline = promisify(Stream.pipeline) + // Persistor that wraps two other persistors. Talks to the 'primary' by default, // but will fall back to an older persistor in the case of a not-found error. 
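// A minimal usage sketch (assuming the S3Persistor and FSPersistor modules in this
// service are the two persistors being wrapped -- the require paths below are an
// illustration, not something introduced by this patch):
//
//   const MigrationPersistor = require('./MigrationPersistor')
//   const S3Persistor = require('./S3Persistor')
//   const FSPersistor = require('./FSPersistor')
//
//   const persistor = MigrationPersistor(S3Persistor, FSPersistor)
//   // reads go to the primary first; on a NotFoundError the fallback bucket from
//   // Settings.filestore.fallback.buckets is consulted instead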
// If `Settings.filestore.fallback.copyOnMiss` is set, this will copy files from the fallback @@ -29,14 +31,86 @@ module.exports = function(primary, fallback) { } } + async function getFileStreamWithFallback(bucket, key, opts) { + const shouldCopy = + Settings.filestore.fallback.copyOnMiss && !opts.start && !opts.end + + try { + return await primary.promises.getFileStream(bucket, key, opts) + } catch (err) { + if (err instanceof NotFoundError) { + const fallbackBucket = _getFallbackBucket(bucket) + const fallbackStream = await fallback.promises.getFileStream( + fallbackBucket, + key, + opts + ) + // tee the stream to the client, and as a copy to the primary (if necessary) + // start listening on both straight away so that we don't consume bytes + // in one place before the other + const returnStream = new Stream.PassThrough() + pipeline(fallbackStream, returnStream) + + if (shouldCopy) { + const copyStream = new Stream.PassThrough() + pipeline(fallbackStream, copyStream) + + _copyStreamFromFallbackAndVerify( + copyStream, + fallbackBucket, + bucket, + key, + key + ).catch(() => { + // swallow errors, as this runs in the background and will log a warning + }) + } + return returnStream + } + throw err + } + } + async function copyFileWithFallback(bucket, sourceKey, destKey) { try { return await primary.promises.copyFile(bucket, sourceKey, destKey) } catch (err) { if (err instanceof NotFoundError) { const fallbackBucket = _getFallbackBucket(bucket) - return _copyFileFromFallback(fallbackBucket, bucket, sourceKey, destKey) + const fallbackStream = await fallback.promises.getFileStream( + fallbackBucket, + sourceKey, + {} + ) + + const copyStream = new Stream.PassThrough() + pipeline(fallbackStream, copyStream) + + if (Settings.filestore.fallback.copyOnMiss) { + const missStream = new Stream.PassThrough() + pipeline(fallbackStream, missStream) + + // copy from sourceKey -> sourceKey + _copyStreamFromFallbackAndVerify( + missStream, + fallbackBucket, + bucket, + sourceKey, + sourceKey + ).then(() => { + // swallow errors, as this runs in the background and will log a warning + }) + } + // copy from sourceKey -> destKey + return _copyStreamFromFallbackAndVerify( + copyStream, + fallbackBucket, + bucket, + sourceKey, + destKey + ) } + throw err } } @@ -44,20 +118,29 @@ module.exports = function(primary, fallback) { return Settings.filestore.fallback.buckets[bucket] } - function _wrapFallbackMethod(method, enableCopy = true) { + function _wrapFallbackMethod(method) { return async function(bucket, key, ...moreArgs) { try { return await primary.promises[method](bucket, key, ...moreArgs) } catch (err) { if (err instanceof NotFoundError) { const fallbackBucket = _getFallbackBucket(bucket) - if (Settings.filestore.fallback.copyOnMiss && enableCopy) { - // run in background - _copyFileFromFallback(fallbackBucket, bucket, key, key).catch( - err => { - logger.warn({ err }, 'failed to copy file from fallback') - } + if (Settings.filestore.fallback.copyOnMiss) { + const fallbackStream = await fallback.promises.getFileStream( + fallbackBucket, + key, + {} ) + // run in background + _copyStreamFromFallbackAndVerify( + fallbackStream, + fallbackBucket, + bucket, + key, + key + ).catch(err => { + logger.warn({ err }, 'failed to copy file from fallback') + }) } return fallback.promises[method](fallbackBucket, key, ...moreArgs) } @@ -66,32 +149,7 @@ module.exports = function(primary, fallback) { } } - async function _getFileStreamAndCopyIfRequired(bucketName, key, opts) { - const shouldCopy = - 
Settings.filestore.fallback.copyOnMiss && !opts.start && !opts.end - - try { - return await primary.promises.getFileStream(bucketName, key, opts) - } catch (err) { - if (err instanceof NotFoundError) { - const fallbackBucket = _getFallbackBucket(bucketName) - if (shouldCopy) { - return _copyFileFromFallback( - fallbackBucket, - bucketName, - key, - key, - true - ) - } else { - return fallback.promises.getFileStream(fallbackBucket, key, opts) - } - } - throw err - } - } - - async function _copyFromFallbackStreamAndVerify( + async function _copyStreamFromFallbackAndVerify( stream, sourceBucket, destBucket, @@ -139,63 +197,12 @@ module.exports = function(primary, fallback) { } } - async function _copyFileFromFallback( - sourceBucket, - destBucket, - sourceKey, - destKey, - returnStream = false - ) { - metrics.inc('fallback.copy') - const sourceStream = await fallback.promises.getFileStream( - sourceBucket, - sourceKey, - {} - ) - - if (!returnStream) { - return _copyFromFallbackStreamAndVerify( - sourceStream, - sourceBucket, - destBucket, - sourceKey, - destKey - ) - } - - const tee = new Minipass() - const clientStream = new Minipass() - const copyStream = new Minipass() - - tee.pipe(clientStream) - tee.pipe(copyStream) - - // copy the file in the background - _copyFromFallbackStreamAndVerify( - copyStream, - sourceBucket, - destBucket, - sourceKey, - destKey - ).catch( - // the error handler in this method will log a metric and a warning, so - // we don't need to do anything extra here, but catching it will prevent - // unhandled promise rejection warnings - () => {} - ) - - // start piping the source stream into the tee after everything is set up, - // otherwise one stream may consume bytes that don't arrive at the other - sourceStream.pipe(tee) - return clientStream - } - return { primaryPersistor: primary, fallbackPersistor: fallback, sendFile: primary.sendFile, sendStream: primary.sendStream, - getFileStream: callbackify(_getFileStreamAndCopyIfRequired), + getFileStream: callbackify(getFileStreamWithFallback), getFileMd5Hash: callbackify(_wrapFallbackMethod('getFileMd5Hash')), deleteDirectory: callbackify( _wrapMethodOnBothPersistors('deleteDirectory') @@ -208,7 +215,7 @@ module.exports = function(primary, fallback) { promises: { sendFile: primary.promises.sendFile, sendStream: primary.promises.sendStream, - getFileStream: _getFileStreamAndCopyIfRequired, + getFileStream: getFileStreamWithFallback, getFileMd5Hash: _wrapFallbackMethod('getFileMd5Hash'), deleteDirectory: _wrapMethodOnBothPersistors('deleteDirectory'), getFileSize: _wrapFallbackMethod('getFileSize'), diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index ef465da25c..a10251a642 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -7,13 +7,13 @@ const settings = require('settings-sharelatex') const metrics = require('metrics-sharelatex') const logger = require('logger-sharelatex') -const Minipass = require('minipass') const meter = require('stream-meter') +const Stream = require('stream') const crypto = require('crypto') const fs = require('fs') const S3 = require('aws-sdk/clients/s3') const { URL } = require('url') -const { callbackify } = require('util') +const { callbackify, promisify } = require('util') const { WriteError, ReadError, @@ -46,6 +46,8 @@ module.exports = { } } +const pipeline = promisify(Stream.pipeline) + function hexToBase64(hex) { return Buffer.from(hex, 'hex').toString('base64') } @@ -68,7 +70,6 @@ async 
function sendFile(bucketName, key, fsPath) { async function sendStream(bucketName, key, readStream, sourceMd5) { try { // if there is no supplied md5 hash, we calculate the hash as the data passes through - const passthroughStream = new Minipass() let hashPromise let b64Hash @@ -77,29 +78,24 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { } else { const hash = crypto.createHash('md5') hash.setEncoding('hex') - passthroughStream.pipe(hash) + pipeline(readStream, hash) hashPromise = new Promise((resolve, reject) => { - passthroughStream.on('end', () => { + readStream.on('end', () => { hash.end() resolve(hash.read()) }) - passthroughStream.on('error', err => { + readStream.on('error', err => { reject(err) }) }) } const meteredStream = meter() - passthroughStream.pipe(meteredStream) meteredStream.on('finish', () => { metrics.count('s3.egress', meteredStream.bytes) }) - // pipe the readstream through minipass, which can write to both the metered - // stream (which goes on to S3) and the md5 generator if necessary - // - we do this last so that a listener streams does not consume data meant - // for both destinations - readStream.pipe(passthroughStream) + pipeline(readStream, meteredStream) // if we have an md5 hash, pass this to S3 to verify the upload const uploadOptions = { diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 1c96445a3a..1d927618e5 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -497,16 +497,16 @@ describe('Filestore', function() { ) }) - it('should fetch the file', async function() { - const res = await rp.get(fileUrl) - expect(res.body).to.equal(constantFileContent) - }) - describe('when copyOnMiss is disabled', function() { beforeEach(function() { Settings.filestore.fallback.copyOnMiss = false }) + it('should fetch the file', async function() { + const res = await rp.get(fileUrl) + expect(res.body).to.equal(constantFileContent) + }) + it('should not copy the file to the primary', async function() { await rp.get(fileUrl) @@ -523,6 +523,11 @@ describe('Filestore', function() { Settings.filestore.fallback.copyOnMiss = true }) + it('should fetch the file', async function() { + const res = await rp.get(fileUrl) + expect(res.body).to.equal(constantFileContent) + }) + it('copies the file to the primary', async function() { await rp.get(fileUrl) // wait for the file to copy in the background @@ -578,21 +583,51 @@ describe('Filestore', function() { ) }) - it('should not copy the old file to the new bucket', async function() { - await expectPersistorNotToHaveFile( - app.persistor.primaryPersistor, - bucket, - fileKey - ) + describe('when copyOnMiss is false', function() { + beforeEach(function() { + Settings.filestore.fallback.copyOnMiss = false + }) + + it('should create a new file in the new bucket', async function() { + await expectPersistorToHaveFile( + app.persistor.primaryPersistor, + bucket, + newFileKey, + constantFileContent + ) + }) + + it('should not copy the old file to the primary with the old key', async function() { + await expectPersistorNotToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey + ) + }) }) - it('should create a new file in the new bucket', async function() { - await expectPersistorToHaveFile( - app.persistor.primaryPersistor, - bucket, - newFileKey, - constantFileContent - ) + describe('when copyOnMiss is true', function() { + beforeEach(function() { + 
Settings.filestore.fallback.copyOnMiss = true + }) + + it('should create a new file in the new bucket', async function() { + await expectPersistorToHaveFile( + app.persistor.primaryPersistor, + bucket, + newFileKey, + constantFileContent + ) + }) + + it('should copy the old file to the primary with the old key', async function() { + await expectPersistorToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey, + constantFileContent + ) + }) }) }) }) diff --git a/services/filestore/test/unit/js/MigrationPersistorTests.js b/services/filestore/test/unit/js/MigrationPersistorTests.js index 83159f38ad..db8401c78c 100644 --- a/services/filestore/test/unit/js/MigrationPersistorTests.js +++ b/services/filestore/test/unit/js/MigrationPersistorTests.js @@ -26,8 +26,8 @@ describe('MigrationPersistorTests', function() { let Metrics, Settings, Logger, + Stream, MigrationPersistor, - Minipass, fileStream, newPersistor @@ -82,24 +82,22 @@ describe('MigrationPersistorTests', function() { inc: sinon.stub() } + Stream = { + pipeline: sinon.stub().yields(), + PassThrough: sinon.stub() + } + Logger = { warn: sinon.stub() } - Minipass = sinon.stub() - Minipass.prototype.on = sinon - .stub() - .withArgs('end') - .yields() - Minipass.prototype.pipe = sinon.stub() - MigrationPersistor = SandboxedModule.require(modulePath, { requires: { 'settings-sharelatex': Settings, + stream: Stream, './Errors': Errors, 'metrics-sharelatex': Metrics, - 'logger-sharelatex': Logger, - minipass: Minipass + 'logger-sharelatex': Logger }, globals: { console } }) @@ -155,7 +153,7 @@ describe('MigrationPersistorTests', function() { }) it('should return the file stream', function() { - expect(response).to.equal(fileStream) + expect(response).to.be.an.instanceOf(Stream.PassThrough) }) it('should fetch the file from the primary persistor with the correct options', function() { @@ -215,13 +213,13 @@ describe('MigrationPersistorTests', function() { ).to.have.been.calledWithExactly( bucket, key, - sinon.match.instanceOf(Minipass), + sinon.match.instanceOf(Stream.PassThrough), md5 ) }) it('should send a stream to the client', function() { - expect(returnedStream).to.be.an.instanceOf(Minipass) + expect(returnedStream).to.be.an.instanceOf(Stream.PassThrough) }) }) @@ -476,7 +474,12 @@ describe('MigrationPersistorTests', function() { it('should send the file to the primary', function() { expect( primaryPersistor.promises.sendStream - ).to.have.been.calledWithExactly(bucket, destKey, fileStream, md5) + ).to.have.been.calledWithExactly( + bucket, + destKey, + sinon.match.instanceOf(Stream.PassThrough), + md5 + ) }) }) diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js index 4f700c8797..b9711572c2 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -35,6 +35,7 @@ describe('S3PersistorTests', function() { Meter, MeteredStream, ReadStream, + Stream, S3Persistor, S3Client, S3ReadStream, @@ -43,7 +44,6 @@ describe('S3PersistorTests', function() { FileNotFoundError, EmptyPromise, settings, - Minipass, Hash, crypto @@ -61,6 +61,10 @@ describe('S3PersistorTests', function() { } } + Stream = { + pipeline: sinon.stub().yields() + } + EmptyPromise = { promise: sinon.stub().resolves() } @@ -70,7 +74,11 @@ describe('S3PersistorTests', function() { } ReadStream = { - pipe: sinon.stub().returns('readStream') + pipe: sinon.stub().returns('readStream'), + on: sinon + .stub() + .withArgs('end') + .yields() } 
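// NOTE: ReadStream's 'end' handler is stubbed to yield immediately so that
// sendStream's md5 hash promise, which waits for 'end' on the read stream in
// this version of S3Persistor, resolves without piping any real data through
// the stubs.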
FileNotFoundError = new Error('File not found') @@ -132,13 +140,6 @@ describe('S3PersistorTests', function() { createHash: sinon.stub().returns(Hash) } - Minipass = sinon.stub() - Minipass.prototype.on = sinon - .stub() - .withArgs('end') - .yields() - Minipass.prototype.pipe = sinon.stub() - Logger = { warn: sinon.stub() } @@ -151,8 +152,8 @@ describe('S3PersistorTests', function() { './Errors': Errors, fs: Fs, 'stream-meter': Meter, + stream: Stream, 'metrics-sharelatex': Metrics, - minipass: Minipass, crypto }, globals: { console } @@ -456,7 +457,10 @@ describe('S3PersistorTests', function() { }) it('should meter the stream', function() { - expect(Minipass.prototype.pipe).to.have.been.calledWith(MeteredStream) + expect(Stream.pipeline).to.have.been.calledWith( + ReadStream, + MeteredStream + ) }) it('should record an egress metric', function() { @@ -464,7 +468,7 @@ describe('S3PersistorTests', function() { }) it('calculates the md5 hash of the file', function() { - expect(Minipass.prototype.pipe).to.have.been.calledWith(Hash) + expect(Stream.pipeline).to.have.been.calledWith(ReadStream, Hash) }) }) @@ -479,7 +483,10 @@ describe('S3PersistorTests', function() { }) it('should not calculate the md5 hash of the file', function() { - expect(Minipass.prototype.pipe).not.to.have.been.calledWith(Hash) + expect(Stream.pipeline).not.to.have.been.calledWith( + sinon.match.any, + Hash + ) }) it('sends the hash in base64', function() { From 42a5d168dfce029e1978811a84c9f51093936c0b Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 3 Feb 2020 15:55:28 +0000 Subject: [PATCH 410/555] Remove unused packages --- services/filestore/npm-shrinkwrap.json | 291 ------------------------- services/filestore/package.json | 6 - 2 files changed, 297 deletions(-) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/npm-shrinkwrap.json index a4206a94e0..bdc836e237 100644 --- a/services/filestore/npm-shrinkwrap.json +++ b/services/filestore/npm-shrinkwrap.json @@ -606,11 +606,6 @@ "event-target-shim": "^5.0.0" } }, - "accept-encoding": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz", - "integrity": "sha1-XdiLjfcfHcLlzGuVZezOHjmaMz4=" - }, "accepts": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz", @@ -790,11 +785,6 @@ } } }, - "aws-sign": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.1.tgz", - "integrity": "sha1-uWGyLwuqTxXsJBFA83dtbBQoVtA=" - }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", @@ -857,14 +847,6 @@ "tweetnacl": "^0.14.3" } }, - "best-encoding": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz", - "integrity": "sha1-GVIT2rysBFgYuAe3ox+Dn63cl04=", - "requires": { - "accept-encoding": "~0.1.0" - } - }, "bignumber.js": { "version": "7.2.1", "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", @@ -883,14 +865,6 @@ "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" }, - "bl": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz", - "integrity": "sha1-P7BnBgKsKHjrdw3CA58YNr5irls=", - "requires": { - "readable-stream": "~1.0.2" - } - }, "body-parser": { "version": "1.18.3", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.3.tgz", @@ -914,14 +888,6 @@ "integrity": 
"sha1-tcCeF8rNET0Rt7s+04TMASmU2Gs=", "dev": true }, - "boom": { - "version": "0.3.8", - "resolved": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz", - "integrity": "sha1-yM2wQUNZEnQWKMBE7Mcy0dF8Ceo=", - "requires": { - "hoek": "0.7.x" - } - }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -995,11 +961,6 @@ "quick-lru": "^4.0.1" } }, - "caseless": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz", - "integrity": "sha1-U06XkWOH07cGtk/eu6xGQ4RQk08=" - }, "chai": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz", @@ -1117,14 +1078,6 @@ "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", "dev": true }, - "combined-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", - "integrity": "sha1-ATfmV7qlp1QcV6w3rF/AfXO03B8=", - "requires": { - "delayed-stream": "0.0.5" - } - }, "common-tags": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", @@ -1171,11 +1124,6 @@ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" }, - "cookie-jar": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz", - "integrity": "sha1-ZOzAasl423leS1KQy+SLo3gUAPo=" - }, "cookie-signature": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", @@ -1213,14 +1161,6 @@ } } }, - "cryptiles": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz", - "integrity": "sha1-GlVnNPBtJLo0hirpy55wmjr7/xw=", - "requires": { - "boom": "0.3.x" - } - }, "dashdash": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", @@ -1272,11 +1212,6 @@ "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", "integrity": "sha1-7+6/uPVFV5yzlrOnIkQ+yW0UxQ4=" }, - "delayed-stream": { - "version": "0.0.5", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", - "integrity": "sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8=" - }, "depd": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", @@ -2078,28 +2013,6 @@ } } }, - "forever-agent": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz", - "integrity": "sha1-4cJcetROCcOPIzh2x2/MJP+EOx8=" - }, - "form-data": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz", - "integrity": "sha1-2zRaU3jYau6x7V1VO4aawZLS9e0=", - "requires": { - "async": "~0.2.7", - "combined-stream": "~0.0.4", - "mime": "~1.2.2" - }, - "dependencies": { - "mime": { - "version": "1.2.11", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", - "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" - } - } - }, "forwarded": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", @@ -2323,17 +2236,6 @@ "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", "dev": true }, - "hawk": { - "version": "0.10.2", - "resolved": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz", - "integrity": "sha1-mzYd7pWpMWQObVBOBWCaj8OsRdI=", - "requires": { - "boom": "0.3.x", - "cryptiles": "0.1.x", - "hoek": "0.7.x", - "sntp": "0.1.x" - } - }, "he": { "version": "1.1.1", 
"resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", @@ -2349,11 +2251,6 @@ "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", "integrity": "sha1-jhzkvvNqdPfVcjw/swkMKGAHczg=" }, - "hoek": { - "version": "0.7.6", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz", - "integrity": "sha1-YPvZBFV1Qc0rh5Wr8wihs3cOFVo=" - }, "hosted-git-info": { "version": "2.8.5", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz", @@ -2701,28 +2598,6 @@ "graceful-fs": "^4.1.9" } }, - "knox": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/knox/-/knox-0.9.2.tgz", - "integrity": "sha1-NzZZNmniTwJP2vcjtqHcSv2DmnE=", - "requires": { - "debug": "^1.0.2", - "mime": "*", - "once": "^1.3.0", - "stream-counter": "^1.0.0", - "xml2js": "^0.4.4" - }, - "dependencies": { - "debug": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.5.tgz", - "integrity": "sha1-9yQSF0MPmd7EwrRz6rkiKOh0wqw=", - "requires": { - "ms": "2.0.0" - } - } - } - }, "levn": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", @@ -3129,21 +3004,6 @@ "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" }, - "minipass": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.1.tgz", - "integrity": "sha512-UFqVihv6PQgwj8/yTGvl9kPz7xIAY+R5z6XYjRInD3Gk3qx6QGSD6zEcpeG4Dy/lQnv1J6zv8ejV90hyYIKf3w==", - "requires": { - "yallist": "^4.0.0" - }, - "dependencies": { - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } - } - }, "mkdirp": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", @@ -3368,55 +3228,6 @@ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.4.tgz", "integrity": "sha1-1nOGYrZhvhnicR7wGqOxghLxMDA=" }, - "node-transloadit": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/node-transloadit/-/node-transloadit-0.0.4.tgz", - "integrity": "sha1-4ZoHheON94NblO2AANHjXmg7zsE=", - "requires": { - "request": "~2.16.6", - "underscore": "1.2.1" - }, - "dependencies": { - "json-stringify-safe": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz", - "integrity": "sha1-nbew5TDH8onF6MhDKvGRwv91pbM=" - }, - "mime": { - "version": "1.2.11", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", - "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" - }, - "qs": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz", - "integrity": "sha1-MbGtBYVnZRxSaSFQa5qHk5EaA4Q=" - }, - "request": { - "version": "2.16.6", - "resolved": "https://registry.npmjs.org/request/-/request-2.16.6.tgz", - "integrity": "sha1-hy/kRa5y3iZrN4edatfclI+gHK0=", - "requires": { - "aws-sign": "~0.2.0", - "cookie-jar": "~0.2.0", - "forever-agent": "~0.2.0", - "form-data": "~0.0.3", - "hawk": "~0.10.2", - "json-stringify-safe": "~3.0.0", - "mime": "~1.2.7", - "node-uuid": "~1.4.0", - "oauth-sign": "~0.2.0", - "qs": "~0.5.4", - "tunnel-agent": "~0.2.0" - } - }, - "underscore": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz", - "integrity": "sha1-/FxrB2VnPZKi1KyLTcCqiHAuK9Q=" - } - } - }, "node-uuid": { "version": "1.4.8", "resolved": 
"https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", @@ -3442,11 +3253,6 @@ } } }, - "oauth-sign": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz", - "integrity": "sha1-oOahcV2u0GLzIrYit/5a/RA1tuI=" - }, "object-inspect": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", @@ -4461,29 +4267,6 @@ "read-pkg": "^2.0.0" } }, - "readable-stream": { - "version": "1.0.34", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", - "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "0.0.1", - "string_decoder": "~0.10.x" - }, - "dependencies": { - "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" - } - } - }, - "recluster": { - "version": "0.3.7", - "resolved": "https://registry.npmjs.org/recluster/-/recluster-0.3.7.tgz", - "integrity": "sha1-aKRx3ZC2obl3ZjTPdpZAWutWeJU=" - }, "regexpp": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", @@ -4663,24 +4446,6 @@ "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true }, - "response": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/response/-/response-0.14.0.tgz", - "integrity": "sha1-BmNS/z5rAm0EdYCUB2Y7Rob9JpY=", - "requires": { - "best-encoding": "^0.1.1", - "bl": "~0.7.0", - "caseless": "^0.3.0", - "mime": "~1.2.11" - }, - "dependencies": { - "mime": { - "version": "1.2.11", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", - "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" - } - } - }, "restore-cursor": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", @@ -4895,14 +4660,6 @@ } } }, - "sntp": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz", - "integrity": "sha1-XvSBuVGnspr/30r9fyaDj8ESD4Q=", - "requires": { - "hoek": "0.7.x" - } - }, "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", @@ -4990,49 +4747,11 @@ "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=" }, - "stream-browserify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.1.tgz", - "integrity": "sha1-ZiZu5fm9uZQKTkUUyvtDu3Hlyds=", - "requires": { - "inherits": "~2.0.1", - "readable-stream": "^2.0.2" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.6", - "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha1-sRwn2IuP8fvgcGQ8+UsMea4bCq8=", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha1-nPFhG6YmhdcDCunkujQUnDrwP8g=", - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, "stream-buffers": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz", "integrity": 
"sha1-GBwI1bs2kARfaUAbmuanoM8zE/w=" }, - "stream-counter": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz", - "integrity": "sha1-kc8lac5NxQYf6816yyY5SloRR1E=" - }, "stream-meter": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/stream-meter/-/stream-meter-1.0.4.tgz", @@ -5117,11 +4836,6 @@ "function-bind": "^1.1.1" } }, - "string_decoder": { - "version": "0.10.31", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" - }, "strip-ansi": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", @@ -5321,11 +5035,6 @@ "integrity": "sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==", "dev": true }, - "tunnel-agent": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz", - "integrity": "sha1-aFPCr7GyEJ5FYp5JK9419Fnqaeg=" - }, "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 2e9cef8aa0..6f1dde0e8a 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -28,22 +28,16 @@ "fs-extra": "^1.0.0", "glob": "^7.1.6", "heapdump": "^0.3.2", - "knox": "~0.9.1", "logger-sharelatex": "^1.7.0", "metrics-sharelatex": "^2.2.0", - "minipass": "^3.1.1", "mocha": "5.2.0", - "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", "pngcrush": "0.0.3", "range-parser": "^1.0.2", - "recluster": "^0.3.7", "request": "^2.88.0", "request-promise-native": "^1.0.8", - "response": "0.14.0", "rimraf": "2.2.8", "settings-sharelatex": "^1.1.0", - "stream-browserify": "^2.0.1", "stream-buffers": "~0.2.5", "stream-meter": "^1.0.4", "underscore": "~1.5.2" From f4a16cd97230127ca5decf82fdd6763403b74f74 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 3 Feb 2020 16:10:10 +0000 Subject: [PATCH 411/555] Update tests to properly check for copied files --- .../test/acceptance/js/FilestoreTests.js | 74 +++++++++++++------ 1 file changed, 50 insertions(+), 24 deletions(-) diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 1d927618e5..54e9d457ce 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -543,7 +543,7 @@ describe('Filestore', function() { }) describe('when copying a file', function() { - let newFileId, newFileUrl, newFileKey + let newFileId, newFileUrl, newFileKey, opts beforeEach(async function() { const newProjectID = `acceptance_tests_copied_project_${Math.random()}` @@ -551,7 +551,7 @@ describe('Filestore', function() { newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}` newFileKey = `${newProjectID}/${directoryName}/${newFileId}` - const opts = { + opts = { method: 'put', uri: newFileUrl, json: { @@ -561,31 +561,31 @@ describe('Filestore', function() { } } } - - const response = await rp(opts) - expect(response.statusCode).to.equal(200) - }) - - it('should leave the old file in the old bucket', async function() { - await expectPersistorToHaveFile( - app.persistor.fallbackPersistor, - fallbackBucket, - fileKey, - constantFileContent - ) - }) - - it('should not create a new file in the old bucket', async function() { - await expectPersistorNotToHaveFile( - 
app.persistor.fallbackPersistor, - fallbackBucket, - newFileKey - ) }) describe('when copyOnMiss is false', function() { - beforeEach(function() { + beforeEach(async function() { Settings.filestore.fallback.copyOnMiss = false + + const response = await rp(opts) + expect(response.statusCode).to.equal(200) + }) + + it('should leave the old file in the old bucket', async function() { + await expectPersistorToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + it('should not create a new file in the old bucket', async function() { + await expectPersistorNotToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + newFileKey + ) }) it('should create a new file in the new bucket', async function() { @@ -598,6 +598,9 @@ describe('Filestore', function() { }) it('should not copy the old file to the primary with the old key', async function() { + // wait for the file to copy in the background + await promisify(setTimeout)(1000) + await expectPersistorNotToHaveFile( app.persistor.primaryPersistor, bucket, @@ -607,8 +610,28 @@ describe('Filestore', function() { }) describe('when copyOnMiss is true', function() { - beforeEach(function() { + beforeEach(async function() { Settings.filestore.fallback.copyOnMiss = true + + const response = await rp(opts) + expect(response.statusCode).to.equal(200) + }) + + it('should leave the old file in the old bucket', async function() { + await expectPersistorToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + it('should not create a new file in the old bucket', async function() { + await expectPersistorNotToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + newFileKey + ) }) it('should create a new file in the new bucket', async function() { @@ -621,6 +644,9 @@ describe('Filestore', function() { }) it('should copy the old file to the primary with the old key', async function() { + // wait for the file to copy in the background + await promisify(setTimeout)(1000) + await expectPersistorToHaveFile( app.persistor.primaryPersistor, bucket, From 6dcf35137786f8191158756553685d5c030168d8 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 3 Feb 2020 16:11:48 +0000 Subject: [PATCH 412/555] Remove unnecessary 'async' --- services/filestore/test/acceptance/js/FilestoreTests.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 54e9d457ce..e8aac829a6 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -545,7 +545,7 @@ describe('Filestore', function() { describe('when copying a file', function() { let newFileId, newFileUrl, newFileKey, opts - beforeEach(async function() { + beforeEach(function() { const newProjectID = `acceptance_tests_copied_project_${Math.random()}` newFileId = Math.random() newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}` From 304fdfd35cdd63023454c7ec4b33086d0dee953c Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 11 Feb 2020 16:39:03 +0000 Subject: [PATCH 413/555] Explicitly resume stream after adding listener --- services/filestore/test/acceptance/js/FilestoreTests.js | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 
e8aac829a6..fd1baed474 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -36,6 +36,7 @@ function streamToString(stream) { stream.on('data', chunk => chunks.push(chunk)) stream.on('error', reject) stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8'))) + stream.resume() }) } From 93cd55fb79f4b595b3ea948444080688fe439fa3 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 29 Jan 2020 12:23:31 +0000 Subject: [PATCH 414/555] Refactor persistors to use a helper for common things --- services/filestore/app/js/FSPersistor.js | 31 ++-- services/filestore/app/js/PersistorHelper.js | 114 ++++++++++++++ services/filestore/app/js/S3Persistor.js | 141 +++++++----------- .../test/unit/js/FSPersistorTests.js | 5 +- .../test/unit/js/S3PersistorTests.js | 38 ++--- 5 files changed, 203 insertions(+), 126 deletions(-) create mode 100644 services/filestore/app/js/PersistorHelper.js diff --git a/services/filestore/app/js/FSPersistor.js b/services/filestore/app/js/FSPersistor.js index 3f54e2d091..a5b1a35c8c 100644 --- a/services/filestore/app/js/FSPersistor.js +++ b/services/filestore/app/js/FSPersistor.js @@ -8,6 +8,7 @@ const { promisify, callbackify } = require('util') const LocalFileWriter = require('./LocalFileWriter').promises const { NotFoundError, ReadError, WriteError } = require('./Errors') +const PersistorHelper = require('./PersistorHelper') const pipeline = promisify(Stream.pipeline) const fsUnlink = promisify(fs.unlink) @@ -28,7 +29,7 @@ async function sendFile(location, target, source) { const targetStream = fs.createWriteStream(`${location}/${filteredTarget}`) await pipeline(sourceStream, targetStream) } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to copy the specified file', { location, target, source }, @@ -65,7 +66,7 @@ async function getFileStream(location, name, opts) { try { opts.fd = await fsOpen(`${location}/${filteredName}`, 'r') } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to open file for streaming', { location, filteredName, opts }, @@ -83,7 +84,7 @@ async function getFileSize(location, filename) { const stat = await fsStat(fullPath) return stat.size } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to stat file', { location, filename }, @@ -126,7 +127,7 @@ async function copyFile(location, fromName, toName) { const targetStream = fs.createWriteStream(`${location}/${filteredToName}`) await pipeline(sourceStream, targetStream) } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to copy file', { location, filteredFromName, filteredToName }, @@ -140,7 +141,7 @@ async function deleteFile(location, name) { try { await fsUnlink(`${location}/${filteredName}`) } catch (err) { - const wrappedError = _wrapError( + const wrappedError = PersistorHelper.wrapError( err, 'failed to delete file', { location, filteredName }, @@ -161,7 +162,7 @@ async function deleteDirectory(location, name) { try { await rmrf(`${location}/${filteredName}`) } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to delete directory', { location, filteredName }, @@ -179,7 +180,7 @@ async function checkIfFileExists(location, name) { if (err.code === 'ENOENT') { return false } - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to stat file', { location, filteredName }, @@ -209,7 +210,7 @@ async function directorySize(location, 
name) { } } } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to get directory size', { location, name }, @@ -220,20 +221,6 @@ async function directorySize(location, name) { return size } -function _wrapError(error, message, params, ErrorType) { - if (error.code === 'ENOENT') { - return new NotFoundError({ - message: 'no such file or directory', - info: params - }).withCause(error) - } else { - return new ErrorType({ - message: message, - info: params - }).withCause(error) - } -} - module.exports = { sendFile: callbackify(sendFile), sendStream: callbackify(sendStream), diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js new file mode 100644 index 0000000000..d8beb4a0a9 --- /dev/null +++ b/services/filestore/app/js/PersistorHelper.js @@ -0,0 +1,114 @@ +const crypto = require('crypto') +const meter = require('stream-meter') +const Stream = require('stream') +const logger = require('logger-sharelatex') +const { WriteError, ReadError, NotFoundError } = require('./Errors') +const { promisify } = require('util') + +const pipeline = promisify(Stream.pipeline) + +module.exports = { + calculateStreamMd5, + verifyMd5, + getMeteredStream, + waitForStreamReady, + wrapError +} + +// returns a promise which resolves with the md5 hash of the stream +function calculateStreamMd5(stream) { + const hash = crypto.createHash('md5') + hash.setEncoding('hex') + + return new Promise((resolve, reject) => { + pipeline(stream, hash) + .then(() => { + hash.end() + resolve(hash.read()) + }) + .catch(err => { + reject(err) + }) + }) +} + +// verifies the md5 hash of a file against the supplied md5 or the one stored in +// storage if not supplied - deletes the new file if the md5 does not match and +// throws an error +async function verifyMd5(persistor, bucket, key, sourceMd5, destMd5 = null) { + if (!destMd5) { + destMd5 = await persistor.promises.getFileMd5Hash(bucket, key) + } + + if (sourceMd5 !== destMd5) { + try { + await persistor.promises.deleteFile(bucket, key) + } catch (err) { + logger.warn(err, 'error deleting file for invalid upload') + } + + throw new WriteError({ + message: 'source and destination hashes do not match', + info: { + sourceMd5, + destMd5, + bucket, + key + } + }) + } +} + +// returns the next stream in the pipeline, and calls the callback with the byte count +// when the stream finishes or receives an error +function getMeteredStream(stream, callback) { + const meteredStream = meter() + + pipeline(stream, meteredStream) + .then(() => { + callback(null, meteredStream.bytes) + }) + .catch(err => { + // on error, just send how many bytes we received before the stream stopped + callback(err, meteredStream.bytes) + }) + + return meteredStream +} + +// resolves when a stream is 'readable', or rejects if the stream throws an error +// before that happens - this lets us handle protocol-level errors before trying +// to read them +function waitForStreamReady(stream) { + return new Promise((resolve, reject) => { + const onError = function(err) { + reject(wrapError(err, 'error before stream became ready', {}, ReadError)) + } + const onStreamReady = function() { + stream.removeListener('readable', onStreamReady) + stream.removeListener('error', onError) + resolve(stream) + } + stream.on('readable', onStreamReady) + stream.on('error', onError) + }) +} + +function wrapError(error, message, params, ErrorType) { + if ( + error instanceof NotFoundError || + ['NoSuchKey', 'NotFound', 404, 'AccessDenied', 
'ENOENT'].includes( + error.code + ) + ) { + return new NotFoundError({ + message: 'no such file', + info: params + }).withCause(error) + } else { + return new ErrorType({ + message: message, + info: params + }).withCause(error) + } +} diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index a10251a642..196d2aecda 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -5,11 +5,11 @@ https.globalAgent.maxSockets = 300 const settings = require('settings-sharelatex') const metrics = require('metrics-sharelatex') -const logger = require('logger-sharelatex') + +const PersistorHelper = require('./PersistorHelper') const meter = require('stream-meter') const Stream = require('stream') -const crypto = require('crypto') const fs = require('fs') const S3 = require('aws-sdk/clients/s3') const { URL } = require('url') @@ -21,7 +21,7 @@ const { SettingsError } = require('./Errors') -module.exports = { +const S3Persistor = { sendFile: callbackify(sendFile), sendStream: callbackify(sendStream), getFileStream: callbackify(getFileStream), @@ -46,6 +46,8 @@ module.exports = { } } +module.exports = S3Persistor + const pipeline = promisify(Stream.pipeline) function hexToBase64(hex) { @@ -57,7 +59,7 @@ async function sendFile(bucketName, key, fsPath) { try { readStream = fs.createReadStream(fsPath) } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'error reading file from disk', { bucketName, key, fsPath }, @@ -76,27 +78,14 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { if (sourceMd5) { b64Hash = hexToBase64(sourceMd5) } else { - const hash = crypto.createHash('md5') - hash.setEncoding('hex') - pipeline(readStream, hash) - hashPromise = new Promise((resolve, reject) => { - readStream.on('end', () => { - hash.end() - resolve(hash.read()) - }) - readStream.on('error', err => { - reject(err) - }) - }) + hashPromise = PersistorHelper.calculateStreamMd5(readStream) } - const meteredStream = meter() - meteredStream.on('finish', () => { - metrics.count('s3.egress', meteredStream.bytes) + const meteredStream = PersistorHelper.getMeteredStream(readStream, (_, byteCount) => { + // ignore the error parameter and just log the byte count + metrics.count('s3.egress', byteCount) }) - pipeline(readStream, meteredStream) - // if we have an md5 hash, pass this to S3 to verify the upload const uploadOptions = { Bucket: bucketName, @@ -112,30 +101,21 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { .promise() const destMd5 = _md5FromResponse(response) - // if we didn't have an md5 hash, compare our computed one with S3's + // if we didn't have an md5 hash, we should compare our computed one with S3's + // as we couldn't tell S3 about it beforehand if (hashPromise) { sourceMd5 = await hashPromise - - if (sourceMd5 !== destMd5) { - try { - await deleteFile(bucketName, key) - } catch (err) { - logger.warn(err, 'error deleting file for invalid upload') - } - - throw new WriteError({ - message: 'source and destination hashes do not match', - info: { - sourceMd5, - destMd5, - bucketName, - key - } - }) - } + // throws on mismatch + await PersistorHelper.verifyMd5( + S3Persistor, + bucketName, + key, + sourceMd5, + destMd5 + ) } } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'upload to S3 failed', { bucketName, key }, @@ -155,25 +135,29 @@ async function getFileStream(bucketName, key, opts) { params.Range = `bytes=${opts.start}-${opts.end}` } - 
return new Promise((resolve, reject) => { - const stream = _getClientForBucket(bucketName) - .getObject(params) - .createReadStream() + const stream = _getClientForBucket(bucketName) + .getObject(params) + .createReadStream() - const meteredStream = meter() - meteredStream.on('finish', () => { - metrics.count('s3.ingress', meteredStream.bytes) - }) - - const onStreamReady = function() { - stream.removeListener('readable', onStreamReady) - resolve(stream.pipe(meteredStream)) + const meteredStream = PersistorHelper.getMeteredStream( + stream, + (_, byteCount) => { + // ignore the error parameter and just log the byte count + metrics.count('s3.ingress', byteCount) } - stream.on('readable', onStreamReady) - stream.on('error', err => { - reject(_wrapError(err, 'error reading from S3', params, ReadError)) - }) - }) + ) + + try { + await PersistorHelper.waitForStreamReady(stream) + return meteredStream + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error reading file from S3', + { bucketName, key, opts }, + ReadError + ) + } } async function deleteDirectory(bucketName, key) { @@ -184,7 +168,7 @@ async function deleteDirectory(bucketName, key) { .listObjects({ Bucket: bucketName, Prefix: key }) .promise() } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to list objects in S3', { bucketName, key }, @@ -205,7 +189,7 @@ async function deleteDirectory(bucketName, key) { }) .promise() } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to delete objects in S3', { bucketName, key }, @@ -222,7 +206,7 @@ async function getFileSize(bucketName, key) { .promise() return response.ContentLength } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'error getting size of s3 object', { bucketName, key }, @@ -239,7 +223,7 @@ async function getFileMd5Hash(bucketName, key) { const md5 = _md5FromResponse(response) return md5 } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'error getting hash of s3 object', { bucketName, key }, @@ -255,7 +239,7 @@ async function deleteFile(bucketName, key) { .promise() } catch (err) { // s3 does not give us a NotFoundError here - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to delete file in S3', { bucketName, key }, @@ -275,7 +259,12 @@ async function copyFile(bucketName, sourceKey, destKey) { .copyObject(params) .promise() } catch (err) { - throw _wrapError(err, 'failed to copy file in S3', params, WriteError) + throw PersistorHelper.wrapError( + err, + 'failed to copy file in S3', + params, + WriteError + ) } } @@ -287,7 +276,7 @@ async function checkIfFileExists(bucketName, key) { if (err instanceof NotFoundError) { return false } - throw _wrapError( + throw PersistorHelper.wrapError( err, 'error checking whether S3 object exists', { bucketName, key }, @@ -304,7 +293,7 @@ async function directorySize(bucketName, key) { return response.Contents.reduce((acc, item) => item.Size + acc, 0) } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'error getting directory size in S3', { bucketName, key }, @@ -313,26 +302,6 @@ async function directorySize(bucketName, key) { } } -function _wrapError(error, message, params, ErrorType) { - // the AWS client can return one of 'NoSuchKey', 'NotFound' or 404 (integer) - // when something is not found, depending on the endpoint - if ( - ['NoSuchKey', 'NotFound', 404, 'AccessDenied', 'ENOENT'].includes( - error.code - ) - ) { - return new NotFoundError({ - message: 'no such 
file', - info: params - }).withCause(error) - } else { - return new ErrorType({ - message: message, - info: params - }).withCause(error) - } -} - const _clients = new Map() let _defaultClient diff --git a/services/filestore/test/unit/js/FSPersistorTests.js b/services/filestore/test/unit/js/FSPersistorTests.js index 1be8eea3e2..0a09869bc0 100644 --- a/services/filestore/test/unit/js/FSPersistorTests.js +++ b/services/filestore/test/unit/js/FSPersistorTests.js @@ -70,7 +70,10 @@ describe('FSPersistorTests', function() { glob, rimraf, stream, - crypto + crypto, + // imported by PersistorHelper but otherwise unused here + 'stream-meter': {}, + 'logger-sharelatex': {} }, globals: { console } }) diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js index b9711572c2..9686deed5f 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -89,6 +89,7 @@ describe('S3PersistorTests', function() { } MeteredStream = { + type: 'metered', on: sinon.stub(), bytes: objectSize } @@ -103,7 +104,7 @@ describe('S3PersistorTests', function() { S3ReadStream = { on: sinon.stub(), - pipe: sinon.stub().returns('s3Stream'), + pipe: sinon.stub(), removeListener: sinon.stub() } S3ReadStream.on.withArgs('readable').yields() @@ -168,8 +169,8 @@ describe('S3PersistorTests', function() { stream = await S3Persistor.promises.getFileStream(bucket, key) }) - it('returns a stream', function() { - expect(stream).to.equal('s3Stream') + it('returns a metered stream', function() { + expect(stream).to.equal(MeteredStream) }) it('sets the AWS client up with credentials from settings', function() { @@ -184,7 +185,10 @@ describe('S3PersistorTests', function() { }) it('pipes the stream through the meter', function() { - expect(S3ReadStream.pipe).to.have.been.calledWith(MeteredStream) + expect(Stream.pipeline).to.have.been.calledWith( + S3ReadStream, + MeteredStream + ) }) it('records an ingress metric', function() { @@ -202,8 +206,8 @@ describe('S3PersistorTests', function() { }) }) - it('returns a stream', function() { - expect(stream).to.equal('s3Stream') + it('returns a metered stream', function() { + expect(stream).to.equal(MeteredStream) }) it('passes the byte range on to S3', function() { @@ -236,8 +240,8 @@ describe('S3PersistorTests', function() { stream = await S3Persistor.promises.getFileStream(bucket, key) }) - it('returns a stream', function() { - expect(stream).to.equal('s3Stream') + it('returns a metered stream', function() { + expect(stream).to.equal(MeteredStream) }) it('sets the AWS client up with the alternative credentials', function() { @@ -305,12 +309,12 @@ describe('S3PersistorTests', function() { expect(error).to.be.an.instanceOf(Errors.NotFoundError) }) - it('wraps the error from S3', function() { - expect(error.cause).to.equal(S3NotFoundError) + it('wraps the error', function() { + expect(error.cause).to.exist }) it('stores the bucket and key in the error', function() { - expect(error.info).to.deep.equal({ Bucket: bucket, Key: key }) + expect(error.info).to.include({ bucketName: bucket, key: key }) }) }) @@ -335,12 +339,12 @@ describe('S3PersistorTests', function() { expect(error).to.be.an.instanceOf(Errors.NotFoundError) }) - it('wraps the error from S3', function() { - expect(error.cause).to.equal(S3AccessDeniedError) + it('wraps the error', function() { + expect(error.cause).to.exist }) it('stores the bucket and key in the error', function() { - expect(error.info).to.deep.equal({ 
Bucket: bucket, Key: key }) + expect(error.info).to.include({ bucketName: bucket, key: key }) }) }) @@ -365,12 +369,12 @@ describe('S3PersistorTests', function() { expect(error).to.be.an.instanceOf(Errors.ReadError) }) - it('wraps the error from S3', function() { - expect(error.cause).to.equal(genericError) + it('wraps the error', function() { + expect(error.cause).to.exist }) it('stores the bucket and key in the error', function() { - expect(error.info).to.deep.equal({ Bucket: bucket, Key: key }) + expect(error.info).to.include({ bucketName: bucket, key: key }) }) }) }) From 49ad408b30d1497da456439a8ae3b5fca996b909 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 12 Feb 2020 10:32:26 +0000 Subject: [PATCH 415/555] Remove unused imports and format correctly --- services/filestore/app/js/S3Persistor.js | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index 196d2aecda..891d7be68e 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -8,12 +8,10 @@ const metrics = require('metrics-sharelatex') const PersistorHelper = require('./PersistorHelper') -const meter = require('stream-meter') -const Stream = require('stream') const fs = require('fs') const S3 = require('aws-sdk/clients/s3') const { URL } = require('url') -const { callbackify, promisify } = require('util') +const { callbackify } = require('util') const { WriteError, ReadError, @@ -48,8 +46,6 @@ const S3Persistor = { module.exports = S3Persistor -const pipeline = promisify(Stream.pipeline) - function hexToBase64(hex) { return Buffer.from(hex, 'hex').toString('base64') } @@ -81,10 +77,13 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { hashPromise = PersistorHelper.calculateStreamMd5(readStream) } - const meteredStream = PersistorHelper.getMeteredStream(readStream, (_, byteCount) => { - // ignore the error parameter and just log the byte count - metrics.count('s3.egress', byteCount) - }) + const meteredStream = PersistorHelper.getMeteredStream( + readStream, + (_, byteCount) => { + // ignore the error parameter and just log the byte count + metrics.count('s3.egress', byteCount) + } + ) // if we have an md5 hash, pass this to S3 to verify the upload const uploadOptions = { From 3b011258d207102620d0388d8f0098c77062648c Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 12 Feb 2020 12:02:06 +0000 Subject: [PATCH 416/555] Tidy up md5 hash generation --- services/filestore/app/js/FSPersistor.js | 19 +++++-------------- services/filestore/app/js/PersistorHelper.js | 11 +---------- 2 files changed, 6 insertions(+), 24 deletions(-) diff --git a/services/filestore/app/js/FSPersistor.js b/services/filestore/app/js/FSPersistor.js index a5b1a35c8c..973c670efd 100644 --- a/services/filestore/app/js/FSPersistor.js +++ b/services/filestore/app/js/FSPersistor.js @@ -1,7 +1,6 @@ const fs = require('fs') const glob = require('glob') const path = require('path') -const crypto = require('crypto') const rimraf = require('rimraf') const Stream = require('stream') const { promisify, callbackify } = require('util') @@ -105,19 +104,6 @@ async function getFileMd5Hash(location, filename) { } } -async function _getFileMd5HashForPath(fullPath) { - return new Promise((resolve, reject) => { - const readStream = fs.createReadStream(fullPath) - const hash = crypto.createHash('md5') - hash.setEncoding('hex') - readStream.on('end', () => { - hash.end() - 
resolve(hash.read()) - }) - pipeline(readStream, hash).catch(reject) - }) -} - async function copyFile(location, fromName, toName) { const filteredFromName = filterName(fromName) const filteredToName = filterName(toName) @@ -245,3 +231,8 @@ module.exports = { directorySize } } + +async function _getFileMd5HashForPath(fullPath) { + const stream = fs.createReadStream(fullPath) + return PersistorHelper.calculateStreamMd5(stream) +} diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js index d8beb4a0a9..ea8132a9c9 100644 --- a/services/filestore/app/js/PersistorHelper.js +++ b/services/filestore/app/js/PersistorHelper.js @@ -20,16 +20,7 @@ function calculateStreamMd5(stream) { const hash = crypto.createHash('md5') hash.setEncoding('hex') - return new Promise((resolve, reject) => { - pipeline(stream, hash) - .then(() => { - hash.end() - resolve(hash.read()) - }) - .catch(err => { - reject(err) - }) - }) + return pipeline(stream, hash).then(() => hash.read()) } // verifies the md5 hash of a file against the supplied md5 or the one stored in From 84a80930eb30dc02c2103a1dc6117c9eb2807f14 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Wed, 12 Feb 2020 14:39:54 +0100 Subject: [PATCH 417/555] [misc] rename npm-shrinkwrap.json to package-lock.json and run npm i --- services/filestore/{npm-shrinkwrap.json => package-lock.json} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename services/filestore/{npm-shrinkwrap.json => package-lock.json} (100%) diff --git a/services/filestore/npm-shrinkwrap.json b/services/filestore/package-lock.json similarity index 100% rename from services/filestore/npm-shrinkwrap.json rename to services/filestore/package-lock.json From 31f2572e579844440821494b4318824d7e5f9969 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 14 Feb 2020 09:25:14 +0000 Subject: [PATCH 418/555] Bump lodash from 4.17.11 to 4.17.15 Bumps [lodash](https://github.com/lodash/lodash) from 4.17.11 to 4.17.15. 
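The md5 tidy-up in the patch above boils both persistors down to one helper: pipe the source stream into a crypto hash and read the digest once the pipeline has settled. As a rough standalone sketch of that pattern (the file path below is a placeholder for illustration, not part of the filestore code):

const crypto = require('crypto')
const fs = require('fs')
const Stream = require('stream')
const { promisify } = require('util')

const pipeline = promisify(Stream.pipeline)

// resolves with the hex md5 digest of any readable stream
async function calculateStreamMd5(stream) {
  const hash = crypto.createHash('md5')
  hash.setEncoding('hex')
  await pipeline(stream, hash)
  return hash.read()
}

// usage: hash a file from disk ('/tmp/example.txt' is only a placeholder path)
calculateStreamMd5(fs.createReadStream('/tmp/example.txt'))
  .then(md5 => console.log(md5))
  .catch(err => console.error(err))

Because the hash object is itself a stream, stream.pipeline takes care of error propagation and cleanup, which is what lets the helper above drop the hand-rolled 'end'/'error' listeners.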
- [Release notes](https://github.com/lodash/lodash/releases) - [Commits](https://github.com/lodash/lodash/compare/4.17.11...4.17.15) Signed-off-by: dependabot[bot] --- services/filestore/package-lock.json | 70 ++-------------------------- 1 file changed, 5 insertions(+), 65 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index bdc836e237..64902d42af 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -25,12 +25,6 @@ "source-map": "^0.5.0" }, "dependencies": { - "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", - "dev": true - }, "source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", @@ -128,12 +122,6 @@ "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", "dev": true }, - "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", - "dev": true - }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -151,14 +139,6 @@ "esutils": "^2.0.2", "lodash": "^4.17.13", "to-fast-properties": "^2.0.0" - }, - "dependencies": { - "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", - "dev": true - } } }, "@google-cloud/common": { @@ -1272,7 +1252,7 @@ "dependencies": { "readable-stream": { "version": "2.3.6", - "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", "integrity": "sha1-sRwn2IuP8fvgcGQ8+UsMea4bCq8=", "requires": { "core-util-is": "~1.0.0", @@ -1458,12 +1438,6 @@ "ms": "^2.1.1" } }, - "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", - "dev": true - }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -2376,14 +2350,6 @@ "string-width": "^2.1.0", "strip-ansi": "^5.1.0", "through": "^2.3.6" - }, - "dependencies": { - "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", - "dev": true - } } }, "ipaddr.js": { @@ -2639,10 +2605,9 @@ } }, "lodash": { - "version": "4.17.11", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz", - "integrity": "sha1-s56mIp72B+zYniyN8SU2iRysm40=", - "dev": true + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" }, "lodash.get": { "version": "4.4.2", @@ -3673,12 +3638,6 @@ "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", "dev": true }, - "lodash": { - "version": "4.17.15", - "resolved": 
"https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", - "dev": true - }, "strip-ansi": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", @@ -3972,12 +3931,6 @@ "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", "dev": true }, - "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", - "dev": true - }, "strip-ansi": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", @@ -4364,13 +4317,6 @@ "integrity": "sha512-QIs2+ArIGQVp5ZYbWD5ZLCY29D5CfWizP8eWnm8FoGD1TX61veauETVQbrV60662V0oFBkrDOuaBI8XgtuyYAQ==", "requires": { "lodash": "^4.17.15" - }, - "dependencies": { - "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" - } } }, "request-promise-native": { @@ -4910,12 +4856,6 @@ "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", "dev": true }, - "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", - "dev": true - }, "string-width": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", @@ -4976,7 +4916,7 @@ "dependencies": { "readable-stream": { "version": "2.3.6", - "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", "integrity": "sha1-sRwn2IuP8fvgcGQ8+UsMea4bCq8=", "requires": { "core-util-is": "~1.0.0", From e4926e09a7cea0e667412287c3e56940c7813731 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Fri, 14 Feb 2020 10:50:52 +0000 Subject: [PATCH 419/555] Move error handler to be the last middleware --- services/filestore/app.js | 4 +++- services/filestore/app/js/RequestLogger.js | 7 +------ 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index 84182c3dcf..e48e8ae3c8 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -16,7 +16,7 @@ const RequestLogger = require('./app/js/RequestLogger') const app = express() -RequestLogger.attach(app) +app.use(RequestLogger.middleware) if (settings.sentry && settings.sentry.dsn) { logger.initializeErrorReporting(settings.sentry.dsn) @@ -140,6 +140,8 @@ app.get('/status', function(req, res) { app.get('/health_check', healthCheckController.check) +app.use(RequestLogger.errorHandler) + const port = settings.internal.filestore.port || 3009 const host = '0.0.0.0' diff --git a/services/filestore/app/js/RequestLogger.js b/services/filestore/app/js/RequestLogger.js index fd72d85d17..fa1fc14426 100644 --- a/services/filestore/app/js/RequestLogger.js +++ b/services/filestore/app/js/RequestLogger.js @@ -15,13 +15,8 @@ class RequestLogger { this._logMessage = message } - static attach(app) { - app.use(RequestLogger.middleware) - app.use(RequestLogger.errorHandler) - } - static errorHandler(err, req, res, next) { - req.requestLogger._logInfo.error = err + 
req.requestLogger.addFields({ error: err }) res .send(err.message) .status(500) From df2ddbe0e12b906abc26dce35ae03081e66f08da Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Fri, 14 Feb 2020 14:26:33 +0000 Subject: [PATCH 420/555] Revert "Add Migration Persistor, to send missing file requests to a fallback persistor" --- services/filestore/.eslintrc | 3 +- .../{FSPersistor.js => FSPersistorManager.js} | 68 +-- .../filestore/app/js/MigrationPersistor.js | 228 -------- services/filestore/app/js/PersistorHelper.js | 105 ---- services/filestore/app/js/PersistorManager.js | 35 +- .../{S3Persistor.js => S3PersistorManager.js} | 179 ++---- .../filestore/config/settings.defaults.coffee | 61 +- services/filestore/package-lock.json | 282 +++++++++- services/filestore/package.json | 8 +- .../test/acceptance/js/FilestoreApp.js | 1 - .../test/acceptance/js/FilestoreTests.js | 490 +---------------- ...torTests.js => FSPersistorManagerTests.js} | 147 ++--- .../test/unit/js/MigrationPersistorTests.js | 519 ------------------ .../test/unit/js/PersistorManagerTests.js | 24 +- ...torTests.js => S3PersistorManagerTests.js} | 213 +++---- 15 files changed, 581 insertions(+), 1782 deletions(-) rename services/filestore/app/js/{FSPersistor.js => FSPersistorManager.js} (75%) delete mode 100644 services/filestore/app/js/MigrationPersistor.js delete mode 100644 services/filestore/app/js/PersistorHelper.js rename services/filestore/app/js/{S3Persistor.js => S3PersistorManager.js} (63%) rename services/filestore/test/unit/js/{FSPersistorTests.js => FSPersistorManagerTests.js} (69%) delete mode 100644 services/filestore/test/unit/js/MigrationPersistorTests.js rename services/filestore/test/unit/js/{S3PersistorTests.js => S3PersistorManagerTests.js} (78%) diff --git a/services/filestore/.eslintrc b/services/filestore/.eslintrc index 73103de7f6..42a4b5cace 100644 --- a/services/filestore/.eslintrc +++ b/services/filestore/.eslintrc @@ -23,8 +23,7 @@ "rules": { // Swap the no-unused-expressions rule with a more chai-friendly one "no-unused-expressions": 0, - "chai-friendly/no-unused-expressions": "error", - "no-console": "error" + "chai-friendly/no-unused-expressions": "error" }, "overrides": [ { diff --git a/services/filestore/app/js/FSPersistor.js b/services/filestore/app/js/FSPersistorManager.js similarity index 75% rename from services/filestore/app/js/FSPersistor.js rename to services/filestore/app/js/FSPersistorManager.js index 973c670efd..862acb9bcb 100644 --- a/services/filestore/app/js/FSPersistor.js +++ b/services/filestore/app/js/FSPersistorManager.js @@ -7,7 +7,6 @@ const { promisify, callbackify } = require('util') const LocalFileWriter = require('./LocalFileWriter').promises const { NotFoundError, ReadError, WriteError } = require('./Errors') -const PersistorHelper = require('./PersistorHelper') const pipeline = promisify(Stream.pipeline) const fsUnlink = promisify(fs.unlink) @@ -28,7 +27,7 @@ async function sendFile(location, target, source) { const targetStream = fs.createWriteStream(`${location}/${filteredTarget}`) await pipeline(sourceStream, targetStream) } catch (err) { - throw PersistorHelper.wrapError( + throw _wrapError( err, 'failed to copy the specified file', { location, target, source }, @@ -37,22 +36,11 @@ async function sendFile(location, target, source) { } } -async function sendStream(location, target, sourceStream, sourceMd5) { +async function sendStream(location, target, sourceStream) { const fsPath = await LocalFileWriter.writeStream(sourceStream) - if (!sourceMd5) { - sourceMd5 = 
await _getFileMd5HashForPath(fsPath) - } try { await sendFile(location, target, fsPath) - const destMd5 = await getFileMd5Hash(location, target) - if (sourceMd5 !== destMd5) { - await LocalFileWriter.deleteFile(`${location}/${filterName(target)}`) - throw new WriteError({ - message: 'md5 hash mismatch', - info: { sourceMd5, destMd5, location, target } - }) - } } finally { await LocalFileWriter.deleteFile(fsPath) } @@ -65,7 +53,7 @@ async function getFileStream(location, name, opts) { try { opts.fd = await fsOpen(`${location}/${filteredName}`, 'r') } catch (err) { - throw PersistorHelper.wrapError( + throw _wrapError( err, 'failed to open file for streaming', { location, filteredName, opts }, @@ -83,7 +71,7 @@ async function getFileSize(location, filename) { const stat = await fsStat(fullPath) return stat.size } catch (err) { - throw PersistorHelper.wrapError( + throw _wrapError( err, 'failed to stat file', { location, filename }, @@ -92,18 +80,6 @@ async function getFileSize(location, filename) { } } -async function getFileMd5Hash(location, filename) { - const fullPath = path.join(location, filterName(filename)) - try { - return await _getFileMd5HashForPath(fullPath) - } catch (err) { - throw new ReadError({ - message: 'unable to get md5 hash from file', - info: { location, filename } - }).withCause(err) - } -} - async function copyFile(location, fromName, toName) { const filteredFromName = filterName(fromName) const filteredToName = filterName(toName) @@ -113,7 +89,7 @@ async function copyFile(location, fromName, toName) { const targetStream = fs.createWriteStream(`${location}/${filteredToName}`) await pipeline(sourceStream, targetStream) } catch (err) { - throw PersistorHelper.wrapError( + throw _wrapError( err, 'failed to copy file', { location, filteredFromName, filteredToName }, @@ -127,17 +103,12 @@ async function deleteFile(location, name) { try { await fsUnlink(`${location}/${filteredName}`) } catch (err) { - const wrappedError = PersistorHelper.wrapError( + throw _wrapError( err, 'failed to delete file', { location, filteredName }, WriteError ) - if (!(wrappedError instanceof NotFoundError)) { - // S3 doesn't give us a 404 when a file wasn't there to be deleted, so we - // should be consistent here as well - throw wrappedError - } } } @@ -148,7 +119,7 @@ async function deleteDirectory(location, name) { try { await rmrf(`${location}/${filteredName}`) } catch (err) { - throw PersistorHelper.wrapError( + throw _wrapError( err, 'failed to delete directory', { location, filteredName }, @@ -166,7 +137,7 @@ async function checkIfFileExists(location, name) { if (err.code === 'ENOENT') { return false } - throw PersistorHelper.wrapError( + throw _wrapError( err, 'failed to stat file', { location, filteredName }, @@ -196,7 +167,7 @@ async function directorySize(location, name) { } } } catch (err) { - throw PersistorHelper.wrapError( + throw _wrapError( err, 'failed to get directory size', { location, name }, @@ -207,12 +178,25 @@ async function directorySize(location, name) { return size } +function _wrapError(error, message, params, ErrorType) { + if (error.code === 'ENOENT') { + return new NotFoundError({ + message: 'no such file or directory', + info: params + }).withCause(error) + } else { + return new ErrorType({ + message: message, + info: params + }).withCause(error) + } +} + module.exports = { sendFile: callbackify(sendFile), sendStream: callbackify(sendStream), getFileStream: callbackify(getFileStream), getFileSize: callbackify(getFileSize), - getFileMd5Hash: 
callbackify(getFileMd5Hash), copyFile: callbackify(copyFile), deleteFile: callbackify(deleteFile), deleteDirectory: callbackify(deleteDirectory), @@ -223,7 +207,6 @@ module.exports = { sendStream, getFileStream, getFileSize, - getFileMd5Hash, copyFile, deleteFile, deleteDirectory, @@ -231,8 +214,3 @@ module.exports = { directorySize } } - -async function _getFileMd5HashForPath(fullPath) { - const stream = fs.createReadStream(fullPath) - return PersistorHelper.calculateStreamMd5(stream) -} diff --git a/services/filestore/app/js/MigrationPersistor.js b/services/filestore/app/js/MigrationPersistor.js deleted file mode 100644 index 3ddc762922..0000000000 --- a/services/filestore/app/js/MigrationPersistor.js +++ /dev/null @@ -1,228 +0,0 @@ -const metrics = require('metrics-sharelatex') -const Settings = require('settings-sharelatex') -const logger = require('logger-sharelatex') -const Stream = require('stream') -const { callbackify, promisify } = require('util') -const { NotFoundError, WriteError } = require('./Errors') - -const pipeline = promisify(Stream.pipeline) - -// Persistor that wraps two other persistors. Talks to the 'primary' by default, -// but will fall back to an older persistor in the case of a not-found error. -// If `Settings.filestore.fallback.copyOnMiss` is set, this will copy files from the fallback -// to the primary, in the event that they are missing. -// -// It is unlikely that the bucket/location name will be the same on the fallback -// as the primary. The bucket names should be overridden in `Settings.filestore.fallback.buckets` -// e.g. -// Settings.filestore.fallback.buckets = { -// myBucketOnS3: 'myBucketOnGCS' -// } - -module.exports = function(primary, fallback) { - function _wrapMethodOnBothPersistors(method) { - return async function(bucket, key, ...moreArgs) { - const fallbackBucket = _getFallbackBucket(bucket) - - await Promise.all([ - primary.promises[method](bucket, key, ...moreArgs), - fallback.promises[method](fallbackBucket, key, ...moreArgs) - ]) - } - } - - async function getFileStreamWithFallback(bucket, key, opts) { - const shouldCopy = - Settings.filestore.fallback.copyOnMiss && !opts.start && !opts.end - - try { - return await primary.promises.getFileStream(bucket, key, opts) - } catch (err) { - if (err instanceof NotFoundError) { - const fallbackBucket = _getFallbackBucket(bucket) - const fallbackStream = await fallback.promises.getFileStream( - fallbackBucket, - key, - opts - ) - // tee the stream to the client, and as a copy to the primary (if necessary) - // start listening on both straight away so that we don't consume bytes - // in one place before the other - const returnStream = new Stream.PassThrough() - pipeline(fallbackStream, returnStream) - - if (shouldCopy) { - const copyStream = new Stream.PassThrough() - pipeline(fallbackStream, copyStream) - - _copyStreamFromFallbackAndVerify( - copyStream, - fallbackBucket, - bucket, - key, - key - ).catch(() => { - // swallow errors, as this runs in the background and will log a warning - }) - } - return returnStream - } - throw err - } - } - - async function copyFileWithFallback(bucket, sourceKey, destKey) { - try { - return await primary.promises.copyFile(bucket, sourceKey, destKey) - } catch (err) { - if (err instanceof NotFoundError) { - const fallbackBucket = _getFallbackBucket(bucket) - const fallbackStream = await fallback.promises.getFileStream( - fallbackBucket, - sourceKey, - {} - ) - - const copyStream = new Stream.PassThrough() - pipeline(fallbackStream, copyStream) - - if 
(Settings.filestore.fallback.copyOnMiss) { - const missStream = new Stream.PassThrough() - pipeline(fallbackStream, missStream) - - // copy from sourceKey -> sourceKey - _copyStreamFromFallbackAndVerify( - missStream, - fallbackBucket, - bucket, - sourceKey, - sourceKey - ).then(() => { - // swallow errors, as this runs in the background and will log a warning - }) - } - // copy from sourceKey -> destKey - return _copyStreamFromFallbackAndVerify( - copyStream, - fallbackBucket, - bucket, - sourceKey, - destKey - ) - } - throw err - } - } - - function _getFallbackBucket(bucket) { - return Settings.filestore.fallback.buckets[bucket] - } - - function _wrapFallbackMethod(method) { - return async function(bucket, key, ...moreArgs) { - try { - return await primary.promises[method](bucket, key, ...moreArgs) - } catch (err) { - if (err instanceof NotFoundError) { - const fallbackBucket = _getFallbackBucket(bucket) - if (Settings.filestore.fallback.copyOnMiss) { - const fallbackStream = await fallback.promises.getFileStream( - fallbackBucket, - key, - {} - ) - // run in background - _copyStreamFromFallbackAndVerify( - fallbackStream, - fallbackBucket, - bucket, - key, - key - ).catch(err => { - logger.warn({ err }, 'failed to copy file from fallback') - }) - } - return fallback.promises[method](fallbackBucket, key, ...moreArgs) - } - throw err - } - } - } - - async function _copyStreamFromFallbackAndVerify( - stream, - sourceBucket, - destBucket, - sourceKey, - destKey - ) { - try { - let sourceMd5 - try { - sourceMd5 = await fallback.promises.getFileMd5Hash( - sourceBucket, - sourceKey - ) - } catch (err) { - logger.warn(err, 'error getting md5 hash from fallback persistor') - } - - await primary.promises.sendStream(destBucket, destKey, stream, sourceMd5) - } catch (err) { - const error = new WriteError({ - message: 'unable to copy file to destination persistor', - info: { - sourceBucket, - destBucket, - sourceKey, - destKey - } - }).withCause(err) - metrics.inc('fallback.copy.failure') - - try { - await primary.promises.deleteFile(destBucket, destKey) - } catch (err) { - error.info.cleanupError = new WriteError({ - message: 'unable to clean up destination copy artifact', - info: { - destBucket, - destKey - } - }).withCause(err) - } - - logger.warn({ error }, 'failed to copy file from fallback') - throw error - } - } - - return { - primaryPersistor: primary, - fallbackPersistor: fallback, - sendFile: primary.sendFile, - sendStream: primary.sendStream, - getFileStream: callbackify(getFileStreamWithFallback), - getFileMd5Hash: callbackify(_wrapFallbackMethod('getFileMd5Hash')), - deleteDirectory: callbackify( - _wrapMethodOnBothPersistors('deleteDirectory') - ), - getFileSize: callbackify(_wrapFallbackMethod('getFileSize')), - deleteFile: callbackify(_wrapMethodOnBothPersistors('deleteFile')), - copyFile: callbackify(copyFileWithFallback), - checkIfFileExists: callbackify(_wrapFallbackMethod('checkIfFileExists')), - directorySize: callbackify(_wrapFallbackMethod('directorySize')), - promises: { - sendFile: primary.promises.sendFile, - sendStream: primary.promises.sendStream, - getFileStream: getFileStreamWithFallback, - getFileMd5Hash: _wrapFallbackMethod('getFileMd5Hash'), - deleteDirectory: _wrapMethodOnBothPersistors('deleteDirectory'), - getFileSize: _wrapFallbackMethod('getFileSize'), - deleteFile: _wrapMethodOnBothPersistors('deleteFile'), - copyFile: copyFileWithFallback, - checkIfFileExists: _wrapFallbackMethod('checkIfFileExists'), - directorySize: _wrapFallbackMethod('directorySize') - } - 
} -} diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js deleted file mode 100644 index ea8132a9c9..0000000000 --- a/services/filestore/app/js/PersistorHelper.js +++ /dev/null @@ -1,105 +0,0 @@ -const crypto = require('crypto') -const meter = require('stream-meter') -const Stream = require('stream') -const logger = require('logger-sharelatex') -const { WriteError, ReadError, NotFoundError } = require('./Errors') -const { promisify } = require('util') - -const pipeline = promisify(Stream.pipeline) - -module.exports = { - calculateStreamMd5, - verifyMd5, - getMeteredStream, - waitForStreamReady, - wrapError -} - -// returns a promise which resolves with the md5 hash of the stream -function calculateStreamMd5(stream) { - const hash = crypto.createHash('md5') - hash.setEncoding('hex') - - return pipeline(stream, hash).then(() => hash.read()) -} - -// verifies the md5 hash of a file against the supplied md5 or the one stored in -// storage if not supplied - deletes the new file if the md5 does not match and -// throws an error -async function verifyMd5(persistor, bucket, key, sourceMd5, destMd5 = null) { - if (!destMd5) { - destMd5 = await persistor.promises.getFileMd5Hash(bucket, key) - } - - if (sourceMd5 !== destMd5) { - try { - await persistor.promises.deleteFile(bucket, key) - } catch (err) { - logger.warn(err, 'error deleting file for invalid upload') - } - - throw new WriteError({ - message: 'source and destination hashes do not match', - info: { - sourceMd5, - destMd5, - bucket, - key - } - }) - } -} - -// returns the next stream in the pipeline, and calls the callback with the byte count -// when the stream finishes or receives an error -function getMeteredStream(stream, callback) { - const meteredStream = meter() - - pipeline(stream, meteredStream) - .then(() => { - callback(null, meteredStream.bytes) - }) - .catch(err => { - // on error, just send how many bytes we received before the stream stopped - callback(err, meteredStream.bytes) - }) - - return meteredStream -} - -// resolves when a stream is 'readable', or rejects if the stream throws an error -// before that happens - this lets us handle protocol-level errors before trying -// to read them -function waitForStreamReady(stream) { - return new Promise((resolve, reject) => { - const onError = function(err) { - reject(wrapError(err, 'error before stream became ready', {}, ReadError)) - } - const onStreamReady = function() { - stream.removeListener('readable', onStreamReady) - stream.removeListener('error', onError) - resolve(stream) - } - stream.on('readable', onStreamReady) - stream.on('error', onError) - }) -} - -function wrapError(error, message, params, ErrorType) { - if ( - error instanceof NotFoundError || - ['NoSuchKey', 'NotFound', 404, 'AccessDenied', 'ENOENT'].includes( - error.code - ) - ) { - return new NotFoundError({ - message: 'no such file', - info: params - }).withCause(error) - } else { - return new ErrorType({ - message: message, - info: params - }).withCause(error) - } -} diff --git a/services/filestore/app/js/PersistorManager.js b/services/filestore/app/js/PersistorManager.js index 32f6cd41f8..cca0cf0f36 100644 --- a/services/filestore/app/js/PersistorManager.js +++ b/services/filestore/app/js/PersistorManager.js @@ -3,8 +3,7 @@ const logger = require('logger-sharelatex') logger.log( { - backend: settings.filestore.backend, - fallback: settings.filestore.fallback && settings.filestore.fallback.backend + backend: settings.filestore.backend }, 'Loading 
backend' ) @@ -12,26 +11,14 @@ if (!settings.filestore.backend) { throw new Error('no backend specified - config incomplete') } -function getPersistor(backend) { - switch (backend) { - case 'aws-sdk': - case 's3': - return require('./S3Persistor') - case 'fs': - return require('./FSPersistor') - default: - throw new Error(`unknown filestore backend: ${backend}`) - } +switch (settings.filestore.backend) { + case 'aws-sdk': + case 's3': + module.exports = require('./S3PersistorManager') + break + case 'fs': + module.exports = require('./FSPersistorManager') + break + default: + throw new Error(`unknown filestore backend: ${settings.filestore.backend}`) } - -let persistor = getPersistor(settings.filestore.backend) - -if (settings.filestore.fallback && settings.filestore.fallback.backend) { - const migrationPersistor = require('./MigrationPersistor') - persistor = migrationPersistor( - persistor, - getPersistor(settings.filestore.fallback.backend) - ) -} - -module.exports = persistor diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3PersistorManager.js similarity index 63% rename from services/filestore/app/js/S3Persistor.js rename to services/filestore/app/js/S3PersistorManager.js index 891d7be68e..52cadfbfbd 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3PersistorManager.js @@ -6,8 +6,7 @@ https.globalAgent.maxSockets = 300 const settings = require('settings-sharelatex') const metrics = require('metrics-sharelatex') -const PersistorHelper = require('./PersistorHelper') - +const meter = require('stream-meter') const fs = require('fs') const S3 = require('aws-sdk/clients/s3') const { URL } = require('url') @@ -19,11 +18,10 @@ const { SettingsError } = require('./Errors') -const S3Persistor = { +module.exports = { sendFile: callbackify(sendFile), sendStream: callbackify(sendStream), getFileStream: callbackify(getFileStream), - getFileMd5Hash: callbackify(getFileMd5Hash), deleteDirectory: callbackify(deleteDirectory), getFileSize: callbackify(getFileSize), deleteFile: callbackify(deleteFile), @@ -34,7 +32,6 @@ const S3Persistor = { sendFile, sendStream, getFileStream, - getFileMd5Hash, deleteDirectory, getFileSize, deleteFile, @@ -44,18 +41,12 @@ const S3Persistor = { } } -module.exports = S3Persistor - -function hexToBase64(hex) { - return Buffer.from(hex, 'hex').toString('base64') -} - async function sendFile(bucketName, key, fsPath) { let readStream try { readStream = fs.createReadStream(fsPath) } catch (err) { - throw PersistorHelper.wrapError( + throw _wrapError( err, 'error reading file from disk', { bucketName, key, fsPath }, @@ -65,56 +56,22 @@ async function sendFile(bucketName, key, fsPath) { return sendStream(bucketName, key, readStream) } -async function sendStream(bucketName, key, readStream, sourceMd5) { +async function sendStream(bucketName, key, readStream) { try { - // if there is no supplied md5 hash, we calculate the hash as the data passes through - let hashPromise - let b64Hash + const meteredStream = meter() + meteredStream.on('finish', () => { + metrics.count('s3.egress', meteredStream.bytes) + }) - if (sourceMd5) { - b64Hash = hexToBase64(sourceMd5) - } else { - hashPromise = PersistorHelper.calculateStreamMd5(readStream) - } - - const meteredStream = PersistorHelper.getMeteredStream( - readStream, - (_, byteCount) => { - // ignore the error parameter and just log the byte count - metrics.count('s3.egress', byteCount) - } - ) - - // if we have an md5 hash, pass this to S3 to verify the upload - 
const uploadOptions = { - Bucket: bucketName, - Key: key, - Body: meteredStream - } - if (b64Hash) { - uploadOptions.ContentMD5 = b64Hash - } - - const response = await _getClientForBucket(bucketName) - .upload(uploadOptions) + await _getClientForBucket(bucketName) + .upload({ + Bucket: bucketName, + Key: key, + Body: readStream.pipe(meteredStream) + }) .promise() - const destMd5 = _md5FromResponse(response) - - // if we didn't have an md5 hash, we should compare our computed one with S3's - // as we couldn't tell S3 about it beforehand - if (hashPromise) { - sourceMd5 = await hashPromise - // throws on mismatch - await PersistorHelper.verifyMd5( - S3Persistor, - bucketName, - key, - sourceMd5, - destMd5 - ) - } } catch (err) { - throw PersistorHelper.wrapError( + throw _wrapError( err, 'upload to S3 failed', { bucketName, key }, @@ -134,29 +91,25 @@ async function getFileStream(bucketName, key, opts) { params.Range = `bytes=${opts.start}-${opts.end}` } - const stream = _getClientForBucket(bucketName) - .getObject(params) - .createReadStream() + return new Promise((resolve, reject) => { + const stream = _getClientForBucket(bucketName) + .getObject(params) + .createReadStream() - const meteredStream = PersistorHelper.getMeteredStream( - stream, - (_, byteCount) => { - // ignore the error parameter and just log the byte count - metrics.count('s3.ingress', byteCount) + const meteredStream = meter() + meteredStream.on('finish', () => { + metrics.count('s3.ingress', meteredStream.bytes) + }) + + const onStreamReady = function() { + stream.removeListener('readable', onStreamReady) + resolve(stream.pipe(meteredStream)) } - ) - - try { - await PersistorHelper.waitForStreamReady(stream) - return meteredStream - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'error reading file from S3', - { bucketName, key, opts }, - ReadError - ) - } + stream.on('readable', onStreamReady) + stream.on('error', err => { + reject(_wrapError(err, 'error reading from S3', params, ReadError)) + }) + }) } async function deleteDirectory(bucketName, key) { @@ -167,7 +120,7 @@ async function deleteDirectory(bucketName, key) { .listObjects({ Bucket: bucketName, Prefix: key }) .promise() } catch (err) { - throw PersistorHelper.wrapError( + throw _wrapError( err, 'failed to list objects in S3', { bucketName, key }, @@ -188,7 +141,7 @@ async function deleteDirectory(bucketName, key) { }) .promise() } catch (err) { - throw PersistorHelper.wrapError( + throw _wrapError( err, 'failed to delete objects in S3', { bucketName, key }, @@ -205,7 +158,7 @@ async function getFileSize(bucketName, key) { .promise() return response.ContentLength } catch (err) { - throw PersistorHelper.wrapError( + throw _wrapError( err, 'error getting size of s3 object', { bucketName, key }, @@ -214,31 +167,13 @@ async function getFileSize(bucketName, key) { } } -async function getFileMd5Hash(bucketName, key) { - try { - const response = await _getClientForBucket(bucketName) - .headObject({ Bucket: bucketName, Key: key }) - .promise() - const md5 = _md5FromResponse(response) - return md5 - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'error getting hash of s3 object', - { bucketName, key }, - ReadError - ) - } -} - async function deleteFile(bucketName, key) { try { await _getClientForBucket(bucketName) .deleteObject({ Bucket: bucketName, Key: key }) .promise() } catch (err) { - // s3 does not give us a NotFoundError here - throw PersistorHelper.wrapError( + throw _wrapError( err, 'failed to delete file in S3', { bucketName, key }, 
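Both versions of deleteDirectory in the diff above treat an S3 "directory" as a key prefix: list the objects under the prefix, then remove them with a single batch delete call. A minimal sketch of that flow with the aws-sdk v2 client follows; the client construction, bucket name and prefix are invented for illustration, and, like the code above, it only covers a single (unpaginated) page of listing results:

const S3 = require('aws-sdk/clients/s3')

// delete every object stored under `prefix` in `bucketName` (first page only)
async function deletePrefix(client, bucketName, prefix) {
  const listing = await client
    .listObjects({ Bucket: bucketName, Prefix: prefix })
    .promise()

  if (!listing.Contents.length) {
    return
  }

  await client
    .deleteObjects({
      Bucket: bucketName,
      Delete: {
        Objects: listing.Contents.map(item => ({ Key: item.Key })),
        Quiet: true
      }
    })
    .promise()
}

// usage with placeholder names; credentials and endpoint come from the usual
// aws-sdk configuration (environment variables or an options object)
const client = new S3()
deletePrefix(client, 'example-bucket', 'some-project-id/').catch(err =>
  console.error(err)
)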
@@ -258,12 +193,7 @@ async function copyFile(bucketName, sourceKey, destKey) { .copyObject(params) .promise() } catch (err) { - throw PersistorHelper.wrapError( - err, - 'failed to copy file in S3', - params, - WriteError - ) + throw _wrapError(err, 'failed to copy file in S3', params, WriteError) } } @@ -275,7 +205,7 @@ async function checkIfFileExists(bucketName, key) { if (err instanceof NotFoundError) { return false } - throw PersistorHelper.wrapError( + throw _wrapError( err, 'error checking whether S3 object exists', { bucketName, key }, @@ -292,7 +222,7 @@ async function directorySize(bucketName, key) { return response.Contents.reduce((acc, item) => item.Size + acc, 0) } catch (err) { - throw PersistorHelper.wrapError( + throw _wrapError( err, 'error getting directory size in S3', { bucketName, key }, @@ -301,6 +231,22 @@ async function directorySize(bucketName, key) { } } +function _wrapError(error, message, params, ErrorType) { + if ( + ['NoSuchKey', 'NotFound', 'AccessDenied', 'ENOENT'].includes(error.code) + ) { + return new NotFoundError({ + message: 'no such file', + info: params + }).withCause(error) + } else { + return new ErrorType({ + message: message, + info: params + }).withCause(error) + } +} + const _clients = new Map() let _defaultClient @@ -363,18 +309,3 @@ function _buildClientOptions(bucketCredentials) { return options } - -function _md5FromResponse(response) { - const md5 = (response.ETag || '').replace(/[ "]/g, '') - if (!md5.match(/^[a-f0-9]{32}$/)) { - throw new ReadError({ - message: 's3 etag not in md5-hash format', - info: { - md5, - eTag: response.ETag - } - }) - } - - return md5 -} diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index bb124ae8e0..206f932a76 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -7,19 +7,6 @@ if process.env['AWS_KEY'] && !process.env['AWS_ACCESS_KEY_ID'] if process.env['AWS_SECRET'] && !process.env['AWS_SECRET_ACCESS_KEY'] process.env['AWS_SECRET_ACCESS_KEY'] = process.env['AWS_SECRET'] -# pre-backend setting, fall back to old behaviour -unless process.env['BACKEND']? - if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? - process.env['BACKEND'] = "s3" - process.env['USER_FILES_BUCKET_NAME'] = process.env['AWS_S3_USER_FILES_BUCKET_NAME'] - process.env['TEMPLATE_FILES_BUCKET_NAME'] = process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME'] - process.env['PUBLIC_FILES_BUCKET_NAME'] = process.env['AWS_S3_PUBLIC_FILES_BUCKET_NAME'] - else - process.env['BACKEND'] = "fs" - process.env['USER_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../user_files") - process.env['TEMPLATE_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../template_files") - process.env['PUBLIC_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../public_files") - settings = internal: filestore: @@ -31,28 +18,38 @@ settings = # Choices are # s3 - Amazon S3 # fs - local filesystem - backend: process.env['BACKEND'] - - s3: - if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? + if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? 
+ backend: "s3" + s3: key: process.env['AWS_ACCESS_KEY_ID'] secret: process.env['AWS_SECRET_ACCESS_KEY'] endpoint: process.env['AWS_S3_ENDPOINT'] - - stores: - user_files: process.env['USER_FILES_BUCKET_NAME'] - template_files: process.env['TEMPLATE_FILES_BUCKET_NAME'] - public_files: process.env['PUBLIC_FILES_BUCKET_NAME'] - - s3BucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS']? - - fallback: - if process.env['FALLBACK_BACKEND']? - backend: process.env['FALLBACK_BACKEND'] - # mapping of bucket names on the fallback, to bucket names on the primary. - # e.g. { myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } - buckets: JSON.parse(process.env['FALLBACK_BUCKET_MAPPING'] || '{}') - copyOnMiss: process.env['COPY_ON_MISS'] == 'true' + stores: + user_files: process.env['AWS_S3_USER_FILES_BUCKET_NAME'] + template_files: process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME'] + public_files: process.env['AWS_S3_PUBLIC_FILES_BUCKET_NAME'] + # if you are using S3, then fill in your S3 details below, + # or use env var with the same structure. + # s3: + # key: "" # default + # secret: "" # default + # + # s3BucketCreds: + # bucketname1: # secrets for bucketname1 + # auth_key: "" + # auth_secret: "" + # bucketname2: # secrets for bucketname2... + s3BucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS']? + else + backend: "fs" + stores: + # + # For local filesystem this is the directory to store the files in. + # Must contain full path, e.g. "/var/lib/sharelatex/data". + # This path must exist, not be tmpfs and be writable to by the user sharelatex is run as. + user_files: Path.resolve(__dirname + "/../user_files") + public_files: Path.resolve(__dirname + "/../public_files") + template_files: Path.resolve(__dirname + "/../template_files") path: uploadFolder: Path.resolve(__dirname + "/../uploads") diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 64902d42af..7adbe68767 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -586,6 +586,11 @@ "event-target-shim": "^5.0.0" } }, + "accept-encoding": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz", + "integrity": "sha1-XdiLjfcfHcLlzGuVZezOHjmaMz4=" + }, "accepts": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz", @@ -765,6 +770,11 @@ } } }, + "aws-sign": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.1.tgz", + "integrity": "sha1-uWGyLwuqTxXsJBFA83dtbBQoVtA=" + }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", @@ -827,6 +837,14 @@ "tweetnacl": "^0.14.3" } }, + "best-encoding": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz", + "integrity": "sha1-GVIT2rysBFgYuAe3ox+Dn63cl04=", + "requires": { + "accept-encoding": "~0.1.0" + } + }, "bignumber.js": { "version": "7.2.1", "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", @@ -845,6 +863,14 @@ "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" }, + "bl": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz", + "integrity": "sha1-P7BnBgKsKHjrdw3CA58YNr5irls=", + "requires": { + "readable-stream": "~1.0.2" + } + }, "body-parser": { "version": 
"1.18.3", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.3.tgz", @@ -868,6 +894,14 @@ "integrity": "sha1-tcCeF8rNET0Rt7s+04TMASmU2Gs=", "dev": true }, + "boom": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz", + "integrity": "sha1-yM2wQUNZEnQWKMBE7Mcy0dF8Ceo=", + "requires": { + "hoek": "0.7.x" + } + }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -941,6 +975,11 @@ "quick-lru": "^4.0.1" } }, + "caseless": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz", + "integrity": "sha1-U06XkWOH07cGtk/eu6xGQ4RQk08=" + }, "chai": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz", @@ -1058,6 +1097,14 @@ "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", "dev": true }, + "combined-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", + "integrity": "sha1-ATfmV7qlp1QcV6w3rF/AfXO03B8=", + "requires": { + "delayed-stream": "0.0.5" + } + }, "common-tags": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", @@ -1104,6 +1151,11 @@ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" }, + "cookie-jar": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz", + "integrity": "sha1-ZOzAasl423leS1KQy+SLo3gUAPo=" + }, "cookie-signature": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", @@ -1141,6 +1193,14 @@ } } }, + "cryptiles": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz", + "integrity": "sha1-GlVnNPBtJLo0hirpy55wmjr7/xw=", + "requires": { + "boom": "0.3.x" + } + }, "dashdash": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", @@ -1192,6 +1252,11 @@ "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", "integrity": "sha1-7+6/uPVFV5yzlrOnIkQ+yW0UxQ4=" }, + "delayed-stream": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", + "integrity": "sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8=" + }, "depd": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", @@ -1987,6 +2052,28 @@ } } }, + "forever-agent": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz", + "integrity": "sha1-4cJcetROCcOPIzh2x2/MJP+EOx8=" + }, + "form-data": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz", + "integrity": "sha1-2zRaU3jYau6x7V1VO4aawZLS9e0=", + "requires": { + "async": "~0.2.7", + "combined-stream": "~0.0.4", + "mime": "~1.2.2" + }, + "dependencies": { + "mime": { + "version": "1.2.11", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" + } + } + }, "forwarded": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", @@ -2210,6 +2297,17 @@ "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", "dev": true }, + "hawk": { + "version": "0.10.2", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz", + "integrity": "sha1-mzYd7pWpMWQObVBOBWCaj8OsRdI=", + "requires": { 
+ "boom": "0.3.x", + "cryptiles": "0.1.x", + "hoek": "0.7.x", + "sntp": "0.1.x" + } + }, "he": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", @@ -2225,6 +2323,11 @@ "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", "integrity": "sha1-jhzkvvNqdPfVcjw/swkMKGAHczg=" }, + "hoek": { + "version": "0.7.6", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz", + "integrity": "sha1-YPvZBFV1Qc0rh5Wr8wihs3cOFVo=" + }, "hosted-git-info": { "version": "2.8.5", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz", @@ -2564,6 +2667,28 @@ "graceful-fs": "^4.1.9" } }, + "knox": { + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/knox/-/knox-0.9.2.tgz", + "integrity": "sha1-NzZZNmniTwJP2vcjtqHcSv2DmnE=", + "requires": { + "debug": "^1.0.2", + "mime": "*", + "once": "^1.3.0", + "stream-counter": "^1.0.0", + "xml2js": "^0.4.4" + }, + "dependencies": { + "debug": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.5.tgz", + "integrity": "sha1-9yQSF0MPmd7EwrRz6rkiKOh0wqw=", + "requires": { + "ms": "2.0.0" + } + } + } + }, "levn": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", @@ -3193,6 +3318,55 @@ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.4.tgz", "integrity": "sha1-1nOGYrZhvhnicR7wGqOxghLxMDA=" }, + "node-transloadit": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/node-transloadit/-/node-transloadit-0.0.4.tgz", + "integrity": "sha1-4ZoHheON94NblO2AANHjXmg7zsE=", + "requires": { + "request": "~2.16.6", + "underscore": "1.2.1" + }, + "dependencies": { + "json-stringify-safe": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz", + "integrity": "sha1-nbew5TDH8onF6MhDKvGRwv91pbM=" + }, + "mime": { + "version": "1.2.11", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" + }, + "qs": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz", + "integrity": "sha1-MbGtBYVnZRxSaSFQa5qHk5EaA4Q=" + }, + "request": { + "version": "2.16.6", + "resolved": "https://registry.npmjs.org/request/-/request-2.16.6.tgz", + "integrity": "sha1-hy/kRa5y3iZrN4edatfclI+gHK0=", + "requires": { + "aws-sign": "~0.2.0", + "cookie-jar": "~0.2.0", + "forever-agent": "~0.2.0", + "form-data": "~0.0.3", + "hawk": "~0.10.2", + "json-stringify-safe": "~3.0.0", + "mime": "~1.2.7", + "node-uuid": "~1.4.0", + "oauth-sign": "~0.2.0", + "qs": "~0.5.4", + "tunnel-agent": "~0.2.0" + } + }, + "underscore": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz", + "integrity": "sha1-/FxrB2VnPZKi1KyLTcCqiHAuK9Q=" + } + } + }, "node-uuid": { "version": "1.4.8", "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", @@ -3218,6 +3392,11 @@ } } }, + "oauth-sign": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz", + "integrity": "sha1-oOahcV2u0GLzIrYit/5a/RA1tuI=" + }, "object-inspect": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", @@ -4220,6 +4399,29 @@ "read-pkg": "^2.0.0" } }, + "readable-stream": { + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "requires": { + "core-util-is": "~1.0.0", 
+ "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + }, + "dependencies": { + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + } + } + }, + "recluster": { + "version": "0.3.7", + "resolved": "https://registry.npmjs.org/recluster/-/recluster-0.3.7.tgz", + "integrity": "sha1-aKRx3ZC2obl3ZjTPdpZAWutWeJU=" + }, "regexpp": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", @@ -4392,6 +4594,24 @@ "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true }, + "response": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/response/-/response-0.14.0.tgz", + "integrity": "sha1-BmNS/z5rAm0EdYCUB2Y7Rob9JpY=", + "requires": { + "best-encoding": "^0.1.1", + "bl": "~0.7.0", + "caseless": "^0.3.0", + "mime": "~1.2.11" + }, + "dependencies": { + "mime": { + "version": "1.2.11", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" + } + } + }, "restore-cursor": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", @@ -4606,6 +4826,14 @@ } } }, + "sntp": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz", + "integrity": "sha1-XvSBuVGnspr/30r9fyaDj8ESD4Q=", + "requires": { + "hoek": "0.7.x" + } + }, "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", @@ -4693,11 +4921,49 @@ "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=" }, + "stream-browserify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.1.tgz", + "integrity": "sha1-ZiZu5fm9uZQKTkUUyvtDu3Hlyds=", + "requires": { + "inherits": "~2.0.1", + "readable-stream": "^2.0.2" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.6", + "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha1-sRwn2IuP8fvgcGQ8+UsMea4bCq8=", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha1-nPFhG6YmhdcDCunkujQUnDrwP8g=", + "requires": { + "safe-buffer": "~5.1.0" + } + } + } + }, "stream-buffers": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz", "integrity": "sha1-GBwI1bs2kARfaUAbmuanoM8zE/w=" }, + "stream-counter": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz", + "integrity": "sha1-kc8lac5NxQYf6816yyY5SloRR1E=" + }, "stream-meter": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/stream-meter/-/stream-meter-1.0.4.tgz", @@ -4735,12 +5001,6 @@ "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=" }, - "streamifier": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/streamifier/-/streamifier-0.1.1.tgz", - "integrity": "sha1-l+mNj6TRBdYqJpHR3AfoINuN/E8=", - "dev": true - }, 
"string-width": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", @@ -4782,6 +5042,11 @@ "function-bind": "^1.1.1" } }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + }, "strip-ansi": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", @@ -4975,6 +5240,11 @@ "integrity": "sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==", "dev": true }, + "tunnel-agent": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz", + "integrity": "sha1-aFPCr7GyEJ5FYp5JK9419Fnqaeg=" + }, "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 6f1dde0e8a..14e35cd8a2 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -28,16 +28,21 @@ "fs-extra": "^1.0.0", "glob": "^7.1.6", "heapdump": "^0.3.2", + "knox": "~0.9.1", "logger-sharelatex": "^1.7.0", "metrics-sharelatex": "^2.2.0", "mocha": "5.2.0", + "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", "pngcrush": "0.0.3", "range-parser": "^1.0.2", + "recluster": "^0.3.7", "request": "^2.88.0", "request-promise-native": "^1.0.8", + "response": "0.14.0", "rimraf": "2.2.8", "settings-sharelatex": "^1.1.0", + "stream-browserify": "^2.0.1", "stream-buffers": "~0.2.5", "stream-meter": "^1.0.4", "underscore": "~1.5.2" @@ -63,7 +68,6 @@ "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "2.0.3", "sinon": "7.1.1", - "sinon-chai": "^3.3.0", - "streamifier": "^0.1.1" + "sinon-chai": "^3.3.0" } } diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 20564e2d40..718d53bcf8 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -56,7 +56,6 @@ class FilestoreApp { } this.initing = false - this.persistor = require('../../../app/js/PersistorManager') } async waitForInit() { diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index fd1baed474..d7dfbce57c 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -11,7 +11,6 @@ const S3 = require('aws-sdk/clients/s3') const Stream = require('stream') const request = require('request') const { promisify } = require('util') -const streamifier = require('streamifier') chai.use(require('chai-as-promised')) const fsWriteFile = promisify(fs.writeFile) @@ -26,20 +25,6 @@ async function getMetric(filestoreUrl, metric) { return parseInt(found ? found[1] : 0) || 0 } -if (!process.env.AWS_ACCESS_KEY_ID) { - throw new Error('please provide credentials for the AWS S3 test server') -} - -function streamToString(stream) { - const chunks = [] - return new Promise((resolve, reject) => { - stream.on('data', chunk => chunks.push(chunk)) - stream.on('error', reject) - stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8'))) - stream.resume() - }) -} - // store settings for multiple backends, so that we can test each one. 
// fs will always be available - add others if they are configured const BackendSettings = { @@ -50,8 +35,11 @@ const BackendSettings = { public_files: Path.resolve(__dirname, '../../../public_files'), template_files: Path.resolve(__dirname, '../../../template_files') } - }, - S3Persistor: { + } +} + +if (process.env.AWS_ACCESS_KEY_ID) { + BackendSettings.S3Persistor = { backend: 's3', s3: { key: process.env.AWS_ACCESS_KEY_ID, @@ -64,62 +52,6 @@ const BackendSettings = { template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME, public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME } - }, - FallbackS3ToFSPersistor: { - backend: 's3', - s3: { - key: process.env.AWS_ACCESS_KEY_ID, - secret: process.env.AWS_SECRET_ACCESS_KEY, - endpoint: process.env.AWS_S3_ENDPOINT, - pathStyle: true - }, - stores: { - user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, - template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME, - public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME - }, - fallback: { - backend: 'fs', - buckets: { - [process.env.AWS_S3_USER_FILES_BUCKET_NAME]: Path.resolve( - __dirname, - '../../../user_files' - ), - [process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME]: Path.resolve( - __dirname, - '../../../public_files' - ), - [process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME]: Path.resolve( - __dirname, - '../../../template_files' - ) - } - } - }, - FallbackFSToS3Persistor: { - backend: 'fs', - s3: { - key: process.env.AWS_ACCESS_KEY_ID, - secret: process.env.AWS_SECRET_ACCESS_KEY, - endpoint: process.env.AWS_S3_ENDPOINT, - pathStyle: true - }, - stores: { - user_files: Path.resolve(__dirname, '../../../user_files'), - public_files: Path.resolve(__dirname, '../../../public_files'), - template_files: Path.resolve(__dirname, '../../../template_files') - }, - fallback: { - backend: 's3', - buckets: { - [Path.resolve(__dirname, '../../../user_files')]: process.env - .AWS_S3_USER_FILES_BUCKET_NAME, - [Path.resolve(__dirname, '../../../public_files')]: process.env - .AWS_S3_PUBLIC_FILES_BUCKET_NAME, - [Path.resolve(__dirname, '../../../template_files')]: process.env - .AWS_S3_TEMPLATE_FILES_BUCKET_NAME - } - } } } @@ -131,7 +63,7 @@ describe('Filestore', function() { // redefine the test suite for every available backend Object.keys(BackendSettings).forEach(backend => { describe(backend, function() { - let app, previousEgress, previousIngress, projectId + let app, previousEgress, previousIngress before(async function() { // create the app with the relevant filestore settings @@ -152,7 +84,6 @@ describe('Filestore', function() { getMetric(filestoreUrl, 's3_ingress') ]) } - projectId = `acceptance_tests_${Math.random()}` }) it('should send a 200 for the status endpoint', async function() { @@ -169,21 +100,23 @@ describe('Filestore', function() { }) describe('with a file on the server', function() { - let fileId, fileUrl, constantFileContent + let fileId, fileUrl const localFileReadPath = '/tmp/filestore_acceptance_tests_file_read.txt' + const constantFileContent = [ + 'hello world', + `line 2 goes here ${Math.random()}`, + 'there are 3 lines in all' + ].join('\n') + + before(async function() { + await fsWriteFile(localFileReadPath, constantFileContent) + }) beforeEach(async function() { fileId = Math.random() - fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` - constantFileContent = [ - 'hello world', - `line 2 goes here ${Math.random()}`, - 'there are 3 lines in all' - ].join('\n') - - await fsWriteFile(localFileReadPath, constantFileContent) + fileUrl 
= `${filestoreUrl}/project/acceptance_tests/file/${directoryName}%2F${fileId}` const writeStream = request.post(fileUrl) const readStream = fs.createReadStream(localFileReadPath) @@ -244,7 +177,7 @@ describe('Filestore', function() { }) it('should be able to copy files', async function() { - const newProjectID = `acceptance_tests_copied_project_${Math.random()}` + const newProjectID = 'acceptance_tests_copyied_project' const newFileId = Math.random() const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}` const opts = { @@ -252,7 +185,7 @@ describe('Filestore', function() { uri: newFileUrl, json: { source: { - project_id: projectId, + project_id: 'acceptance_tests', file_id: `${directoryName}/${fileId}` } } @@ -265,18 +198,6 @@ describe('Filestore', function() { expect(response.body).to.equal(constantFileContent) }) - it('should be able to overwrite the file', async function() { - const newContent = `here is some different content, ${Math.random()}` - const writeStream = request.post(fileUrl) - const readStream = streamifier.createReadStream(newContent) - // hack to consume the result to ensure the http request has been fully processed - const resultStream = fs.createWriteStream('/dev/null') - await pipeline(readStream, writeStream, resultStream) - - const response = await rp.get(fileUrl) - expect(response.body).to.equal(newContent) - }) - if (backend === 'S3Persistor') { it('should record an egress metric for the upload', async function() { const metric = await getMetric(filestoreUrl, 's3_egress') @@ -306,7 +227,7 @@ describe('Filestore', function() { }) describe('with multiple files', function() { - let fileIds, fileUrls + let fileIds, fileUrls, project const directoryName = 'directory' const localFileReadPaths = [ '/tmp/filestore_acceptance_tests_file_read_1.txt', @@ -333,10 +254,11 @@ describe('Filestore', function() { }) beforeEach(async function() { + project = `acceptance_tests_${Math.random()}` fileIds = [Math.random(), Math.random()] fileUrls = [ - `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileIds[0]}`, - `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileIds[1]}` + `${filestoreUrl}/project/${project}/file/${directoryName}%2F${fileIds[0]}`, + `${filestoreUrl}/project/${project}/file/${directoryName}%2F${fileIds[1]}` ] const writeStreams = [ @@ -360,7 +282,7 @@ describe('Filestore', function() { it('should get the directory size', async function() { const response = await rp.get( - `${filestoreUrl}/project/${projectId}/size` + `${filestoreUrl}/project/${project}/size` ) expect(parseInt(JSON.parse(response.body)['total bytes'])).to.equal( constantFileContents[0].length + constantFileContents[1].length @@ -370,10 +292,10 @@ describe('Filestore', function() { if (backend === 'S3Persistor') { describe('with a file in a specific bucket', function() { - let constantFileContent, fileId, fileUrl, bucketName + let constantFileContents, fileId, fileUrl, bucketName beforeEach(async function() { - constantFileContent = `This is a file in a different S3 bucket ${Math.random()}` + constantFileContents = `This is a file in a different S3 bucket ${Math.random()}` fileId = Math.random().toString() bucketName = Math.random().toString() fileUrl = `${filestoreUrl}/bucket/${bucketName}/key/${fileId}` @@ -398,368 +320,14 @@ describe('Filestore', function() { .upload({ Bucket: bucketName, Key: fileId, - Body: constantFileContent + Body: constantFileContents }) .promise() }) it('should get the file from the specified bucket', 
async function() { const response = await rp.get(fileUrl) - expect(response.body).to.equal(constantFileContent) - }) - }) - } - - if (BackendSettings[backend].fallback) { - describe('with a fallback', function() { - async function uploadStringToPersistor( - persistor, - bucket, - key, - content - ) { - const fileStream = streamifier.createReadStream(content) - await persistor.promises.sendStream(bucket, key, fileStream) - } - - async function getStringFromPersistor(persistor, bucket, key) { - const stream = await persistor.promises.getFileStream( - bucket, - key, - {} - ) - return streamToString(stream) - } - - async function expectPersistorToHaveFile( - persistor, - bucket, - key, - content - ) { - const foundContent = await getStringFromPersistor( - persistor, - bucket, - key - ) - expect(foundContent).to.equal(content) - } - - async function expectPersistorNotToHaveFile(persistor, bucket, key) { - await expect( - getStringFromPersistor(persistor, bucket, key) - ).to.eventually.have.been.rejected.with.property( - 'name', - 'NotFoundError' - ) - } - - let constantFileContent, - fileId, - fileKey, - fileUrl, - bucket, - fallbackBucket - - beforeEach(function() { - constantFileContent = `This is yet more file content ${Math.random()}` - fileId = Math.random().toString() - fileKey = `${projectId}/${directoryName}/${fileId}` - fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` - - bucket = Settings.filestore.stores.user_files - fallbackBucket = Settings.filestore.fallback.buckets[bucket] - }) - - describe('with a file in the fallback bucket', function() { - beforeEach(async function() { - await uploadStringToPersistor( - app.persistor.fallbackPersistor, - fallbackBucket, - fileKey, - constantFileContent - ) - }) - - it('should not find file in the primary', async function() { - await expectPersistorNotToHaveFile( - app.persistor.primaryPersistor, - bucket, - fileKey - ) - }) - - it('should find the file in the fallback', async function() { - await expectPersistorToHaveFile( - app.persistor.fallbackPersistor, - fallbackBucket, - fileKey, - constantFileContent - ) - }) - - describe('when copyOnMiss is disabled', function() { - beforeEach(function() { - Settings.filestore.fallback.copyOnMiss = false - }) - - it('should fetch the file', async function() { - const res = await rp.get(fileUrl) - expect(res.body).to.equal(constantFileContent) - }) - - it('should not copy the file to the primary', async function() { - await rp.get(fileUrl) - - await expectPersistorNotToHaveFile( - app.persistor.primaryPersistor, - bucket, - fileKey - ) - }) - }) - - describe('when copyOnMiss is enabled', function() { - beforeEach(function() { - Settings.filestore.fallback.copyOnMiss = true - }) - - it('should fetch the file', async function() { - const res = await rp.get(fileUrl) - expect(res.body).to.equal(constantFileContent) - }) - - it('copies the file to the primary', async function() { - await rp.get(fileUrl) - // wait for the file to copy in the background - await promisify(setTimeout)(1000) - - await expectPersistorToHaveFile( - app.persistor.primaryPersistor, - bucket, - fileKey, - constantFileContent - ) - }) - }) - - describe('when copying a file', function() { - let newFileId, newFileUrl, newFileKey, opts - - beforeEach(function() { - const newProjectID = `acceptance_tests_copied_project_${Math.random()}` - newFileId = Math.random() - newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}` - newFileKey = 
`${newProjectID}/${directoryName}/${newFileId}` - - opts = { - method: 'put', - uri: newFileUrl, - json: { - source: { - project_id: projectId, - file_id: `${directoryName}/${fileId}` - } - } - } - }) - - describe('when copyOnMiss is false', function() { - beforeEach(async function() { - Settings.filestore.fallback.copyOnMiss = false - - const response = await rp(opts) - expect(response.statusCode).to.equal(200) - }) - - it('should leave the old file in the old bucket', async function() { - await expectPersistorToHaveFile( - app.persistor.fallbackPersistor, - fallbackBucket, - fileKey, - constantFileContent - ) - }) - - it('should not create a new file in the old bucket', async function() { - await expectPersistorNotToHaveFile( - app.persistor.fallbackPersistor, - fallbackBucket, - newFileKey - ) - }) - - it('should create a new file in the new bucket', async function() { - await expectPersistorToHaveFile( - app.persistor.primaryPersistor, - bucket, - newFileKey, - constantFileContent - ) - }) - - it('should not copy the old file to the primary with the old key', async function() { - // wait for the file to copy in the background - await promisify(setTimeout)(1000) - - await expectPersistorNotToHaveFile( - app.persistor.primaryPersistor, - bucket, - fileKey - ) - }) - }) - - describe('when copyOnMiss is true', function() { - beforeEach(async function() { - Settings.filestore.fallback.copyOnMiss = true - - const response = await rp(opts) - expect(response.statusCode).to.equal(200) - }) - - it('should leave the old file in the old bucket', async function() { - await expectPersistorToHaveFile( - app.persistor.fallbackPersistor, - fallbackBucket, - fileKey, - constantFileContent - ) - }) - - it('should not create a new file in the old bucket', async function() { - await expectPersistorNotToHaveFile( - app.persistor.fallbackPersistor, - fallbackBucket, - newFileKey - ) - }) - - it('should create a new file in the new bucket', async function() { - await expectPersistorToHaveFile( - app.persistor.primaryPersistor, - bucket, - newFileKey, - constantFileContent - ) - }) - - it('should copy the old file to the primary with the old key', async function() { - // wait for the file to copy in the background - await promisify(setTimeout)(1000) - - await expectPersistorToHaveFile( - app.persistor.primaryPersistor, - bucket, - fileKey, - constantFileContent - ) - }) - }) - }) - }) - - describe('when sending a file', function() { - beforeEach(async function() { - const writeStream = request.post(fileUrl) - const readStream = streamifier.createReadStream( - constantFileContent - ) - // hack to consume the result to ensure the http request has been fully processed - const resultStream = fs.createWriteStream('/dev/null') - await pipeline(readStream, writeStream, resultStream) - }) - - it('should store the file on the primary', async function() { - await expectPersistorToHaveFile( - app.persistor.primaryPersistor, - bucket, - fileKey, - constantFileContent - ) - }) - - it('should not store the file on the fallback', async function() { - await expectPersistorNotToHaveFile( - app.persistor.fallbackPersistor, - fallbackBucket, - `${projectId}/${directoryName}/${fileId}` - ) - }) - }) - - describe('when deleting a file', function() { - describe('when the file exists on the primary', function() { - beforeEach(async function() { - await uploadStringToPersistor( - app.persistor.primaryPersistor, - bucket, - fileKey, - constantFileContent - ) - }) - - it('should delete the file', async function() { - const response = 
await rp.del(fileUrl) - expect(response.statusCode).to.equal(204) - await expect( - rp.get(fileUrl) - ).to.eventually.be.rejected.and.have.property('statusCode', 404) - }) - }) - - describe('when the file exists on the fallback', function() { - beforeEach(async function() { - await uploadStringToPersistor( - app.persistor.fallbackPersistor, - fallbackBucket, - fileKey, - constantFileContent - ) - }) - - it('should delete the file', async function() { - const response = await rp.del(fileUrl) - expect(response.statusCode).to.equal(204) - await expect( - rp.get(fileUrl) - ).to.eventually.be.rejected.and.have.property('statusCode', 404) - }) - }) - - describe('when the file exists on both the primary and the fallback', function() { - beforeEach(async function() { - await uploadStringToPersistor( - app.persistor.primaryPersistor, - bucket, - fileKey, - constantFileContent - ) - await uploadStringToPersistor( - app.persistor.fallbackPersistor, - fallbackBucket, - fileKey, - constantFileContent - ) - }) - - it('should delete the files', async function() { - const response = await rp.del(fileUrl) - expect(response.statusCode).to.equal(204) - await expect( - rp.get(fileUrl) - ).to.eventually.be.rejected.and.have.property('statusCode', 404) - }) - }) - - describe('when the file does not exist', function() { - it('should return return 204', async function() { - // S3 doesn't give us a 404 when the object doesn't exist, so to stay - // consistent we merrily return 204 ourselves here as well - const response = await rp.del(fileUrl) - expect(response.statusCode).to.equal(204) - }) - }) + expect(response.body).to.equal(constantFileContents) }) }) } @@ -773,7 +341,7 @@ describe('Filestore', function() { beforeEach(async function() { fileId = Math.random() - fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` + fileUrl = `${filestoreUrl}/project/acceptance_tests/file/${directoryName}%2F${fileId}` const stat = await fsStat(localFileReadPath) localFileSize = stat.size const writeStream = request.post(fileUrl) diff --git a/services/filestore/test/unit/js/FSPersistorTests.js b/services/filestore/test/unit/js/FSPersistorManagerTests.js similarity index 69% rename from services/filestore/test/unit/js/FSPersistorTests.js rename to services/filestore/test/unit/js/FSPersistorManagerTests.js index 0a09869bc0..3b3b4bf417 100644 --- a/services/filestore/test/unit/js/FSPersistorTests.js +++ b/services/filestore/test/unit/js/FSPersistorManagerTests.js @@ -7,37 +7,24 @@ const Errors = require('../../../app/js/Errors') chai.use(require('sinon-chai')) chai.use(require('chai-as-promised')) -const modulePath = '../../../app/js/FSPersistor.js' +const modulePath = '../../../app/js/FSPersistorManager.js' -describe('FSPersistorTests', function() { +describe('FSPersistorManagerTests', function() { const stat = { size: 4, isFile: sinon.stub().returns(true) } const fd = 1234 + const readStream = 'readStream' const writeStream = 'writeStream' const remoteStream = 'remoteStream' const tempFile = '/tmp/potato.txt' const location = '/foo' const error = new Error('guru meditation error') - const md5 = 'ffffffff' const files = ['animals/wombat.tex', 'vegetables/potato.tex'] const globs = [`${location}/${files[0]}`, `${location}/${files[1]}`] const filteredFilenames = ['animals_wombat.tex', 'vegetables_potato.tex'] - let fs, - rimraf, - stream, - LocalFileWriter, - FSPersistor, - glob, - readStream, - crypto, - Hash + let fs, rimraf, stream, LocalFileWriter, FSPersistorManager, glob beforeEach(function() { - 
readStream = { - name: 'readStream', - on: sinon.stub().yields(), - pipe: sinon.stub() - } fs = { createReadStream: sinon.stub().returns(readStream), createWriteStream: sinon.stub().returns(writeStream), @@ -54,26 +41,14 @@ describe('FSPersistorTests', function() { deleteFile: sinon.stub().resolves() } } - Hash = { - end: sinon.stub(), - read: sinon.stub().returns(md5), - setEncoding: sinon.stub() - } - crypto = { - createHash: sinon.stub().returns(Hash) - } - FSPersistor = SandboxedModule.require(modulePath, { + FSPersistorManager = SandboxedModule.require(modulePath, { requires: { './LocalFileWriter': LocalFileWriter, './Errors': Errors, fs, glob, rimraf, - stream, - crypto, - // imported by PersistorHelper but otherwise unused here - 'stream-meter': {}, - 'logger-sharelatex': {} + stream }, globals: { console } }) @@ -82,7 +57,7 @@ describe('FSPersistorTests', function() { describe('sendFile', function() { const localFilesystemPath = '/path/to/local/file' it('should copy the file', async function() { - await FSPersistor.promises.sendFile( + await FSPersistorManager.promises.sendFile( location, files[0], localFilesystemPath @@ -97,21 +72,33 @@ describe('FSPersistorTests', function() { it('should return an error if the file cannot be stored', async function() { stream.pipeline.yields(error) await expect( - FSPersistor.promises.sendFile(location, files[0], localFilesystemPath) + FSPersistorManager.promises.sendFile( + location, + files[0], + localFilesystemPath + ) ).to.eventually.be.rejected.and.have.property('cause', error) }) }) describe('sendStream', function() { it('should send the stream to LocalFileWriter', async function() { - await FSPersistor.promises.sendStream(location, files[0], remoteStream) + await FSPersistorManager.promises.sendStream( + location, + files[0], + remoteStream + ) expect(LocalFileWriter.promises.writeStream).to.have.been.calledWith( remoteStream ) }) it('should delete the temporary file', async function() { - await FSPersistor.promises.sendStream(location, files[0], remoteStream) + await FSPersistorManager.promises.sendStream( + location, + files[0], + remoteStream + ) expect(LocalFileWriter.promises.deleteFile).to.have.been.calledWith( tempFile ) @@ -120,55 +107,30 @@ describe('FSPersistorTests', function() { it('should return the error from LocalFileWriter', async function() { LocalFileWriter.promises.writeStream.rejects(error) await expect( - FSPersistor.promises.sendStream(location, files[0], remoteStream) + FSPersistorManager.promises.sendStream(location, files[0], remoteStream) ).to.eventually.be.rejectedWith(error) }) it('should send the temporary file to the filestore', async function() { - await FSPersistor.promises.sendStream(location, files[0], remoteStream) + await FSPersistorManager.promises.sendStream( + location, + files[0], + remoteStream + ) expect(fs.createReadStream).to.have.been.calledWith(tempFile) }) - - describe('when the md5 hash does not match', function() { - it('should return a write error', async function() { - await expect( - FSPersistor.promises.sendStream( - location, - files[0], - remoteStream, - '00000000' - ) - ) - .to.eventually.be.rejected.and.be.an.instanceOf(Errors.WriteError) - .and.have.property('message', 'md5 hash mismatch') - }) - - it('deletes the copied file', async function() { - try { - await FSPersistor.promises.sendStream( - location, - files[0], - remoteStream, - '00000000' - ) - } catch (_) {} - expect(LocalFileWriter.promises.deleteFile).to.have.been.calledWith( - `${location}/${filteredFilenames[0]}` - ) - 
}) - }) }) describe('getFileStream', function() { it('should use correct file location', async function() { - await FSPersistor.promises.getFileStream(location, files[0], {}) + await FSPersistorManager.promises.getFileStream(location, files[0], {}) expect(fs.open).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) }) it('should pass the options to createReadStream', async function() { - await FSPersistor.promises.getFileStream(location, files[0], { + await FSPersistorManager.promises.getFileStream(location, files[0], { start: 0, end: 8 }) @@ -184,14 +146,18 @@ describe('FSPersistorTests', function() { err.code = 'ENOENT' fs.open.yields(err) - await expect(FSPersistor.promises.getFileStream(location, files[0], {})) + await expect( + FSPersistorManager.promises.getFileStream(location, files[0], {}) + ) .to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) .and.have.property('cause', err) }) it('should wrap any other error', async function() { fs.open.yields(error) - await expect(FSPersistor.promises.getFileStream(location, files[0], {})) + await expect( + FSPersistorManager.promises.getFileStream(location, files[0], {}) + ) .to.eventually.be.rejectedWith('failed to open file for streaming') .and.be.an.instanceOf(Errors.ReadError) .and.have.property('cause', error) @@ -215,18 +181,18 @@ describe('FSPersistorTests', function() { it('should return the file size', async function() { expect( - await FSPersistor.promises.getFileSize(location, files[0]) + await FSPersistorManager.promises.getFileSize(location, files[0]) ).to.equal(size) }) it('should throw a NotFoundError if the file does not exist', async function() { await expect( - FSPersistor.promises.getFileSize(location, badFilename) + FSPersistorManager.promises.getFileSize(location, badFilename) ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) }) it('should wrap any other error', async function() { - await expect(FSPersistor.promises.getFileSize(location, 'raccoon')) + await expect(FSPersistorManager.promises.getFileSize(location, 'raccoon')) .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) .and.have.property('cause', error) }) @@ -234,28 +200,28 @@ describe('FSPersistorTests', function() { describe('copyFile', function() { it('Should open the source for reading', async function() { - await FSPersistor.promises.copyFile(location, files[0], files[1]) + await FSPersistorManager.promises.copyFile(location, files[0], files[1]) expect(fs.createReadStream).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) }) it('Should open the target for writing', async function() { - await FSPersistor.promises.copyFile(location, files[0], files[1]) + await FSPersistorManager.promises.copyFile(location, files[0], files[1]) expect(fs.createWriteStream).to.have.been.calledWith( `${location}/${filteredFilenames[1]}` ) }) it('Should pipe the source to the target', async function() { - await FSPersistor.promises.copyFile(location, files[0], files[1]) + await FSPersistorManager.promises.copyFile(location, files[0], files[1]) expect(stream.pipeline).to.have.been.calledWith(readStream, writeStream) }) }) describe('deleteFile', function() { it('Should call unlink with correct options', async function() { - await FSPersistor.promises.deleteFile(location, files[0]) + await FSPersistorManager.promises.deleteFile(location, files[0]) expect(fs.unlink).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) @@ -264,14 +230,14 @@ describe('FSPersistorTests', function() { it('Should 
propagate the error', async function() { fs.unlink.yields(error) await expect( - FSPersistor.promises.deleteFile(location, files[0]) + FSPersistorManager.promises.deleteFile(location, files[0]) ).to.eventually.be.rejected.and.have.property('cause', error) }) }) describe('deleteDirectory', function() { it('Should call rmdir(rimraf) with correct options', async function() { - await FSPersistor.promises.deleteDirectory(location, files[0]) + await FSPersistorManager.promises.deleteDirectory(location, files[0]) expect(rimraf).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) @@ -280,7 +246,7 @@ describe('FSPersistorTests', function() { it('Should propagate the error', async function() { rimraf.yields(error) await expect( - FSPersistor.promises.deleteDirectory(location, files[0]) + FSPersistorManager.promises.deleteDirectory(location, files[0]) ).to.eventually.be.rejected.and.have.property('cause', error) }) }) @@ -300,7 +266,7 @@ describe('FSPersistorTests', function() { }) it('Should call stat with correct options', async function() { - await FSPersistor.promises.checkIfFileExists(location, files[0]) + await FSPersistorManager.promises.checkIfFileExists(location, files[0]) expect(fs.stat).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) @@ -308,18 +274,23 @@ describe('FSPersistorTests', function() { it('Should return true for existing files', async function() { expect( - await FSPersistor.promises.checkIfFileExists(location, files[0]) + await FSPersistorManager.promises.checkIfFileExists(location, files[0]) ).to.equal(true) }) it('Should return false for non-existing files', async function() { expect( - await FSPersistor.promises.checkIfFileExists(location, badFilename) + await FSPersistorManager.promises.checkIfFileExists( + location, + badFilename + ) ).to.equal(false) }) it('should wrap the error if there is a problem', async function() { - await expect(FSPersistor.promises.checkIfFileExists(location, 'llama')) + await expect( + FSPersistorManager.promises.checkIfFileExists(location, 'llama') + ) .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) .and.have.property('cause', error) }) @@ -328,7 +299,9 @@ describe('FSPersistorTests', function() { describe('directorySize', function() { it('should wrap the error', async function() { glob.yields(error) - await expect(FSPersistor.promises.directorySize(location, files[0])) + await expect( + FSPersistorManager.promises.directorySize(location, files[0]) + ) .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) .and.include({ cause: error }) .and.have.property('info') @@ -336,7 +309,7 @@ describe('FSPersistorTests', function() { }) it('should filter the directory name', async function() { - await FSPersistor.promises.directorySize(location, files[0]) + await FSPersistorManager.promises.directorySize(location, files[0]) expect(glob).to.have.been.calledWith( `${location}/${filteredFilenames[0]}_*` ) @@ -344,7 +317,7 @@ describe('FSPersistorTests', function() { it('should sum directory files size', async function() { expect( - await FSPersistor.promises.directorySize(location, files[0]) + await FSPersistorManager.promises.directorySize(location, files[0]) ).to.equal(stat.size * files.length) }) }) diff --git a/services/filestore/test/unit/js/MigrationPersistorTests.js b/services/filestore/test/unit/js/MigrationPersistorTests.js deleted file mode 100644 index db8401c78c..0000000000 --- a/services/filestore/test/unit/js/MigrationPersistorTests.js +++ /dev/null @@ -1,519 +0,0 @@ -const sinon = 
require('sinon') -const chai = require('chai') -const { expect } = chai -const modulePath = '../../../app/js/MigrationPersistor.js' -const SandboxedModule = require('sandboxed-module') - -const Errors = require('../../../app/js/Errors') - -// Not all methods are tested here, but a method with each type of wrapping has -// tests. Specifically, the following wrapping methods are tested here: -// getFileStream: _wrapFallbackMethod -// sendStream: forward-to-primary -// deleteFile: _wrapMethodOnBothPersistors -// copyFile: copyFileWithFallback - -describe('MigrationPersistorTests', function() { - const bucket = 'womBucket' - const fallbackBucket = 'bucKangaroo' - const key = 'monKey' - const destKey = 'donKey' - const genericError = new Error('guru meditation error') - const notFoundError = new Errors.NotFoundError('not found') - const size = 33 - const md5 = 'ffffffff' - - let Metrics, - Settings, - Logger, - Stream, - MigrationPersistor, - fileStream, - newPersistor - - beforeEach(function() { - fileStream = { - name: 'fileStream', - on: sinon - .stub() - .withArgs('end') - .yields(), - pipe: sinon.stub() - } - - newPersistor = function(hasFile) { - return { - promises: { - sendFile: sinon.stub().resolves(), - sendStream: sinon.stub().resolves(), - getFileStream: hasFile - ? sinon.stub().resolves(fileStream) - : sinon.stub().rejects(notFoundError), - deleteDirectory: sinon.stub().resolves(), - getFileSize: hasFile - ? sinon.stub().resolves(size) - : sinon.stub().rejects(notFoundError), - deleteFile: sinon.stub().resolves(), - copyFile: hasFile - ? sinon.stub().resolves() - : sinon.stub().rejects(notFoundError), - checkIfFileExists: sinon.stub().resolves(hasFile), - directorySize: hasFile - ? sinon.stub().resolves(size) - : sinon.stub().rejects(notFoundError), - getFileMd5Hash: hasFile - ? 
sinon.stub().resolves(md5) - : sinon.stub().rejects(notFoundError) - } - } - } - - Settings = { - filestore: { - fallback: { - buckets: { - [bucket]: fallbackBucket - } - } - } - } - - Metrics = { - inc: sinon.stub() - } - - Stream = { - pipeline: sinon.stub().yields(), - PassThrough: sinon.stub() - } - - Logger = { - warn: sinon.stub() - } - - MigrationPersistor = SandboxedModule.require(modulePath, { - requires: { - 'settings-sharelatex': Settings, - stream: Stream, - './Errors': Errors, - 'metrics-sharelatex': Metrics, - 'logger-sharelatex': Logger - }, - globals: { console } - }) - }) - - describe('getFileStream', function() { - const options = { wombat: 'potato' } - describe('when the primary persistor has the file', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor, response - beforeEach(async function() { - primaryPersistor = newPersistor(true) - fallbackPersistor = newPersistor(false) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - response = await migrationPersistor.promises.getFileStream( - bucket, - key, - options - ) - }) - - it('should return the file stream', function() { - expect(response).to.equal(fileStream) - }) - - it('should fetch the file from the primary persistor, with the correct options', function() { - expect( - primaryPersistor.promises.getFileStream - ).to.have.been.calledWithExactly(bucket, key, options) - }) - - it('should not query the fallback persistor', function() { - expect(fallbackPersistor.promises.getFileStream).not.to.have.been.called - }) - }) - - describe('when the fallback persistor has the file', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor, response - beforeEach(async function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(true) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - response = await migrationPersistor.promises.getFileStream( - bucket, - key, - options - ) - }) - - it('should return the file stream', function() { - expect(response).to.be.an.instanceOf(Stream.PassThrough) - }) - - it('should fetch the file from the primary persistor with the correct options', function() { - expect( - primaryPersistor.promises.getFileStream - ).to.have.been.calledWithExactly(bucket, key, options) - }) - - it('should fetch the file from the fallback persistor with the fallback bucket with the correct options', function() { - expect( - fallbackPersistor.promises.getFileStream - ).to.have.been.calledWithExactly(fallbackBucket, key, options) - }) - - it('should create one read stream', function() { - expect(fallbackPersistor.promises.getFileStream).to.have.been.calledOnce - }) - - it('should not send the file to the primary', function() { - expect(primaryPersistor.promises.sendStream).not.to.have.been.called - }) - }) - - describe('when the file should be copied to the primary', function() { - let primaryPersistor, - fallbackPersistor, - migrationPersistor, - returnedStream - beforeEach(async function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(true) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - Settings.filestore.fallback.copyOnMiss = true - returnedStream = await migrationPersistor.promises.getFileStream( - bucket, - key, - options - ) - }) - - it('should create one read stream', function() { - expect(fallbackPersistor.promises.getFileStream).to.have.been.calledOnce - }) - - it('should get the md5 hash from the 
source', function() { - expect( - fallbackPersistor.promises.getFileMd5Hash - ).to.have.been.calledWith(fallbackBucket, key) - }) - - it('should send a stream to the primary', function() { - expect( - primaryPersistor.promises.sendStream - ).to.have.been.calledWithExactly( - bucket, - key, - sinon.match.instanceOf(Stream.PassThrough), - md5 - ) - }) - - it('should send a stream to the client', function() { - expect(returnedStream).to.be.an.instanceOf(Stream.PassThrough) - }) - }) - - describe('when neither persistor has the file', function() { - it('rejects with a NotFoundError', async function() { - const migrationPersistor = MigrationPersistor( - newPersistor(false), - newPersistor(false) - ) - return expect( - migrationPersistor.promises.getFileStream(bucket, key) - ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) - }) - }) - - describe('when the primary persistor throws an unexpected error', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor, error - beforeEach(async function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(true) - primaryPersistor.promises.getFileStream = sinon - .stub() - .rejects(genericError) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - try { - await migrationPersistor.promises.getFileStream(bucket, key, options) - } catch (err) { - error = err - } - }) - - it('rejects with the error', function() { - expect(error).to.equal(genericError) - }) - - it('does not call the fallback', function() { - expect(fallbackPersistor.promises.getFileStream).not.to.have.been.called - }) - }) - - describe('when the fallback persistor throws an unexpected error', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor, error - beforeEach(async function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(false) - fallbackPersistor.promises.getFileStream = sinon - .stub() - .rejects(genericError) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - try { - await migrationPersistor.promises.getFileStream(bucket, key, options) - } catch (err) { - error = err - } - }) - - it('rejects with the error', function() { - expect(error).to.equal(genericError) - }) - - it('should have called the fallback', function() { - expect( - fallbackPersistor.promises.getFileStream - ).to.have.been.calledWith(fallbackBucket, key) - }) - }) - }) - - describe('sendStream', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor - beforeEach(function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(false) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - }) - - describe('when it works', function() { - beforeEach(async function() { - return migrationPersistor.promises.sendStream(bucket, key, fileStream) - }) - - it('should send the file to the primary persistor', function() { - expect( - primaryPersistor.promises.sendStream - ).to.have.been.calledWithExactly(bucket, key, fileStream) - }) - - it('should not send the file to the fallback persistor', function() { - expect(fallbackPersistor.promises.sendStream).not.to.have.been.called - }) - }) - - describe('when the primary persistor throws an error', function() { - it('returns the error', async function() { - primaryPersistor.promises.sendStream.rejects(notFoundError) - return expect( - migrationPersistor.promises.sendStream(bucket, key, fileStream) - 
).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) - }) - }) - }) - - describe('deleteFile', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor - beforeEach(function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(false) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - }) - - describe('when it works', function() { - beforeEach(async function() { - return migrationPersistor.promises.deleteFile(bucket, key) - }) - - it('should delete the file from the primary', function() { - expect( - primaryPersistor.promises.deleteFile - ).to.have.been.calledWithExactly(bucket, key) - }) - - it('should delete the file from the fallback', function() { - expect( - fallbackPersistor.promises.deleteFile - ).to.have.been.calledWithExactly(fallbackBucket, key) - }) - }) - - describe('when the primary persistor throws an error', function() { - let error - beforeEach(async function() { - primaryPersistor.promises.deleteFile.rejects(genericError) - try { - await migrationPersistor.promises.deleteFile(bucket, key) - } catch (err) { - error = err - } - }) - - it('should return the error', function() { - expect(error).to.equal(genericError) - }) - - it('should delete the file from the primary', function() { - expect( - primaryPersistor.promises.deleteFile - ).to.have.been.calledWithExactly(bucket, key) - }) - - it('should delete the file from the fallback', function() { - expect( - fallbackPersistor.promises.deleteFile - ).to.have.been.calledWithExactly(fallbackBucket, key) - }) - }) - - describe('when the fallback persistor throws an error', function() { - let error - beforeEach(async function() { - fallbackPersistor.promises.deleteFile.rejects(genericError) - try { - await migrationPersistor.promises.deleteFile(bucket, key) - } catch (err) { - error = err - } - }) - - it('should return the error', function() { - expect(error).to.equal(genericError) - }) - - it('should delete the file from the primary', function() { - expect( - primaryPersistor.promises.deleteFile - ).to.have.been.calledWithExactly(bucket, key) - }) - - it('should delete the file from the fallback', function() { - expect( - fallbackPersistor.promises.deleteFile - ).to.have.been.calledWithExactly(fallbackBucket, key) - }) - }) - }) - - describe('copyFile', function() { - describe('when the file exists on the primary', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor - beforeEach(async function() { - primaryPersistor = newPersistor(true) - fallbackPersistor = newPersistor(false) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - return migrationPersistor.promises.copyFile(bucket, key, destKey) - }) - - it('should call copyFile to copy the file', function() { - expect( - primaryPersistor.promises.copyFile - ).to.have.been.calledWithExactly(bucket, key, destKey) - }) - - it('should not try to read from the fallback', function() { - expect(fallbackPersistor.promises.getFileStream).not.to.have.been.called - }) - }) - - describe('when the file does not exist on the primary', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor - beforeEach(async function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(true) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - return migrationPersistor.promises.copyFile(bucket, key, destKey) - }) - - it('should call copyFile to copy the file', function() { - 
expect( - primaryPersistor.promises.copyFile - ).to.have.been.calledWithExactly(bucket, key, destKey) - }) - - it('should fetch the file from the fallback', function() { - expect( - fallbackPersistor.promises.getFileStream - ).not.to.have.been.calledWithExactly(fallbackBucket, key) - }) - - it('should get the md5 hash from the source', function() { - expect( - fallbackPersistor.promises.getFileMd5Hash - ).to.have.been.calledWith(fallbackBucket, key) - }) - - it('should send the file to the primary', function() { - expect( - primaryPersistor.promises.sendStream - ).to.have.been.calledWithExactly( - bucket, - destKey, - sinon.match.instanceOf(Stream.PassThrough), - md5 - ) - }) - }) - - describe('when the file does not exist on the fallback', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor, error - beforeEach(async function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(false) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - try { - await migrationPersistor.promises.copyFile(bucket, key, destKey) - } catch (err) { - error = err - } - }) - - it('should call copyFile to copy the file', function() { - expect( - primaryPersistor.promises.copyFile - ).to.have.been.calledWithExactly(bucket, key, destKey) - }) - - it('should fetch the file from the fallback', function() { - expect( - fallbackPersistor.promises.getFileStream - ).not.to.have.been.calledWithExactly(fallbackBucket, key) - }) - - it('should return a not-found error', function() { - expect(error).to.be.an.instanceOf(Errors.NotFoundError) - }) - }) - }) -}) diff --git a/services/filestore/test/unit/js/PersistorManagerTests.js b/services/filestore/test/unit/js/PersistorManagerTests.js index cdc9de0f92..0ecbb22078 100644 --- a/services/filestore/test/unit/js/PersistorManagerTests.js +++ b/services/filestore/test/unit/js/PersistorManagerTests.js @@ -6,14 +6,18 @@ const SandboxedModule = require('sandboxed-module') const modulePath = '../../../app/js/PersistorManager.js' describe('PersistorManager', function() { - let PersistorManager, FSPersistor, S3Persistor, settings, requires + let PersistorManager, + FSPersistorManager, + S3PersistorManager, + settings, + requires beforeEach(function() { - FSPersistor = { - wrappedMethod: sinon.stub().returns('FSPersistor') + FSPersistorManager = { + wrappedMethod: sinon.stub().returns('FSPersistorManager') } - S3Persistor = { - wrappedMethod: sinon.stub().returns('S3Persistor') + S3PersistorManager = { + wrappedMethod: sinon.stub().returns('S3PersistorManager') } settings = { @@ -21,8 +25,8 @@ describe('PersistorManager', function() { } requires = { - './S3Persistor': S3Persistor, - './FSPersistor': FSPersistor, + './S3PersistorManager': S3PersistorManager, + './FSPersistorManager': FSPersistorManager, 'settings-sharelatex': settings, 'logger-sharelatex': { log() {}, @@ -36,7 +40,7 @@ describe('PersistorManager', function() { PersistorManager = SandboxedModule.require(modulePath, { requires }) expect(PersistorManager).to.respondTo('wrappedMethod') - expect(PersistorManager.wrappedMethod()).to.equal('S3Persistor') + expect(PersistorManager.wrappedMethod()).to.equal('S3PersistorManager') }) it("should implement the S3 wrapped method when 'aws-sdk' is configured", function() { @@ -44,7 +48,7 @@ describe('PersistorManager', function() { PersistorManager = SandboxedModule.require(modulePath, { requires }) expect(PersistorManager).to.respondTo('wrappedMethod') - 
expect(PersistorManager.wrappedMethod()).to.equal('S3Persistor') + expect(PersistorManager.wrappedMethod()).to.equal('S3PersistorManager') }) it('should implement the FS wrapped method when FS is configured', function() { @@ -52,7 +56,7 @@ describe('PersistorManager', function() { PersistorManager = SandboxedModule.require(modulePath, { requires }) expect(PersistorManager).to.respondTo('wrappedMethod') - expect(PersistorManager.wrappedMethod()).to.equal('FSPersistor') + expect(PersistorManager.wrappedMethod()).to.equal('FSPersistorManager') }) it('should throw an error when the backend is not configured', function() { diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorManagerTests.js similarity index 78% rename from services/filestore/test/unit/js/S3PersistorTests.js rename to services/filestore/test/unit/js/S3PersistorManagerTests.js index 9686deed5f..daeac66d3f 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorManagerTests.js @@ -1,12 +1,12 @@ const sinon = require('sinon') const chai = require('chai') const { expect } = chai -const modulePath = '../../../app/js/S3Persistor.js' +const modulePath = '../../../app/js/S3PersistorManager.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../app/js/Errors') -describe('S3PersistorTests', function() { +describe('S3PersistorManagerTests', function() { const defaultS3Key = 'frog' const defaultS3Secret = 'prince' const defaultS3Credentials = { @@ -26,26 +26,21 @@ describe('S3PersistorTests', function() { { Key: 'hippo', Size: 22 } ] const filesSize = 33 - const md5 = 'ffffffff00000000ffffffff00000000' let Metrics, - Logger, S3, Fs, Meter, MeteredStream, ReadStream, - Stream, - S3Persistor, + S3PersistorManager, S3Client, S3ReadStream, S3NotFoundError, S3AccessDeniedError, FileNotFoundError, EmptyPromise, - settings, - Hash, - crypto + settings beforeEach(function() { settings = { @@ -61,10 +56,6 @@ describe('S3PersistorTests', function() { } } - Stream = { - pipeline: sinon.stub().yields() - } - EmptyPromise = { promise: sinon.stub().resolves() } @@ -74,11 +65,7 @@ describe('S3PersistorTests', function() { } ReadStream = { - pipe: sinon.stub().returns('readStream'), - on: sinon - .stub() - .withArgs('end') - .yields() + pipe: sinon.stub().returns('readStream') } FileNotFoundError = new Error('File not found') @@ -89,7 +76,6 @@ describe('S3PersistorTests', function() { } MeteredStream = { - type: 'metered', on: sinon.stub(), bytes: objectSize } @@ -104,7 +90,7 @@ describe('S3PersistorTests', function() { S3ReadStream = { on: sinon.stub(), - pipe: sinon.stub(), + pipe: sinon.stub().returns('s3Stream'), removeListener: sinon.stub() } S3ReadStream.on.withArgs('readable').yields() @@ -114,8 +100,7 @@ describe('S3PersistorTests', function() { }), headObject: sinon.stub().returns({ promise: sinon.stub().resolves({ - ContentLength: objectSize, - ETag: md5 + ContentLength: objectSize }) }), listObjects: sinon.stub().returns({ @@ -123,39 +108,21 @@ describe('S3PersistorTests', function() { Contents: files }) }), - upload: sinon - .stub() - .returns({ promise: sinon.stub().resolves({ ETag: `"${md5}"` }) }), + upload: sinon.stub().returns(EmptyPromise), copyObject: sinon.stub().returns(EmptyPromise), deleteObject: sinon.stub().returns(EmptyPromise), deleteObjects: sinon.stub().returns(EmptyPromise) } S3 = sinon.stub().returns(S3Client) - Hash = { - end: sinon.stub(), - read: sinon.stub().returns(md5), - 
setEncoding: sinon.stub() - } - crypto = { - createHash: sinon.stub().returns(Hash) - } - - Logger = { - warn: sinon.stub() - } - - S3Persistor = SandboxedModule.require(modulePath, { + S3PersistorManager = SandboxedModule.require(modulePath, { requires: { 'aws-sdk/clients/s3': S3, 'settings-sharelatex': settings, - 'logger-sharelatex': Logger, './Errors': Errors, fs: Fs, 'stream-meter': Meter, - stream: Stream, - 'metrics-sharelatex': Metrics, - crypto + 'metrics-sharelatex': Metrics }, globals: { console } }) @@ -166,11 +133,11 @@ describe('S3PersistorTests', function() { let stream beforeEach(async function() { - stream = await S3Persistor.promises.getFileStream(bucket, key) + stream = await S3PersistorManager.promises.getFileStream(bucket, key) }) - it('returns a metered stream', function() { - expect(stream).to.equal(MeteredStream) + it('returns a stream', function() { + expect(stream).to.equal('s3Stream') }) it('sets the AWS client up with credentials from settings', function() { @@ -185,10 +152,7 @@ describe('S3PersistorTests', function() { }) it('pipes the stream through the meter', function() { - expect(Stream.pipeline).to.have.been.calledWith( - S3ReadStream, - MeteredStream - ) + expect(S3ReadStream.pipe).to.have.been.calledWith(MeteredStream) }) it('records an ingress metric', function() { @@ -200,14 +164,14 @@ describe('S3PersistorTests', function() { let stream beforeEach(async function() { - stream = await S3Persistor.promises.getFileStream(bucket, key, { + stream = await S3PersistorManager.promises.getFileStream(bucket, key, { start: 5, end: 10 }) }) - it('returns a metered stream', function() { - expect(stream).to.equal(MeteredStream) + it('returns a stream', function() { + expect(stream).to.equal('s3Stream') }) it('passes the byte range on to S3', function() { @@ -237,11 +201,11 @@ describe('S3PersistorTests', function() { auth_secret: alternativeSecret } - stream = await S3Persistor.promises.getFileStream(bucket, key) + stream = await S3PersistorManager.promises.getFileStream(bucket, key) }) - it('returns a metered stream', function() { - expect(stream).to.equal(MeteredStream) + it('returns a stream', function() { + expect(stream).to.equal('s3Stream') }) it('sets the AWS client up with the alternative credentials', function() { @@ -256,13 +220,16 @@ describe('S3PersistorTests', function() { }) it('caches the credentials', async function() { - stream = await S3Persistor.promises.getFileStream(bucket, key) + stream = await S3PersistorManager.promises.getFileStream(bucket, key) expect(S3).to.have.been.calledOnceWith(alternativeS3Credentials) }) it('uses the default credentials for an unknown bucket', async function() { - stream = await S3Persistor.promises.getFileStream('anotherBucket', key) + stream = await S3PersistorManager.promises.getFileStream( + 'anotherBucket', + key + ) expect(S3).to.have.been.calledTwice expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials) @@ -270,8 +237,14 @@ describe('S3PersistorTests', function() { }) it('caches the default credentials', async function() { - stream = await S3Persistor.promises.getFileStream('anotherBucket', key) - stream = await S3Persistor.promises.getFileStream('anotherBucket', key) + stream = await S3PersistorManager.promises.getFileStream( + 'anotherBucket', + key + ) + stream = await S3PersistorManager.promises.getFileStream( + 'anotherBucket', + key + ) expect(S3).to.have.been.calledTwice expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials) @@ -283,7 +256,7 @@ 
describe('S3PersistorTests', function() { delete settings.filestore.s3.secret await expect( - S3Persistor.promises.getFileStream('anotherBucket', key) + S3PersistorManager.promises.getFileStream('anotherBucket', key) ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.SettingsError) }) }) @@ -295,7 +268,7 @@ describe('S3PersistorTests', function() { S3ReadStream.on = sinon.stub() S3ReadStream.on.withArgs('error').yields(S3NotFoundError) try { - stream = await S3Persistor.promises.getFileStream(bucket, key) + stream = await S3PersistorManager.promises.getFileStream(bucket, key) } catch (err) { error = err } @@ -309,12 +282,12 @@ describe('S3PersistorTests', function() { expect(error).to.be.an.instanceOf(Errors.NotFoundError) }) - it('wraps the error', function() { - expect(error.cause).to.exist + it('wraps the error from S3', function() { + expect(error.cause).to.equal(S3NotFoundError) }) it('stores the bucket and key in the error', function() { - expect(error.info).to.include({ bucketName: bucket, key: key }) + expect(error.info).to.deep.equal({ Bucket: bucket, Key: key }) }) }) @@ -325,7 +298,7 @@ describe('S3PersistorTests', function() { S3ReadStream.on = sinon.stub() S3ReadStream.on.withArgs('error').yields(S3AccessDeniedError) try { - stream = await S3Persistor.promises.getFileStream(bucket, key) + stream = await S3PersistorManager.promises.getFileStream(bucket, key) } catch (err) { error = err } @@ -339,12 +312,12 @@ describe('S3PersistorTests', function() { expect(error).to.be.an.instanceOf(Errors.NotFoundError) }) - it('wraps the error', function() { - expect(error.cause).to.exist + it('wraps the error from S3', function() { + expect(error.cause).to.equal(S3AccessDeniedError) }) it('stores the bucket and key in the error', function() { - expect(error.info).to.include({ bucketName: bucket, key: key }) + expect(error.info).to.deep.equal({ Bucket: bucket, Key: key }) }) }) @@ -355,7 +328,7 @@ describe('S3PersistorTests', function() { S3ReadStream.on = sinon.stub() S3ReadStream.on.withArgs('error').yields(genericError) try { - stream = await S3Persistor.promises.getFileStream(bucket, key) + stream = await S3PersistorManager.promises.getFileStream(bucket, key) } catch (err) { error = err } @@ -369,12 +342,12 @@ describe('S3PersistorTests', function() { expect(error).to.be.an.instanceOf(Errors.ReadError) }) - it('wraps the error', function() { - expect(error.cause).to.exist + it('wraps the error from S3', function() { + expect(error.cause).to.equal(genericError) }) it('stores the bucket and key in the error', function() { - expect(error.info).to.include({ bucketName: bucket, key: key }) + expect(error.info).to.deep.equal({ Bucket: bucket, Key: key }) }) }) }) @@ -384,7 +357,7 @@ describe('S3PersistorTests', function() { let size beforeEach(async function() { - size = await S3Persistor.promises.getFileSize(bucket, key) + size = await S3PersistorManager.promises.getFileSize(bucket, key) }) it('should return the object size', function() { @@ -407,7 +380,7 @@ describe('S3PersistorTests', function() { promise: sinon.stub().rejects(S3NotFoundError) }) try { - await S3Persistor.promises.getFileSize(bucket, key) + await S3PersistorManager.promises.getFileSize(bucket, key) } catch (err) { error = err } @@ -430,7 +403,7 @@ describe('S3PersistorTests', function() { promise: sinon.stub().rejects(genericError) }) try { - await S3Persistor.promises.getFileSize(bucket, key) + await S3PersistorManager.promises.getFileSize(bucket, key) } catch (err) { error = err } @@ -449,62 +422,24 @@ 
describe('S3PersistorTests', function() { describe('sendStream', function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3Persistor.promises.sendStream(bucket, key, ReadStream) + return S3PersistorManager.promises.sendStream(bucket, key, ReadStream) }) it('should upload the stream', function() { expect(S3Client.upload).to.have.been.calledWith({ Bucket: bucket, Key: key, - Body: MeteredStream + Body: 'readStream' }) }) it('should meter the stream', function() { - expect(Stream.pipeline).to.have.been.calledWith( - ReadStream, - MeteredStream - ) + expect(ReadStream.pipe).to.have.been.calledWith(MeteredStream) }) it('should record an egress metric', function() { expect(Metrics.count).to.have.been.calledWith('s3.egress', objectSize) }) - - it('calculates the md5 hash of the file', function() { - expect(Stream.pipeline).to.have.been.calledWith(ReadStream, Hash) - }) - }) - - describe('when a hash is supploed', function() { - beforeEach(async function() { - return S3Persistor.promises.sendStream( - bucket, - key, - ReadStream, - 'aaaaaaaabbbbbbbbaaaaaaaabbbbbbbb' - ) - }) - - it('should not calculate the md5 hash of the file', function() { - expect(Stream.pipeline).not.to.have.been.calledWith( - sinon.match.any, - Hash - ) - }) - - it('sends the hash in base64', function() { - expect(S3Client.upload).to.have.been.calledWith({ - Bucket: bucket, - Key: key, - Body: MeteredStream, - ContentMD5: 'qqqqqru7u7uqqqqqu7u7uw==' - }) - }) - - it('does not fetch the md5 hash of the uploaded file', function() { - expect(S3Client.headObject).not.to.have.been.called - }) }) describe('when the upload fails', function() { @@ -514,7 +449,7 @@ describe('S3PersistorTests', function() { promise: sinon.stub().rejects(genericError) }) try { - await S3Persistor.promises.sendStream(bucket, key, ReadStream) + await S3PersistorManager.promises.sendStream(bucket, key, ReadStream) } catch (err) { error = err } @@ -529,7 +464,7 @@ describe('S3PersistorTests', function() { describe('sendFile', function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3Persistor.promises.sendFile(bucket, key, filename) + return S3PersistorManager.promises.sendFile(bucket, key, filename) }) it('should create a read stream for the file', function() { @@ -540,7 +475,7 @@ describe('S3PersistorTests', function() { expect(S3Client.upload).to.have.been.calledWith({ Bucket: bucket, Key: key, - Body: MeteredStream + Body: 'readStream' }) }) }) @@ -551,7 +486,7 @@ describe('S3PersistorTests', function() { beforeEach(async function() { Fs.createReadStream = sinon.stub().throws(FileNotFoundError) try { - await S3Persistor.promises.sendFile(bucket, key, filename) + await S3PersistorManager.promises.sendFile(bucket, key, filename) } catch (err) { error = err } @@ -572,7 +507,7 @@ describe('S3PersistorTests', function() { beforeEach(async function() { Fs.createReadStream = sinon.stub().throws(genericError) try { - await S3Persistor.promises.sendFile(bucket, key, filename) + await S3PersistorManager.promises.sendFile(bucket, key, filename) } catch (err) { error = err } @@ -591,7 +526,7 @@ describe('S3PersistorTests', function() { describe('copyFile', function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3Persistor.promises.copyFile(bucket, key, destKey) + return S3PersistorManager.promises.copyFile(bucket, key, destKey) }) it('should copy the object', function() { @@ -611,7 +546,7 @@ describe('S3PersistorTests', function() { 
promise: sinon.stub().rejects(S3NotFoundError) }) try { - await S3Persistor.promises.copyFile(bucket, key, destKey) + await S3PersistorManager.promises.copyFile(bucket, key, destKey) } catch (err) { error = err } @@ -626,7 +561,7 @@ describe('S3PersistorTests', function() { describe('deleteFile', function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3Persistor.promises.deleteFile(bucket, key) + return S3PersistorManager.promises.deleteFile(bucket, key) }) it('should delete the object', function() { @@ -645,7 +580,7 @@ describe('S3PersistorTests', function() { promise: sinon.stub().rejects(S3NotFoundError) }) try { - await S3Persistor.promises.deleteFile(bucket, key) + await S3PersistorManager.promises.deleteFile(bucket, key) } catch (err) { error = err } @@ -660,7 +595,7 @@ describe('S3PersistorTests', function() { describe('deleteDirectory', function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3Persistor.promises.deleteDirectory(bucket, key) + return S3PersistorManager.promises.deleteDirectory(bucket, key) }) it('should list the objects in the directory', function() { @@ -686,7 +621,7 @@ describe('S3PersistorTests', function() { S3Client.listObjects = sinon .stub() .returns({ promise: sinon.stub().resolves({ Contents: [] }) }) - return S3Persistor.promises.deleteDirectory(bucket, key) + return S3PersistorManager.promises.deleteDirectory(bucket, key) }) it('should list the objects in the directory', function() { @@ -709,7 +644,7 @@ describe('S3PersistorTests', function() { .stub() .returns({ promise: sinon.stub().rejects(genericError) }) try { - await S3Persistor.promises.deleteDirectory(bucket, key) + await S3PersistorManager.promises.deleteDirectory(bucket, key) } catch (err) { error = err } @@ -736,7 +671,7 @@ describe('S3PersistorTests', function() { .stub() .returns({ promise: sinon.stub().rejects(genericError) }) try { - await S3Persistor.promises.deleteDirectory(bucket, key) + await S3PersistorManager.promises.deleteDirectory(bucket, key) } catch (err) { error = err } @@ -757,7 +692,7 @@ describe('S3PersistorTests', function() { let size beforeEach(async function() { - size = await S3Persistor.promises.directorySize(bucket, key) + size = await S3PersistorManager.promises.directorySize(bucket, key) }) it('should list the objects in the directory', function() { @@ -779,7 +714,7 @@ describe('S3PersistorTests', function() { S3Client.listObjects = sinon .stub() .returns({ promise: sinon.stub().resolves({ Contents: [] }) }) - size = await S3Persistor.promises.directorySize(bucket, key) + size = await S3PersistorManager.promises.directorySize(bucket, key) }) it('should list the objects in the directory', function() { @@ -802,7 +737,7 @@ describe('S3PersistorTests', function() { .stub() .returns({ promise: sinon.stub().rejects(genericError) }) try { - await S3Persistor.promises.directorySize(bucket, key) + await S3PersistorManager.promises.directorySize(bucket, key) } catch (err) { error = err } @@ -823,7 +758,10 @@ describe('S3PersistorTests', function() { let exists beforeEach(async function() { - exists = await S3Persistor.promises.checkIfFileExists(bucket, key) + exists = await S3PersistorManager.promises.checkIfFileExists( + bucket, + key + ) }) it('should get the object header', function() { @@ -845,7 +783,10 @@ describe('S3PersistorTests', function() { S3Client.headObject = sinon .stub() .returns({ promise: sinon.stub().rejects(S3NotFoundError) }) - exists = await 
S3Persistor.promises.checkIfFileExists(bucket, key) + exists = await S3PersistorManager.promises.checkIfFileExists( + bucket, + key + ) }) it('should get the object header', function() { @@ -868,7 +809,7 @@ describe('S3PersistorTests', function() { .stub() .returns({ promise: sinon.stub().rejects(genericError) }) try { - await S3Persistor.promises.checkIfFileExists(bucket, key) + await S3PersistorManager.promises.checkIfFileExists(bucket, key) } catch (err) { error = err } From 4e507197fc6c0744f1077f1bfd28d3507a54e209 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 17 Feb 2020 14:04:42 +0000 Subject: [PATCH 421/555] Revert "Merge pull request #91 from overleaf/revert-78-spd-migration-persistor" This reverts commit 0cce792604c258538cf012486e7329fa1be7da0f, reversing changes made to 08b606e22f855121b2d8a2832a5d7e2f45836c3f. --- services/filestore/.eslintrc | 3 +- .../{FSPersistorManager.js => FSPersistor.js} | 68 ++- .../filestore/app/js/MigrationPersistor.js | 228 ++++++++ services/filestore/app/js/PersistorHelper.js | 105 ++++ services/filestore/app/js/PersistorManager.js | 35 +- .../{S3PersistorManager.js => S3Persistor.js} | 179 ++++-- .../filestore/config/settings.defaults.coffee | 61 +- services/filestore/package-lock.json | 282 +--------- services/filestore/package.json | 8 +- .../test/acceptance/js/FilestoreApp.js | 1 + .../test/acceptance/js/FilestoreTests.js | 490 ++++++++++++++++- ...torManagerTests.js => FSPersistorTests.js} | 147 +++-- .../test/unit/js/MigrationPersistorTests.js | 519 ++++++++++++++++++ .../test/unit/js/PersistorManagerTests.js | 24 +- ...torManagerTests.js => S3PersistorTests.js} | 213 ++++--- 15 files changed, 1782 insertions(+), 581 deletions(-) rename services/filestore/app/js/{FSPersistorManager.js => FSPersistor.js} (75%) create mode 100644 services/filestore/app/js/MigrationPersistor.js create mode 100644 services/filestore/app/js/PersistorHelper.js rename services/filestore/app/js/{S3PersistorManager.js => S3Persistor.js} (63%) rename services/filestore/test/unit/js/{FSPersistorManagerTests.js => FSPersistorTests.js} (69%) create mode 100644 services/filestore/test/unit/js/MigrationPersistorTests.js rename services/filestore/test/unit/js/{S3PersistorManagerTests.js => S3PersistorTests.js} (78%) diff --git a/services/filestore/.eslintrc b/services/filestore/.eslintrc index 42a4b5cace..73103de7f6 100644 --- a/services/filestore/.eslintrc +++ b/services/filestore/.eslintrc @@ -23,7 +23,8 @@ "rules": { // Swap the no-unused-expressions rule with a more chai-friendly one "no-unused-expressions": 0, - "chai-friendly/no-unused-expressions": "error" + "chai-friendly/no-unused-expressions": "error", + "no-console": "error" }, "overrides": [ { diff --git a/services/filestore/app/js/FSPersistorManager.js b/services/filestore/app/js/FSPersistor.js similarity index 75% rename from services/filestore/app/js/FSPersistorManager.js rename to services/filestore/app/js/FSPersistor.js index 862acb9bcb..973c670efd 100644 --- a/services/filestore/app/js/FSPersistorManager.js +++ b/services/filestore/app/js/FSPersistor.js @@ -7,6 +7,7 @@ const { promisify, callbackify } = require('util') const LocalFileWriter = require('./LocalFileWriter').promises const { NotFoundError, ReadError, WriteError } = require('./Errors') +const PersistorHelper = require('./PersistorHelper') const pipeline = promisify(Stream.pipeline) const fsUnlink = promisify(fs.unlink) @@ -27,7 +28,7 @@ async function sendFile(location, target, source) { const targetStream = 
fs.createWriteStream(`${location}/${filteredTarget}`) await pipeline(sourceStream, targetStream) } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to copy the specified file', { location, target, source }, @@ -36,11 +37,22 @@ async function sendFile(location, target, source) { } } -async function sendStream(location, target, sourceStream) { +async function sendStream(location, target, sourceStream, sourceMd5) { const fsPath = await LocalFileWriter.writeStream(sourceStream) + if (!sourceMd5) { + sourceMd5 = await _getFileMd5HashForPath(fsPath) + } try { await sendFile(location, target, fsPath) + const destMd5 = await getFileMd5Hash(location, target) + if (sourceMd5 !== destMd5) { + await LocalFileWriter.deleteFile(`${location}/${filterName(target)}`) + throw new WriteError({ + message: 'md5 hash mismatch', + info: { sourceMd5, destMd5, location, target } + }) + } } finally { await LocalFileWriter.deleteFile(fsPath) } @@ -53,7 +65,7 @@ async function getFileStream(location, name, opts) { try { opts.fd = await fsOpen(`${location}/${filteredName}`, 'r') } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to open file for streaming', { location, filteredName, opts }, @@ -71,7 +83,7 @@ async function getFileSize(location, filename) { const stat = await fsStat(fullPath) return stat.size } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to stat file', { location, filename }, @@ -80,6 +92,18 @@ async function getFileSize(location, filename) { } } +async function getFileMd5Hash(location, filename) { + const fullPath = path.join(location, filterName(filename)) + try { + return await _getFileMd5HashForPath(fullPath) + } catch (err) { + throw new ReadError({ + message: 'unable to get md5 hash from file', + info: { location, filename } + }).withCause(err) + } +} + async function copyFile(location, fromName, toName) { const filteredFromName = filterName(fromName) const filteredToName = filterName(toName) @@ -89,7 +113,7 @@ async function copyFile(location, fromName, toName) { const targetStream = fs.createWriteStream(`${location}/${filteredToName}`) await pipeline(sourceStream, targetStream) } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to copy file', { location, filteredFromName, filteredToName }, @@ -103,12 +127,17 @@ async function deleteFile(location, name) { try { await fsUnlink(`${location}/${filteredName}`) } catch (err) { - throw _wrapError( + const wrappedError = PersistorHelper.wrapError( err, 'failed to delete file', { location, filteredName }, WriteError ) + if (!(wrappedError instanceof NotFoundError)) { + // S3 doesn't give us a 404 when a file wasn't there to be deleted, so we + // should be consistent here as well + throw wrappedError + } } } @@ -119,7 +148,7 @@ async function deleteDirectory(location, name) { try { await rmrf(`${location}/${filteredName}`) } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to delete directory', { location, filteredName }, @@ -137,7 +166,7 @@ async function checkIfFileExists(location, name) { if (err.code === 'ENOENT') { return false } - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to stat file', { location, filteredName }, @@ -167,7 +196,7 @@ async function directorySize(location, name) { } } } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to get directory size', { location, name }, @@ -178,25 +207,12 @@ async function 
directorySize(location, name) { return size } -function _wrapError(error, message, params, ErrorType) { - if (error.code === 'ENOENT') { - return new NotFoundError({ - message: 'no such file or directory', - info: params - }).withCause(error) - } else { - return new ErrorType({ - message: message, - info: params - }).withCause(error) - } -} - module.exports = { sendFile: callbackify(sendFile), sendStream: callbackify(sendStream), getFileStream: callbackify(getFileStream), getFileSize: callbackify(getFileSize), + getFileMd5Hash: callbackify(getFileMd5Hash), copyFile: callbackify(copyFile), deleteFile: callbackify(deleteFile), deleteDirectory: callbackify(deleteDirectory), @@ -207,6 +223,7 @@ module.exports = { sendStream, getFileStream, getFileSize, + getFileMd5Hash, copyFile, deleteFile, deleteDirectory, @@ -214,3 +231,8 @@ module.exports = { directorySize } } + +async function _getFileMd5HashForPath(fullPath) { + const stream = fs.createReadStream(fullPath) + return PersistorHelper.calculateStreamMd5(stream) +} diff --git a/services/filestore/app/js/MigrationPersistor.js b/services/filestore/app/js/MigrationPersistor.js new file mode 100644 index 0000000000..3ddc762922 --- /dev/null +++ b/services/filestore/app/js/MigrationPersistor.js @@ -0,0 +1,228 @@ +const metrics = require('metrics-sharelatex') +const Settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') +const Stream = require('stream') +const { callbackify, promisify } = require('util') +const { NotFoundError, WriteError } = require('./Errors') + +const pipeline = promisify(Stream.pipeline) + +// Persistor that wraps two other persistors. Talks to the 'primary' by default, +// but will fall back to an older persistor in the case of a not-found error. +// If `Settings.filestore.fallback.copyOnMiss` is set, this will copy files from the fallback +// to the primary, in the event that they are missing. +// +// It is unlikely that the bucket/location name will be the same on the fallback +// as the primary. The bucket names should be overridden in `Settings.filestore.fallback.buckets` +// e.g. 
+// Settings.filestore.fallback.buckets = { +// myBucketOnS3: 'myBucketOnGCS' +// } + +module.exports = function(primary, fallback) { + function _wrapMethodOnBothPersistors(method) { + return async function(bucket, key, ...moreArgs) { + const fallbackBucket = _getFallbackBucket(bucket) + + await Promise.all([ + primary.promises[method](bucket, key, ...moreArgs), + fallback.promises[method](fallbackBucket, key, ...moreArgs) + ]) + } + } + + async function getFileStreamWithFallback(bucket, key, opts) { + const shouldCopy = + Settings.filestore.fallback.copyOnMiss && !opts.start && !opts.end + + try { + return await primary.promises.getFileStream(bucket, key, opts) + } catch (err) { + if (err instanceof NotFoundError) { + const fallbackBucket = _getFallbackBucket(bucket) + const fallbackStream = await fallback.promises.getFileStream( + fallbackBucket, + key, + opts + ) + // tee the stream to the client, and as a copy to the primary (if necessary) + // start listening on both straight away so that we don't consume bytes + // in one place before the other + const returnStream = new Stream.PassThrough() + pipeline(fallbackStream, returnStream) + + if (shouldCopy) { + const copyStream = new Stream.PassThrough() + pipeline(fallbackStream, copyStream) + + _copyStreamFromFallbackAndVerify( + copyStream, + fallbackBucket, + bucket, + key, + key + ).catch(() => { + // swallow errors, as this runs in the background and will log a warning + }) + } + return returnStream + } + throw err + } + } + + async function copyFileWithFallback(bucket, sourceKey, destKey) { + try { + return await primary.promises.copyFile(bucket, sourceKey, destKey) + } catch (err) { + if (err instanceof NotFoundError) { + const fallbackBucket = _getFallbackBucket(bucket) + const fallbackStream = await fallback.promises.getFileStream( + fallbackBucket, + sourceKey, + {} + ) + + const copyStream = new Stream.PassThrough() + pipeline(fallbackStream, copyStream) + + if (Settings.filestore.fallback.copyOnMiss) { + const missStream = new Stream.PassThrough() + pipeline(fallbackStream, missStream) + + // copy from sourceKey -> sourceKey + _copyStreamFromFallbackAndVerify( + missStream, + fallbackBucket, + bucket, + sourceKey, + sourceKey + ).then(() => { + // swallow errors, as this runs in the background and will log a warning + }) + } + // copy from sourceKey -> destKey + return _copyStreamFromFallbackAndVerify( + copyStream, + fallbackBucket, + bucket, + sourceKey, + destKey + ) + } + throw err + } + } + + function _getFallbackBucket(bucket) { + return Settings.filestore.fallback.buckets[bucket] + } + + function _wrapFallbackMethod(method) { + return async function(bucket, key, ...moreArgs) { + try { + return await primary.promises[method](bucket, key, ...moreArgs) + } catch (err) { + if (err instanceof NotFoundError) { + const fallbackBucket = _getFallbackBucket(bucket) + if (Settings.filestore.fallback.copyOnMiss) { + const fallbackStream = await fallback.promises.getFileStream( + fallbackBucket, + key, + {} + ) + // run in background + _copyStreamFromFallbackAndVerify( + fallbackStream, + fallbackBucket, + bucket, + key, + key + ).catch(err => { + logger.warn({ err }, 'failed to copy file from fallback') + }) + } + return fallback.promises[method](fallbackBucket, key, ...moreArgs) + } + throw err + } + } + } + + async function _copyStreamFromFallbackAndVerify( + stream, + sourceBucket, + destBucket, + sourceKey, + destKey + ) { + try { + let sourceMd5 + try { + sourceMd5 = await fallback.promises.getFileMd5Hash( + sourceBucket, + 
sourceKey + ) + } catch (err) { + logger.warn(err, 'error getting md5 hash from fallback persistor') + } + + await primary.promises.sendStream(destBucket, destKey, stream, sourceMd5) + } catch (err) { + const error = new WriteError({ + message: 'unable to copy file to destination persistor', + info: { + sourceBucket, + destBucket, + sourceKey, + destKey + } + }).withCause(err) + metrics.inc('fallback.copy.failure') + + try { + await primary.promises.deleteFile(destBucket, destKey) + } catch (err) { + error.info.cleanupError = new WriteError({ + message: 'unable to clean up destination copy artifact', + info: { + destBucket, + destKey + } + }).withCause(err) + } + + logger.warn({ error }, 'failed to copy file from fallback') + throw error + } + } + + return { + primaryPersistor: primary, + fallbackPersistor: fallback, + sendFile: primary.sendFile, + sendStream: primary.sendStream, + getFileStream: callbackify(getFileStreamWithFallback), + getFileMd5Hash: callbackify(_wrapFallbackMethod('getFileMd5Hash')), + deleteDirectory: callbackify( + _wrapMethodOnBothPersistors('deleteDirectory') + ), + getFileSize: callbackify(_wrapFallbackMethod('getFileSize')), + deleteFile: callbackify(_wrapMethodOnBothPersistors('deleteFile')), + copyFile: callbackify(copyFileWithFallback), + checkIfFileExists: callbackify(_wrapFallbackMethod('checkIfFileExists')), + directorySize: callbackify(_wrapFallbackMethod('directorySize')), + promises: { + sendFile: primary.promises.sendFile, + sendStream: primary.promises.sendStream, + getFileStream: getFileStreamWithFallback, + getFileMd5Hash: _wrapFallbackMethod('getFileMd5Hash'), + deleteDirectory: _wrapMethodOnBothPersistors('deleteDirectory'), + getFileSize: _wrapFallbackMethod('getFileSize'), + deleteFile: _wrapMethodOnBothPersistors('deleteFile'), + copyFile: copyFileWithFallback, + checkIfFileExists: _wrapFallbackMethod('checkIfFileExists'), + directorySize: _wrapFallbackMethod('directorySize') + } + } +} diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js new file mode 100644 index 0000000000..ea8132a9c9 --- /dev/null +++ b/services/filestore/app/js/PersistorHelper.js @@ -0,0 +1,105 @@ +const crypto = require('crypto') +const meter = require('stream-meter') +const Stream = require('stream') +const logger = require('logger-sharelatex') +const { WriteError, ReadError, NotFoundError } = require('./Errors') +const { promisify } = require('util') + +const pipeline = promisify(Stream.pipeline) + +module.exports = { + calculateStreamMd5, + verifyMd5, + getMeteredStream, + waitForStreamReady, + wrapError +} + +// returns a promise which resolves with the md5 hash of the stream +function calculateStreamMd5(stream) { + const hash = crypto.createHash('md5') + hash.setEncoding('hex') + + return pipeline(stream, hash).then(() => hash.read()) +} + +// verifies the md5 hash of a file against the supplied md5 or the one stored in +// storage if not supplied - deletes the new file if the md5 does not match and +// throws an error +async function verifyMd5(persistor, bucket, key, sourceMd5, destMd5 = null) { + if (!destMd5) { + destMd5 = await persistor.promises.getFileMd5Hash(bucket, key) + } + + if (sourceMd5 !== destMd5) { + try { + await persistor.promises.deleteFile(bucket, key) + } catch (err) { + logger.warn(err, 'error deleting file for invalid upload') + } + + throw new WriteError({ + message: 'source and destination hashes do not match', + info: { + sourceMd5, + destMd5, + bucket, + key + } + }) + } +} + +// returns 
the next stream in the pipeline, and calls the callback with the byte count +// when the stream finishes or receives an error +function getMeteredStream(stream, callback) { + const meteredStream = meter() + + pipeline(stream, meteredStream) + .then(() => { + callback(null, meteredStream.bytes) + }) + .catch(err => { + // on error, just send how many bytes we received before the stream stopped + callback(err, meteredStream.bytes) + }) + + return meteredStream +} + +// resolves when a stream is 'readable', or rejects if the stream throws an error +// before that happens - this lets us handle protocol-level errors before trying +// to read them +function waitForStreamReady(stream) { + return new Promise((resolve, reject) => { + const onError = function(err) { + reject(wrapError(err, 'error before stream became ready', {}, ReadError)) + } + const onStreamReady = function() { + stream.removeListener('readable', onStreamReady) + stream.removeListener('error', onError) + resolve(stream) + } + stream.on('readable', onStreamReady) + stream.on('error', onError) + }) +} + +function wrapError(error, message, params, ErrorType) { + if ( + error instanceof NotFoundError || + ['NoSuchKey', 'NotFound', 404, 'AccessDenied', 'ENOENT'].includes( + error.code + ) + ) { + return new NotFoundError({ + message: 'no such file', + info: params + }).withCause(error) + } else { + return new ErrorType({ + message: message, + info: params + }).withCause(error) + } +} diff --git a/services/filestore/app/js/PersistorManager.js b/services/filestore/app/js/PersistorManager.js index cca0cf0f36..32f6cd41f8 100644 --- a/services/filestore/app/js/PersistorManager.js +++ b/services/filestore/app/js/PersistorManager.js @@ -3,7 +3,8 @@ const logger = require('logger-sharelatex') logger.log( { - backend: settings.filestore.backend + backend: settings.filestore.backend, + fallback: settings.filestore.fallback && settings.filestore.fallback.backend }, 'Loading backend' ) @@ -11,14 +12,26 @@ if (!settings.filestore.backend) { throw new Error('no backend specified - config incomplete') } -switch (settings.filestore.backend) { - case 'aws-sdk': - case 's3': - module.exports = require('./S3PersistorManager') - break - case 'fs': - module.exports = require('./FSPersistorManager') - break - default: - throw new Error(`unknown filestore backend: ${settings.filestore.backend}`) +function getPersistor(backend) { + switch (backend) { + case 'aws-sdk': + case 's3': + return require('./S3Persistor') + case 'fs': + return require('./FSPersistor') + default: + throw new Error(`unknown filestore backend: ${backend}`) + } } + +let persistor = getPersistor(settings.filestore.backend) + +if (settings.filestore.fallback && settings.filestore.fallback.backend) { + const migrationPersistor = require('./MigrationPersistor') + persistor = migrationPersistor( + persistor, + getPersistor(settings.filestore.fallback.backend) + ) +} + +module.exports = persistor diff --git a/services/filestore/app/js/S3PersistorManager.js b/services/filestore/app/js/S3Persistor.js similarity index 63% rename from services/filestore/app/js/S3PersistorManager.js rename to services/filestore/app/js/S3Persistor.js index 52cadfbfbd..891d7be68e 100644 --- a/services/filestore/app/js/S3PersistorManager.js +++ b/services/filestore/app/js/S3Persistor.js @@ -6,7 +6,8 @@ https.globalAgent.maxSockets = 300 const settings = require('settings-sharelatex') const metrics = require('metrics-sharelatex') -const meter = require('stream-meter') +const PersistorHelper = 
require('./PersistorHelper') + const fs = require('fs') const S3 = require('aws-sdk/clients/s3') const { URL } = require('url') @@ -18,10 +19,11 @@ const { SettingsError } = require('./Errors') -module.exports = { +const S3Persistor = { sendFile: callbackify(sendFile), sendStream: callbackify(sendStream), getFileStream: callbackify(getFileStream), + getFileMd5Hash: callbackify(getFileMd5Hash), deleteDirectory: callbackify(deleteDirectory), getFileSize: callbackify(getFileSize), deleteFile: callbackify(deleteFile), @@ -32,6 +34,7 @@ module.exports = { sendFile, sendStream, getFileStream, + getFileMd5Hash, deleteDirectory, getFileSize, deleteFile, @@ -41,12 +44,18 @@ module.exports = { } } +module.exports = S3Persistor + +function hexToBase64(hex) { + return Buffer.from(hex, 'hex').toString('base64') +} + async function sendFile(bucketName, key, fsPath) { let readStream try { readStream = fs.createReadStream(fsPath) } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'error reading file from disk', { bucketName, key, fsPath }, @@ -56,22 +65,56 @@ async function sendFile(bucketName, key, fsPath) { return sendStream(bucketName, key, readStream) } -async function sendStream(bucketName, key, readStream) { +async function sendStream(bucketName, key, readStream, sourceMd5) { try { - const meteredStream = meter() - meteredStream.on('finish', () => { - metrics.count('s3.egress', meteredStream.bytes) - }) + // if there is no supplied md5 hash, we calculate the hash as the data passes through + let hashPromise + let b64Hash - await _getClientForBucket(bucketName) - .upload({ - Bucket: bucketName, - Key: key, - Body: readStream.pipe(meteredStream) - }) + if (sourceMd5) { + b64Hash = hexToBase64(sourceMd5) + } else { + hashPromise = PersistorHelper.calculateStreamMd5(readStream) + } + + const meteredStream = PersistorHelper.getMeteredStream( + readStream, + (_, byteCount) => { + // ignore the error parameter and just log the byte count + metrics.count('s3.egress', byteCount) + } + ) + + // if we have an md5 hash, pass this to S3 to verify the upload + const uploadOptions = { + Bucket: bucketName, + Key: key, + Body: meteredStream + } + if (b64Hash) { + uploadOptions.ContentMD5 = b64Hash + } + + const response = await _getClientForBucket(bucketName) + .upload(uploadOptions) .promise() + const destMd5 = _md5FromResponse(response) + + // if we didn't have an md5 hash, we should compare our computed one with S3's + // as we couldn't tell S3 about it beforehand + if (hashPromise) { + sourceMd5 = await hashPromise + // throws on mismatch + await PersistorHelper.verifyMd5( + S3Persistor, + bucketName, + key, + sourceMd5, + destMd5 + ) + } } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'upload to S3 failed', { bucketName, key }, @@ -91,25 +134,29 @@ async function getFileStream(bucketName, key, opts) { params.Range = `bytes=${opts.start}-${opts.end}` } - return new Promise((resolve, reject) => { - const stream = _getClientForBucket(bucketName) - .getObject(params) - .createReadStream() + const stream = _getClientForBucket(bucketName) + .getObject(params) + .createReadStream() - const meteredStream = meter() - meteredStream.on('finish', () => { - metrics.count('s3.ingress', meteredStream.bytes) - }) - - const onStreamReady = function() { - stream.removeListener('readable', onStreamReady) - resolve(stream.pipe(meteredStream)) + const meteredStream = PersistorHelper.getMeteredStream( + stream, + (_, byteCount) => { + // ignore the error parameter and just log 
the byte count + metrics.count('s3.ingress', byteCount) } - stream.on('readable', onStreamReady) - stream.on('error', err => { - reject(_wrapError(err, 'error reading from S3', params, ReadError)) - }) - }) + ) + + try { + await PersistorHelper.waitForStreamReady(stream) + return meteredStream + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error reading file from S3', + { bucketName, key, opts }, + ReadError + ) + } } async function deleteDirectory(bucketName, key) { @@ -120,7 +167,7 @@ async function deleteDirectory(bucketName, key) { .listObjects({ Bucket: bucketName, Prefix: key }) .promise() } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to list objects in S3', { bucketName, key }, @@ -141,7 +188,7 @@ async function deleteDirectory(bucketName, key) { }) .promise() } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'failed to delete objects in S3', { bucketName, key }, @@ -158,7 +205,7 @@ async function getFileSize(bucketName, key) { .promise() return response.ContentLength } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'error getting size of s3 object', { bucketName, key }, @@ -167,13 +214,31 @@ async function getFileSize(bucketName, key) { } } +async function getFileMd5Hash(bucketName, key) { + try { + const response = await _getClientForBucket(bucketName) + .headObject({ Bucket: bucketName, Key: key }) + .promise() + const md5 = _md5FromResponse(response) + return md5 + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error getting hash of s3 object', + { bucketName, key }, + ReadError + ) + } +} + async function deleteFile(bucketName, key) { try { await _getClientForBucket(bucketName) .deleteObject({ Bucket: bucketName, Key: key }) .promise() } catch (err) { - throw _wrapError( + // s3 does not give us a NotFoundError here + throw PersistorHelper.wrapError( err, 'failed to delete file in S3', { bucketName, key }, @@ -193,7 +258,12 @@ async function copyFile(bucketName, sourceKey, destKey) { .copyObject(params) .promise() } catch (err) { - throw _wrapError(err, 'failed to copy file in S3', params, WriteError) + throw PersistorHelper.wrapError( + err, + 'failed to copy file in S3', + params, + WriteError + ) } } @@ -205,7 +275,7 @@ async function checkIfFileExists(bucketName, key) { if (err instanceof NotFoundError) { return false } - throw _wrapError( + throw PersistorHelper.wrapError( err, 'error checking whether S3 object exists', { bucketName, key }, @@ -222,7 +292,7 @@ async function directorySize(bucketName, key) { return response.Contents.reduce((acc, item) => item.Size + acc, 0) } catch (err) { - throw _wrapError( + throw PersistorHelper.wrapError( err, 'error getting directory size in S3', { bucketName, key }, @@ -231,22 +301,6 @@ async function directorySize(bucketName, key) { } } -function _wrapError(error, message, params, ErrorType) { - if ( - ['NoSuchKey', 'NotFound', 'AccessDenied', 'ENOENT'].includes(error.code) - ) { - return new NotFoundError({ - message: 'no such file', - info: params - }).withCause(error) - } else { - return new ErrorType({ - message: message, - info: params - }).withCause(error) - } -} - const _clients = new Map() let _defaultClient @@ -309,3 +363,18 @@ function _buildClientOptions(bucketCredentials) { return options } + +function _md5FromResponse(response) { + const md5 = (response.ETag || '').replace(/[ "]/g, '') + if (!md5.match(/^[a-f0-9]{32}$/)) { + throw new ReadError({ + message: 's3 etag not in md5-hash format', + info: { 
+ md5, + eTag: response.ETag + } + }) + } + + return md5 +} diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 206f932a76..bb124ae8e0 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -7,6 +7,19 @@ if process.env['AWS_KEY'] && !process.env['AWS_ACCESS_KEY_ID'] if process.env['AWS_SECRET'] && !process.env['AWS_SECRET_ACCESS_KEY'] process.env['AWS_SECRET_ACCESS_KEY'] = process.env['AWS_SECRET'] +# pre-backend setting, fall back to old behaviour +unless process.env['BACKEND']? + if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? + process.env['BACKEND'] = "s3" + process.env['USER_FILES_BUCKET_NAME'] = process.env['AWS_S3_USER_FILES_BUCKET_NAME'] + process.env['TEMPLATE_FILES_BUCKET_NAME'] = process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME'] + process.env['PUBLIC_FILES_BUCKET_NAME'] = process.env['AWS_S3_PUBLIC_FILES_BUCKET_NAME'] + else + process.env['BACKEND'] = "fs" + process.env['USER_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../user_files") + process.env['TEMPLATE_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../template_files") + process.env['PUBLIC_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../public_files") + settings = internal: filestore: @@ -18,38 +31,28 @@ settings = # Choices are # s3 - Amazon S3 # fs - local filesystem - if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? - backend: "s3" - s3: + backend: process.env['BACKEND'] + + s3: + if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? key: process.env['AWS_ACCESS_KEY_ID'] secret: process.env['AWS_SECRET_ACCESS_KEY'] endpoint: process.env['AWS_S3_ENDPOINT'] - stores: - user_files: process.env['AWS_S3_USER_FILES_BUCKET_NAME'] - template_files: process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME'] - public_files: process.env['AWS_S3_PUBLIC_FILES_BUCKET_NAME'] - # if you are using S3, then fill in your S3 details below, - # or use env var with the same structure. - # s3: - # key: "" # default - # secret: "" # default - # - # s3BucketCreds: - # bucketname1: # secrets for bucketname1 - # auth_key: "" - # auth_secret: "" - # bucketname2: # secrets for bucketname2... - s3BucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS']? - else - backend: "fs" - stores: - # - # For local filesystem this is the directory to store the files in. - # Must contain full path, e.g. "/var/lib/sharelatex/data". - # This path must exist, not be tmpfs and be writable to by the user sharelatex is run as. - user_files: Path.resolve(__dirname + "/../user_files") - public_files: Path.resolve(__dirname + "/../public_files") - template_files: Path.resolve(__dirname + "/../template_files") + + stores: + user_files: process.env['USER_FILES_BUCKET_NAME'] + template_files: process.env['TEMPLATE_FILES_BUCKET_NAME'] + public_files: process.env['PUBLIC_FILES_BUCKET_NAME'] + + s3BucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS']? + + fallback: + if process.env['FALLBACK_BACKEND']? + backend: process.env['FALLBACK_BACKEND'] + # mapping of bucket names on the fallback, to bucket names on the primary. + # e.g. 
{ myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } + buckets: JSON.parse(process.env['FALLBACK_BUCKET_MAPPING'] || '{}') + copyOnMiss: process.env['COPY_ON_MISS'] == 'true' path: uploadFolder: Path.resolve(__dirname + "/../uploads") diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 7adbe68767..64902d42af 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -586,11 +586,6 @@ "event-target-shim": "^5.0.0" } }, - "accept-encoding": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/accept-encoding/-/accept-encoding-0.1.0.tgz", - "integrity": "sha1-XdiLjfcfHcLlzGuVZezOHjmaMz4=" - }, "accepts": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz", @@ -770,11 +765,6 @@ } } }, - "aws-sign": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.1.tgz", - "integrity": "sha1-uWGyLwuqTxXsJBFA83dtbBQoVtA=" - }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", @@ -837,14 +827,6 @@ "tweetnacl": "^0.14.3" } }, - "best-encoding": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/best-encoding/-/best-encoding-0.1.1.tgz", - "integrity": "sha1-GVIT2rysBFgYuAe3ox+Dn63cl04=", - "requires": { - "accept-encoding": "~0.1.0" - } - }, "bignumber.js": { "version": "7.2.1", "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", @@ -863,14 +845,6 @@ "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" }, - "bl": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-0.7.0.tgz", - "integrity": "sha1-P7BnBgKsKHjrdw3CA58YNr5irls=", - "requires": { - "readable-stream": "~1.0.2" - } - }, "body-parser": { "version": "1.18.3", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.3.tgz", @@ -894,14 +868,6 @@ "integrity": "sha1-tcCeF8rNET0Rt7s+04TMASmU2Gs=", "dev": true }, - "boom": { - "version": "0.3.8", - "resolved": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz", - "integrity": "sha1-yM2wQUNZEnQWKMBE7Mcy0dF8Ceo=", - "requires": { - "hoek": "0.7.x" - } - }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -975,11 +941,6 @@ "quick-lru": "^4.0.1" } }, - "caseless": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.3.0.tgz", - "integrity": "sha1-U06XkWOH07cGtk/eu6xGQ4RQk08=" - }, "chai": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz", @@ -1097,14 +1058,6 @@ "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", "dev": true }, - "combined-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz", - "integrity": "sha1-ATfmV7qlp1QcV6w3rF/AfXO03B8=", - "requires": { - "delayed-stream": "0.0.5" - } - }, "common-tags": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", @@ -1151,11 +1104,6 @@ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" }, - "cookie-jar": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz", - "integrity": "sha1-ZOzAasl423leS1KQy+SLo3gUAPo=" - }, "cookie-signature": { "version": "1.0.6", "resolved": 
"https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", @@ -1193,14 +1141,6 @@ } } }, - "cryptiles": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz", - "integrity": "sha1-GlVnNPBtJLo0hirpy55wmjr7/xw=", - "requires": { - "boom": "0.3.x" - } - }, "dashdash": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", @@ -1252,11 +1192,6 @@ "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", "integrity": "sha1-7+6/uPVFV5yzlrOnIkQ+yW0UxQ4=" }, - "delayed-stream": { - "version": "0.0.5", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", - "integrity": "sha1-1LH0OpPoKW3+AmlPRoC8N6MTxz8=" - }, "depd": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", @@ -2052,28 +1987,6 @@ } } }, - "forever-agent": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz", - "integrity": "sha1-4cJcetROCcOPIzh2x2/MJP+EOx8=" - }, - "form-data": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz", - "integrity": "sha1-2zRaU3jYau6x7V1VO4aawZLS9e0=", - "requires": { - "async": "~0.2.7", - "combined-stream": "~0.0.4", - "mime": "~1.2.2" - }, - "dependencies": { - "mime": { - "version": "1.2.11", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", - "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" - } - } - }, "forwarded": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", @@ -2297,17 +2210,6 @@ "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", "dev": true }, - "hawk": { - "version": "0.10.2", - "resolved": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz", - "integrity": "sha1-mzYd7pWpMWQObVBOBWCaj8OsRdI=", - "requires": { - "boom": "0.3.x", - "cryptiles": "0.1.x", - "hoek": "0.7.x", - "sntp": "0.1.x" - } - }, "he": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", @@ -2323,11 +2225,6 @@ "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", "integrity": "sha1-jhzkvvNqdPfVcjw/swkMKGAHczg=" }, - "hoek": { - "version": "0.7.6", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz", - "integrity": "sha1-YPvZBFV1Qc0rh5Wr8wihs3cOFVo=" - }, "hosted-git-info": { "version": "2.8.5", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz", @@ -2667,28 +2564,6 @@ "graceful-fs": "^4.1.9" } }, - "knox": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/knox/-/knox-0.9.2.tgz", - "integrity": "sha1-NzZZNmniTwJP2vcjtqHcSv2DmnE=", - "requires": { - "debug": "^1.0.2", - "mime": "*", - "once": "^1.3.0", - "stream-counter": "^1.0.0", - "xml2js": "^0.4.4" - }, - "dependencies": { - "debug": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/debug/-/debug-1.0.5.tgz", - "integrity": "sha1-9yQSF0MPmd7EwrRz6rkiKOh0wqw=", - "requires": { - "ms": "2.0.0" - } - } - } - }, "levn": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", @@ -3318,55 +3193,6 @@ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.4.tgz", "integrity": "sha1-1nOGYrZhvhnicR7wGqOxghLxMDA=" }, - "node-transloadit": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/node-transloadit/-/node-transloadit-0.0.4.tgz", - "integrity": "sha1-4ZoHheON94NblO2AANHjXmg7zsE=", - "requires": { - 
"request": "~2.16.6", - "underscore": "1.2.1" - }, - "dependencies": { - "json-stringify-safe": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz", - "integrity": "sha1-nbew5TDH8onF6MhDKvGRwv91pbM=" - }, - "mime": { - "version": "1.2.11", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", - "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" - }, - "qs": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz", - "integrity": "sha1-MbGtBYVnZRxSaSFQa5qHk5EaA4Q=" - }, - "request": { - "version": "2.16.6", - "resolved": "https://registry.npmjs.org/request/-/request-2.16.6.tgz", - "integrity": "sha1-hy/kRa5y3iZrN4edatfclI+gHK0=", - "requires": { - "aws-sign": "~0.2.0", - "cookie-jar": "~0.2.0", - "forever-agent": "~0.2.0", - "form-data": "~0.0.3", - "hawk": "~0.10.2", - "json-stringify-safe": "~3.0.0", - "mime": "~1.2.7", - "node-uuid": "~1.4.0", - "oauth-sign": "~0.2.0", - "qs": "~0.5.4", - "tunnel-agent": "~0.2.0" - } - }, - "underscore": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz", - "integrity": "sha1-/FxrB2VnPZKi1KyLTcCqiHAuK9Q=" - } - } - }, "node-uuid": { "version": "1.4.8", "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", @@ -3392,11 +3218,6 @@ } } }, - "oauth-sign": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz", - "integrity": "sha1-oOahcV2u0GLzIrYit/5a/RA1tuI=" - }, "object-inspect": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", @@ -4399,29 +4220,6 @@ "read-pkg": "^2.0.0" } }, - "readable-stream": { - "version": "1.0.34", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", - "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "0.0.1", - "string_decoder": "~0.10.x" - }, - "dependencies": { - "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" - } - } - }, - "recluster": { - "version": "0.3.7", - "resolved": "https://registry.npmjs.org/recluster/-/recluster-0.3.7.tgz", - "integrity": "sha1-aKRx3ZC2obl3ZjTPdpZAWutWeJU=" - }, "regexpp": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", @@ -4594,24 +4392,6 @@ "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true }, - "response": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/response/-/response-0.14.0.tgz", - "integrity": "sha1-BmNS/z5rAm0EdYCUB2Y7Rob9JpY=", - "requires": { - "best-encoding": "^0.1.1", - "bl": "~0.7.0", - "caseless": "^0.3.0", - "mime": "~1.2.11" - }, - "dependencies": { - "mime": { - "version": "1.2.11", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", - "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" - } - } - }, "restore-cursor": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", @@ -4826,14 +4606,6 @@ } } }, - "sntp": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz", - "integrity": "sha1-XvSBuVGnspr/30r9fyaDj8ESD4Q=", - "requires": { - "hoek": "0.7.x" - } - }, "source-map": { "version": "0.6.1", "resolved": 
"https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", @@ -4921,49 +4693,11 @@ "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=" }, - "stream-browserify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.1.tgz", - "integrity": "sha1-ZiZu5fm9uZQKTkUUyvtDu3Hlyds=", - "requires": { - "inherits": "~2.0.1", - "readable-stream": "^2.0.2" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.6", - "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha1-sRwn2IuP8fvgcGQ8+UsMea4bCq8=", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha1-nPFhG6YmhdcDCunkujQUnDrwP8g=", - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, "stream-buffers": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz", "integrity": "sha1-GBwI1bs2kARfaUAbmuanoM8zE/w=" }, - "stream-counter": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-1.0.0.tgz", - "integrity": "sha1-kc8lac5NxQYf6816yyY5SloRR1E=" - }, "stream-meter": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/stream-meter/-/stream-meter-1.0.4.tgz", @@ -5001,6 +4735,12 @@ "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=" }, + "streamifier": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/streamifier/-/streamifier-0.1.1.tgz", + "integrity": "sha1-l+mNj6TRBdYqJpHR3AfoINuN/E8=", + "dev": true + }, "string-width": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", @@ -5042,11 +4782,6 @@ "function-bind": "^1.1.1" } }, - "string_decoder": { - "version": "0.10.31", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" - }, "strip-ansi": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", @@ -5240,11 +4975,6 @@ "integrity": "sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==", "dev": true }, - "tunnel-agent": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz", - "integrity": "sha1-aFPCr7GyEJ5FYp5JK9419Fnqaeg=" - }, "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 14e35cd8a2..6f1dde0e8a 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -28,21 +28,16 @@ "fs-extra": "^1.0.0", "glob": "^7.1.6", "heapdump": "^0.3.2", - "knox": "~0.9.1", "logger-sharelatex": "^1.7.0", "metrics-sharelatex": "^2.2.0", "mocha": "5.2.0", - "node-transloadit": "0.0.4", "node-uuid": "~1.4.1", "pngcrush": "0.0.3", "range-parser": "^1.0.2", - "recluster": "^0.3.7", "request": "^2.88.0", "request-promise-native": "^1.0.8", - "response": "0.14.0", "rimraf": "2.2.8", "settings-sharelatex": "^1.1.0", - 
"stream-browserify": "^2.0.1", "stream-buffers": "~0.2.5", "stream-meter": "^1.0.4", "underscore": "~1.5.2" @@ -68,6 +63,7 @@ "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "2.0.3", "sinon": "7.1.1", - "sinon-chai": "^3.3.0" + "sinon-chai": "^3.3.0", + "streamifier": "^0.1.1" } } diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 718d53bcf8..20564e2d40 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -56,6 +56,7 @@ class FilestoreApp { } this.initing = false + this.persistor = require('../../../app/js/PersistorManager') } async waitForInit() { diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index d7dfbce57c..fd1baed474 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -11,6 +11,7 @@ const S3 = require('aws-sdk/clients/s3') const Stream = require('stream') const request = require('request') const { promisify } = require('util') +const streamifier = require('streamifier') chai.use(require('chai-as-promised')) const fsWriteFile = promisify(fs.writeFile) @@ -25,6 +26,20 @@ async function getMetric(filestoreUrl, metric) { return parseInt(found ? found[1] : 0) || 0 } +if (!process.env.AWS_ACCESS_KEY_ID) { + throw new Error('please provide credentials for the AWS S3 test server') +} + +function streamToString(stream) { + const chunks = [] + return new Promise((resolve, reject) => { + stream.on('data', chunk => chunks.push(chunk)) + stream.on('error', reject) + stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8'))) + stream.resume() + }) +} + // store settings for multiple backends, so that we can test each one. 
// fs will always be available - add others if they are configured const BackendSettings = { @@ -35,11 +50,8 @@ const BackendSettings = { public_files: Path.resolve(__dirname, '../../../public_files'), template_files: Path.resolve(__dirname, '../../../template_files') } - } -} - -if (process.env.AWS_ACCESS_KEY_ID) { - BackendSettings.S3Persistor = { + }, + S3Persistor: { backend: 's3', s3: { key: process.env.AWS_ACCESS_KEY_ID, @@ -52,6 +64,62 @@ if (process.env.AWS_ACCESS_KEY_ID) { template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME, public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME } + }, + FallbackS3ToFSPersistor: { + backend: 's3', + s3: { + key: process.env.AWS_ACCESS_KEY_ID, + secret: process.env.AWS_SECRET_ACCESS_KEY, + endpoint: process.env.AWS_S3_ENDPOINT, + pathStyle: true + }, + stores: { + user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, + template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME, + public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME + }, + fallback: { + backend: 'fs', + buckets: { + [process.env.AWS_S3_USER_FILES_BUCKET_NAME]: Path.resolve( + __dirname, + '../../../user_files' + ), + [process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME]: Path.resolve( + __dirname, + '../../../public_files' + ), + [process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME]: Path.resolve( + __dirname, + '../../../template_files' + ) + } + } + }, + FallbackFSToS3Persistor: { + backend: 'fs', + s3: { + key: process.env.AWS_ACCESS_KEY_ID, + secret: process.env.AWS_SECRET_ACCESS_KEY, + endpoint: process.env.AWS_S3_ENDPOINT, + pathStyle: true + }, + stores: { + user_files: Path.resolve(__dirname, '../../../user_files'), + public_files: Path.resolve(__dirname, '../../../public_files'), + template_files: Path.resolve(__dirname, '../../../template_files') + }, + fallback: { + backend: 's3', + buckets: { + [Path.resolve(__dirname, '../../../user_files')]: process.env + .AWS_S3_USER_FILES_BUCKET_NAME, + [Path.resolve(__dirname, '../../../public_files')]: process.env + .AWS_S3_PUBLIC_FILES_BUCKET_NAME, + [Path.resolve(__dirname, '../../../template_files')]: process.env + .AWS_S3_TEMPLATE_FILES_BUCKET_NAME + } + } } } @@ -63,7 +131,7 @@ describe('Filestore', function() { // redefine the test suite for every available backend Object.keys(BackendSettings).forEach(backend => { describe(backend, function() { - let app, previousEgress, previousIngress + let app, previousEgress, previousIngress, projectId before(async function() { // create the app with the relevant filestore settings @@ -84,6 +152,7 @@ describe('Filestore', function() { getMetric(filestoreUrl, 's3_ingress') ]) } + projectId = `acceptance_tests_${Math.random()}` }) it('should send a 200 for the status endpoint', async function() { @@ -100,23 +169,21 @@ describe('Filestore', function() { }) describe('with a file on the server', function() { - let fileId, fileUrl + let fileId, fileUrl, constantFileContent const localFileReadPath = '/tmp/filestore_acceptance_tests_file_read.txt' - const constantFileContent = [ - 'hello world', - `line 2 goes here ${Math.random()}`, - 'there are 3 lines in all' - ].join('\n') - - before(async function() { - await fsWriteFile(localFileReadPath, constantFileContent) - }) beforeEach(async function() { fileId = Math.random() - fileUrl = `${filestoreUrl}/project/acceptance_tests/file/${directoryName}%2F${fileId}` + fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` + constantFileContent = [ + 'hello world', + `line 2 goes here ${Math.random()}`, + 'there are 3 
lines in all' + ].join('\n') + + await fsWriteFile(localFileReadPath, constantFileContent) const writeStream = request.post(fileUrl) const readStream = fs.createReadStream(localFileReadPath) @@ -177,7 +244,7 @@ describe('Filestore', function() { }) it('should be able to copy files', async function() { - const newProjectID = 'acceptance_tests_copyied_project' + const newProjectID = `acceptance_tests_copied_project_${Math.random()}` const newFileId = Math.random() const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}` const opts = { @@ -185,7 +252,7 @@ describe('Filestore', function() { uri: newFileUrl, json: { source: { - project_id: 'acceptance_tests', + project_id: projectId, file_id: `${directoryName}/${fileId}` } } @@ -198,6 +265,18 @@ describe('Filestore', function() { expect(response.body).to.equal(constantFileContent) }) + it('should be able to overwrite the file', async function() { + const newContent = `here is some different content, ${Math.random()}` + const writeStream = request.post(fileUrl) + const readStream = streamifier.createReadStream(newContent) + // hack to consume the result to ensure the http request has been fully processed + const resultStream = fs.createWriteStream('/dev/null') + await pipeline(readStream, writeStream, resultStream) + + const response = await rp.get(fileUrl) + expect(response.body).to.equal(newContent) + }) + if (backend === 'S3Persistor') { it('should record an egress metric for the upload', async function() { const metric = await getMetric(filestoreUrl, 's3_egress') @@ -227,7 +306,7 @@ describe('Filestore', function() { }) describe('with multiple files', function() { - let fileIds, fileUrls, project + let fileIds, fileUrls const directoryName = 'directory' const localFileReadPaths = [ '/tmp/filestore_acceptance_tests_file_read_1.txt', @@ -254,11 +333,10 @@ describe('Filestore', function() { }) beforeEach(async function() { - project = `acceptance_tests_${Math.random()}` fileIds = [Math.random(), Math.random()] fileUrls = [ - `${filestoreUrl}/project/${project}/file/${directoryName}%2F${fileIds[0]}`, - `${filestoreUrl}/project/${project}/file/${directoryName}%2F${fileIds[1]}` + `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileIds[0]}`, + `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileIds[1]}` ] const writeStreams = [ @@ -282,7 +360,7 @@ describe('Filestore', function() { it('should get the directory size', async function() { const response = await rp.get( - `${filestoreUrl}/project/${project}/size` + `${filestoreUrl}/project/${projectId}/size` ) expect(parseInt(JSON.parse(response.body)['total bytes'])).to.equal( constantFileContents[0].length + constantFileContents[1].length @@ -292,10 +370,10 @@ describe('Filestore', function() { if (backend === 'S3Persistor') { describe('with a file in a specific bucket', function() { - let constantFileContents, fileId, fileUrl, bucketName + let constantFileContent, fileId, fileUrl, bucketName beforeEach(async function() { - constantFileContents = `This is a file in a different S3 bucket ${Math.random()}` + constantFileContent = `This is a file in a different S3 bucket ${Math.random()}` fileId = Math.random().toString() bucketName = Math.random().toString() fileUrl = `${filestoreUrl}/bucket/${bucketName}/key/${fileId}` @@ -320,14 +398,368 @@ describe('Filestore', function() { .upload({ Bucket: bucketName, Key: fileId, - Body: constantFileContents + Body: constantFileContent }) .promise() }) it('should get the file from the 
specified bucket', async function() { const response = await rp.get(fileUrl) - expect(response.body).to.equal(constantFileContents) + expect(response.body).to.equal(constantFileContent) + }) + }) + } + + if (BackendSettings[backend].fallback) { + describe('with a fallback', function() { + async function uploadStringToPersistor( + persistor, + bucket, + key, + content + ) { + const fileStream = streamifier.createReadStream(content) + await persistor.promises.sendStream(bucket, key, fileStream) + } + + async function getStringFromPersistor(persistor, bucket, key) { + const stream = await persistor.promises.getFileStream( + bucket, + key, + {} + ) + return streamToString(stream) + } + + async function expectPersistorToHaveFile( + persistor, + bucket, + key, + content + ) { + const foundContent = await getStringFromPersistor( + persistor, + bucket, + key + ) + expect(foundContent).to.equal(content) + } + + async function expectPersistorNotToHaveFile(persistor, bucket, key) { + await expect( + getStringFromPersistor(persistor, bucket, key) + ).to.eventually.have.been.rejected.with.property( + 'name', + 'NotFoundError' + ) + } + + let constantFileContent, + fileId, + fileKey, + fileUrl, + bucket, + fallbackBucket + + beforeEach(function() { + constantFileContent = `This is yet more file content ${Math.random()}` + fileId = Math.random().toString() + fileKey = `${projectId}/${directoryName}/${fileId}` + fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` + + bucket = Settings.filestore.stores.user_files + fallbackBucket = Settings.filestore.fallback.buckets[bucket] + }) + + describe('with a file in the fallback bucket', function() { + beforeEach(async function() { + await uploadStringToPersistor( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + it('should not find file in the primary', async function() { + await expectPersistorNotToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey + ) + }) + + it('should find the file in the fallback', async function() { + await expectPersistorToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + describe('when copyOnMiss is disabled', function() { + beforeEach(function() { + Settings.filestore.fallback.copyOnMiss = false + }) + + it('should fetch the file', async function() { + const res = await rp.get(fileUrl) + expect(res.body).to.equal(constantFileContent) + }) + + it('should not copy the file to the primary', async function() { + await rp.get(fileUrl) + + await expectPersistorNotToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey + ) + }) + }) + + describe('when copyOnMiss is enabled', function() { + beforeEach(function() { + Settings.filestore.fallback.copyOnMiss = true + }) + + it('should fetch the file', async function() { + const res = await rp.get(fileUrl) + expect(res.body).to.equal(constantFileContent) + }) + + it('copies the file to the primary', async function() { + await rp.get(fileUrl) + // wait for the file to copy in the background + await promisify(setTimeout)(1000) + + await expectPersistorToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey, + constantFileContent + ) + }) + }) + + describe('when copying a file', function() { + let newFileId, newFileUrl, newFileKey, opts + + beforeEach(function() { + const newProjectID = `acceptance_tests_copied_project_${Math.random()}` + newFileId = Math.random() + newFileUrl = 
`${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}` + newFileKey = `${newProjectID}/${directoryName}/${newFileId}` + + opts = { + method: 'put', + uri: newFileUrl, + json: { + source: { + project_id: projectId, + file_id: `${directoryName}/${fileId}` + } + } + } + }) + + describe('when copyOnMiss is false', function() { + beforeEach(async function() { + Settings.filestore.fallback.copyOnMiss = false + + const response = await rp(opts) + expect(response.statusCode).to.equal(200) + }) + + it('should leave the old file in the old bucket', async function() { + await expectPersistorToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + it('should not create a new file in the old bucket', async function() { + await expectPersistorNotToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + newFileKey + ) + }) + + it('should create a new file in the new bucket', async function() { + await expectPersistorToHaveFile( + app.persistor.primaryPersistor, + bucket, + newFileKey, + constantFileContent + ) + }) + + it('should not copy the old file to the primary with the old key', async function() { + // wait for the file to copy in the background + await promisify(setTimeout)(1000) + + await expectPersistorNotToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey + ) + }) + }) + + describe('when copyOnMiss is true', function() { + beforeEach(async function() { + Settings.filestore.fallback.copyOnMiss = true + + const response = await rp(opts) + expect(response.statusCode).to.equal(200) + }) + + it('should leave the old file in the old bucket', async function() { + await expectPersistorToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + it('should not create a new file in the old bucket', async function() { + await expectPersistorNotToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + newFileKey + ) + }) + + it('should create a new file in the new bucket', async function() { + await expectPersistorToHaveFile( + app.persistor.primaryPersistor, + bucket, + newFileKey, + constantFileContent + ) + }) + + it('should copy the old file to the primary with the old key', async function() { + // wait for the file to copy in the background + await promisify(setTimeout)(1000) + + await expectPersistorToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey, + constantFileContent + ) + }) + }) + }) + }) + + describe('when sending a file', function() { + beforeEach(async function() { + const writeStream = request.post(fileUrl) + const readStream = streamifier.createReadStream( + constantFileContent + ) + // hack to consume the result to ensure the http request has been fully processed + const resultStream = fs.createWriteStream('/dev/null') + await pipeline(readStream, writeStream, resultStream) + }) + + it('should store the file on the primary', async function() { + await expectPersistorToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey, + constantFileContent + ) + }) + + it('should not store the file on the fallback', async function() { + await expectPersistorNotToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + `${projectId}/${directoryName}/${fileId}` + ) + }) + }) + + describe('when deleting a file', function() { + describe('when the file exists on the primary', function() { + beforeEach(async function() { + await uploadStringToPersistor( + app.persistor.primaryPersistor, + bucket, + fileKey, + 
constantFileContent + ) + }) + + it('should delete the file', async function() { + const response = await rp.del(fileUrl) + expect(response.statusCode).to.equal(204) + await expect( + rp.get(fileUrl) + ).to.eventually.be.rejected.and.have.property('statusCode', 404) + }) + }) + + describe('when the file exists on the fallback', function() { + beforeEach(async function() { + await uploadStringToPersistor( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + it('should delete the file', async function() { + const response = await rp.del(fileUrl) + expect(response.statusCode).to.equal(204) + await expect( + rp.get(fileUrl) + ).to.eventually.be.rejected.and.have.property('statusCode', 404) + }) + }) + + describe('when the file exists on both the primary and the fallback', function() { + beforeEach(async function() { + await uploadStringToPersistor( + app.persistor.primaryPersistor, + bucket, + fileKey, + constantFileContent + ) + await uploadStringToPersistor( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + it('should delete the files', async function() { + const response = await rp.del(fileUrl) + expect(response.statusCode).to.equal(204) + await expect( + rp.get(fileUrl) + ).to.eventually.be.rejected.and.have.property('statusCode', 404) + }) + }) + + describe('when the file does not exist', function() { + it('should return return 204', async function() { + // S3 doesn't give us a 404 when the object doesn't exist, so to stay + // consistent we merrily return 204 ourselves here as well + const response = await rp.del(fileUrl) + expect(response.statusCode).to.equal(204) + }) + }) }) }) } @@ -341,7 +773,7 @@ describe('Filestore', function() { beforeEach(async function() { fileId = Math.random() - fileUrl = `${filestoreUrl}/project/acceptance_tests/file/${directoryName}%2F${fileId}` + fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` const stat = await fsStat(localFileReadPath) localFileSize = stat.size const writeStream = request.post(fileUrl) diff --git a/services/filestore/test/unit/js/FSPersistorManagerTests.js b/services/filestore/test/unit/js/FSPersistorTests.js similarity index 69% rename from services/filestore/test/unit/js/FSPersistorManagerTests.js rename to services/filestore/test/unit/js/FSPersistorTests.js index 3b3b4bf417..0a09869bc0 100644 --- a/services/filestore/test/unit/js/FSPersistorManagerTests.js +++ b/services/filestore/test/unit/js/FSPersistorTests.js @@ -7,24 +7,37 @@ const Errors = require('../../../app/js/Errors') chai.use(require('sinon-chai')) chai.use(require('chai-as-promised')) -const modulePath = '../../../app/js/FSPersistorManager.js' +const modulePath = '../../../app/js/FSPersistor.js' -describe('FSPersistorManagerTests', function() { +describe('FSPersistorTests', function() { const stat = { size: 4, isFile: sinon.stub().returns(true) } const fd = 1234 - const readStream = 'readStream' const writeStream = 'writeStream' const remoteStream = 'remoteStream' const tempFile = '/tmp/potato.txt' const location = '/foo' const error = new Error('guru meditation error') + const md5 = 'ffffffff' const files = ['animals/wombat.tex', 'vegetables/potato.tex'] const globs = [`${location}/${files[0]}`, `${location}/${files[1]}`] const filteredFilenames = ['animals_wombat.tex', 'vegetables_potato.tex'] - let fs, rimraf, stream, LocalFileWriter, FSPersistorManager, glob + let fs, + rimraf, + stream, + LocalFileWriter, + FSPersistor, + glob, + 
readStream, + crypto, + Hash beforeEach(function() { + readStream = { + name: 'readStream', + on: sinon.stub().yields(), + pipe: sinon.stub() + } fs = { createReadStream: sinon.stub().returns(readStream), createWriteStream: sinon.stub().returns(writeStream), @@ -41,14 +54,26 @@ describe('FSPersistorManagerTests', function() { deleteFile: sinon.stub().resolves() } } - FSPersistorManager = SandboxedModule.require(modulePath, { + Hash = { + end: sinon.stub(), + read: sinon.stub().returns(md5), + setEncoding: sinon.stub() + } + crypto = { + createHash: sinon.stub().returns(Hash) + } + FSPersistor = SandboxedModule.require(modulePath, { requires: { './LocalFileWriter': LocalFileWriter, './Errors': Errors, fs, glob, rimraf, - stream + stream, + crypto, + // imported by PersistorHelper but otherwise unused here + 'stream-meter': {}, + 'logger-sharelatex': {} }, globals: { console } }) @@ -57,7 +82,7 @@ describe('FSPersistorManagerTests', function() { describe('sendFile', function() { const localFilesystemPath = '/path/to/local/file' it('should copy the file', async function() { - await FSPersistorManager.promises.sendFile( + await FSPersistor.promises.sendFile( location, files[0], localFilesystemPath @@ -72,33 +97,21 @@ describe('FSPersistorManagerTests', function() { it('should return an error if the file cannot be stored', async function() { stream.pipeline.yields(error) await expect( - FSPersistorManager.promises.sendFile( - location, - files[0], - localFilesystemPath - ) + FSPersistor.promises.sendFile(location, files[0], localFilesystemPath) ).to.eventually.be.rejected.and.have.property('cause', error) }) }) describe('sendStream', function() { it('should send the stream to LocalFileWriter', async function() { - await FSPersistorManager.promises.sendStream( - location, - files[0], - remoteStream - ) + await FSPersistor.promises.sendStream(location, files[0], remoteStream) expect(LocalFileWriter.promises.writeStream).to.have.been.calledWith( remoteStream ) }) it('should delete the temporary file', async function() { - await FSPersistorManager.promises.sendStream( - location, - files[0], - remoteStream - ) + await FSPersistor.promises.sendStream(location, files[0], remoteStream) expect(LocalFileWriter.promises.deleteFile).to.have.been.calledWith( tempFile ) @@ -107,30 +120,55 @@ describe('FSPersistorManagerTests', function() { it('should return the error from LocalFileWriter', async function() { LocalFileWriter.promises.writeStream.rejects(error) await expect( - FSPersistorManager.promises.sendStream(location, files[0], remoteStream) + FSPersistor.promises.sendStream(location, files[0], remoteStream) ).to.eventually.be.rejectedWith(error) }) it('should send the temporary file to the filestore', async function() { - await FSPersistorManager.promises.sendStream( - location, - files[0], - remoteStream - ) + await FSPersistor.promises.sendStream(location, files[0], remoteStream) expect(fs.createReadStream).to.have.been.calledWith(tempFile) }) + + describe('when the md5 hash does not match', function() { + it('should return a write error', async function() { + await expect( + FSPersistor.promises.sendStream( + location, + files[0], + remoteStream, + '00000000' + ) + ) + .to.eventually.be.rejected.and.be.an.instanceOf(Errors.WriteError) + .and.have.property('message', 'md5 hash mismatch') + }) + + it('deletes the copied file', async function() { + try { + await FSPersistor.promises.sendStream( + location, + files[0], + remoteStream, + '00000000' + ) + } catch (_) {} + 
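// A minimal sketch (assumed helper name, not the project's PersistorHelper) of how an md5
// digest can be computed from a stream with Node's built-in crypto module — the behaviour
// that the stubbed `crypto.createHash`/`Hash` objects in this suite stand in for.
const crypto = require('crypto')

function calculateStreamMd5Sketch(stream) {
  return new Promise((resolve, reject) => {
    const hash = crypto.createHash('md5')
    hash.setEncoding('hex')
    stream.on('error', reject)
    // 'finish' fires once the piped source has ended and the hash input is complete
    hash.on('finish', () => resolve(hash.read()))
    stream.pipe(hash)
  })
}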
expect(LocalFileWriter.promises.deleteFile).to.have.been.calledWith( + `${location}/${filteredFilenames[0]}` + ) + }) + }) }) describe('getFileStream', function() { it('should use correct file location', async function() { - await FSPersistorManager.promises.getFileStream(location, files[0], {}) + await FSPersistor.promises.getFileStream(location, files[0], {}) expect(fs.open).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) }) it('should pass the options to createReadStream', async function() { - await FSPersistorManager.promises.getFileStream(location, files[0], { + await FSPersistor.promises.getFileStream(location, files[0], { start: 0, end: 8 }) @@ -146,18 +184,14 @@ describe('FSPersistorManagerTests', function() { err.code = 'ENOENT' fs.open.yields(err) - await expect( - FSPersistorManager.promises.getFileStream(location, files[0], {}) - ) + await expect(FSPersistor.promises.getFileStream(location, files[0], {})) .to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) .and.have.property('cause', err) }) it('should wrap any other error', async function() { fs.open.yields(error) - await expect( - FSPersistorManager.promises.getFileStream(location, files[0], {}) - ) + await expect(FSPersistor.promises.getFileStream(location, files[0], {})) .to.eventually.be.rejectedWith('failed to open file for streaming') .and.be.an.instanceOf(Errors.ReadError) .and.have.property('cause', error) @@ -181,18 +215,18 @@ describe('FSPersistorManagerTests', function() { it('should return the file size', async function() { expect( - await FSPersistorManager.promises.getFileSize(location, files[0]) + await FSPersistor.promises.getFileSize(location, files[0]) ).to.equal(size) }) it('should throw a NotFoundError if the file does not exist', async function() { await expect( - FSPersistorManager.promises.getFileSize(location, badFilename) + FSPersistor.promises.getFileSize(location, badFilename) ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) }) it('should wrap any other error', async function() { - await expect(FSPersistorManager.promises.getFileSize(location, 'raccoon')) + await expect(FSPersistor.promises.getFileSize(location, 'raccoon')) .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) .and.have.property('cause', error) }) @@ -200,28 +234,28 @@ describe('FSPersistorManagerTests', function() { describe('copyFile', function() { it('Should open the source for reading', async function() { - await FSPersistorManager.promises.copyFile(location, files[0], files[1]) + await FSPersistor.promises.copyFile(location, files[0], files[1]) expect(fs.createReadStream).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) }) it('Should open the target for writing', async function() { - await FSPersistorManager.promises.copyFile(location, files[0], files[1]) + await FSPersistor.promises.copyFile(location, files[0], files[1]) expect(fs.createWriteStream).to.have.been.calledWith( `${location}/${filteredFilenames[1]}` ) }) it('Should pipe the source to the target', async function() { - await FSPersistorManager.promises.copyFile(location, files[0], files[1]) + await FSPersistor.promises.copyFile(location, files[0], files[1]) expect(stream.pipeline).to.have.been.calledWith(readStream, writeStream) }) }) describe('deleteFile', function() { it('Should call unlink with correct options', async function() { - await FSPersistorManager.promises.deleteFile(location, files[0]) + await FSPersistor.promises.deleteFile(location, files[0]) 
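// A rough sketch, under assumed names, of the copy pattern the copyFile tests above assert
// on: a read stream piped into a write stream via stream.pipeline. The real FSPersistor
// also maps keys onto filtered flat filenames, which is elided here.
const fs = require('fs')
const { promisify } = require('util')
const pipeline = promisify(require('stream').pipeline)

async function copyFileSketch(location, sourceKey, targetKey) {
  const sourceStream = fs.createReadStream(`${location}/${sourceKey}`)
  const targetStream = fs.createWriteStream(`${location}/${targetKey}`)
  // stream.pipeline propagates errors from either stream and cleans both up
  await pipeline(sourceStream, targetStream)
}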
expect(fs.unlink).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) @@ -230,14 +264,14 @@ describe('FSPersistorManagerTests', function() { it('Should propagate the error', async function() { fs.unlink.yields(error) await expect( - FSPersistorManager.promises.deleteFile(location, files[0]) + FSPersistor.promises.deleteFile(location, files[0]) ).to.eventually.be.rejected.and.have.property('cause', error) }) }) describe('deleteDirectory', function() { it('Should call rmdir(rimraf) with correct options', async function() { - await FSPersistorManager.promises.deleteDirectory(location, files[0]) + await FSPersistor.promises.deleteDirectory(location, files[0]) expect(rimraf).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) @@ -246,7 +280,7 @@ describe('FSPersistorManagerTests', function() { it('Should propagate the error', async function() { rimraf.yields(error) await expect( - FSPersistorManager.promises.deleteDirectory(location, files[0]) + FSPersistor.promises.deleteDirectory(location, files[0]) ).to.eventually.be.rejected.and.have.property('cause', error) }) }) @@ -266,7 +300,7 @@ describe('FSPersistorManagerTests', function() { }) it('Should call stat with correct options', async function() { - await FSPersistorManager.promises.checkIfFileExists(location, files[0]) + await FSPersistor.promises.checkIfFileExists(location, files[0]) expect(fs.stat).to.have.been.calledWith( `${location}/${filteredFilenames[0]}` ) @@ -274,23 +308,18 @@ describe('FSPersistorManagerTests', function() { it('Should return true for existing files', async function() { expect( - await FSPersistorManager.promises.checkIfFileExists(location, files[0]) + await FSPersistor.promises.checkIfFileExists(location, files[0]) ).to.equal(true) }) it('Should return false for non-existing files', async function() { expect( - await FSPersistorManager.promises.checkIfFileExists( - location, - badFilename - ) + await FSPersistor.promises.checkIfFileExists(location, badFilename) ).to.equal(false) }) it('should wrap the error if there is a problem', async function() { - await expect( - FSPersistorManager.promises.checkIfFileExists(location, 'llama') - ) + await expect(FSPersistor.promises.checkIfFileExists(location, 'llama')) .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) .and.have.property('cause', error) }) @@ -299,9 +328,7 @@ describe('FSPersistorManagerTests', function() { describe('directorySize', function() { it('should wrap the error', async function() { glob.yields(error) - await expect( - FSPersistorManager.promises.directorySize(location, files[0]) - ) + await expect(FSPersistor.promises.directorySize(location, files[0])) .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) .and.include({ cause: error }) .and.have.property('info') @@ -309,7 +336,7 @@ describe('FSPersistorManagerTests', function() { }) it('should filter the directory name', async function() { - await FSPersistorManager.promises.directorySize(location, files[0]) + await FSPersistor.promises.directorySize(location, files[0]) expect(glob).to.have.been.calledWith( `${location}/${filteredFilenames[0]}_*` ) @@ -317,7 +344,7 @@ describe('FSPersistorManagerTests', function() { it('should sum directory files size', async function() { expect( - await FSPersistorManager.promises.directorySize(location, files[0]) + await FSPersistor.promises.directorySize(location, files[0]) ).to.equal(stat.size * files.length) }) }) diff --git a/services/filestore/test/unit/js/MigrationPersistorTests.js 
b/services/filestore/test/unit/js/MigrationPersistorTests.js new file mode 100644 index 0000000000..db8401c78c --- /dev/null +++ b/services/filestore/test/unit/js/MigrationPersistorTests.js @@ -0,0 +1,519 @@ +const sinon = require('sinon') +const chai = require('chai') +const { expect } = chai +const modulePath = '../../../app/js/MigrationPersistor.js' +const SandboxedModule = require('sandboxed-module') + +const Errors = require('../../../app/js/Errors') + +// Not all methods are tested here, but a method with each type of wrapping has +// tests. Specifically, the following wrapping methods are tested here: +// getFileStream: _wrapFallbackMethod +// sendStream: forward-to-primary +// deleteFile: _wrapMethodOnBothPersistors +// copyFile: copyFileWithFallback + +describe('MigrationPersistorTests', function() { + const bucket = 'womBucket' + const fallbackBucket = 'bucKangaroo' + const key = 'monKey' + const destKey = 'donKey' + const genericError = new Error('guru meditation error') + const notFoundError = new Errors.NotFoundError('not found') + const size = 33 + const md5 = 'ffffffff' + + let Metrics, + Settings, + Logger, + Stream, + MigrationPersistor, + fileStream, + newPersistor + + beforeEach(function() { + fileStream = { + name: 'fileStream', + on: sinon + .stub() + .withArgs('end') + .yields(), + pipe: sinon.stub() + } + + newPersistor = function(hasFile) { + return { + promises: { + sendFile: sinon.stub().resolves(), + sendStream: sinon.stub().resolves(), + getFileStream: hasFile + ? sinon.stub().resolves(fileStream) + : sinon.stub().rejects(notFoundError), + deleteDirectory: sinon.stub().resolves(), + getFileSize: hasFile + ? sinon.stub().resolves(size) + : sinon.stub().rejects(notFoundError), + deleteFile: sinon.stub().resolves(), + copyFile: hasFile + ? sinon.stub().resolves() + : sinon.stub().rejects(notFoundError), + checkIfFileExists: sinon.stub().resolves(hasFile), + directorySize: hasFile + ? sinon.stub().resolves(size) + : sinon.stub().rejects(notFoundError), + getFileMd5Hash: hasFile + ? 
sinon.stub().resolves(md5) + : sinon.stub().rejects(notFoundError) + } + } + } + + Settings = { + filestore: { + fallback: { + buckets: { + [bucket]: fallbackBucket + } + } + } + } + + Metrics = { + inc: sinon.stub() + } + + Stream = { + pipeline: sinon.stub().yields(), + PassThrough: sinon.stub() + } + + Logger = { + warn: sinon.stub() + } + + MigrationPersistor = SandboxedModule.require(modulePath, { + requires: { + 'settings-sharelatex': Settings, + stream: Stream, + './Errors': Errors, + 'metrics-sharelatex': Metrics, + 'logger-sharelatex': Logger + }, + globals: { console } + }) + }) + + describe('getFileStream', function() { + const options = { wombat: 'potato' } + describe('when the primary persistor has the file', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor, response + beforeEach(async function() { + primaryPersistor = newPersistor(true) + fallbackPersistor = newPersistor(false) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + response = await migrationPersistor.promises.getFileStream( + bucket, + key, + options + ) + }) + + it('should return the file stream', function() { + expect(response).to.equal(fileStream) + }) + + it('should fetch the file from the primary persistor, with the correct options', function() { + expect( + primaryPersistor.promises.getFileStream + ).to.have.been.calledWithExactly(bucket, key, options) + }) + + it('should not query the fallback persistor', function() { + expect(fallbackPersistor.promises.getFileStream).not.to.have.been.called + }) + }) + + describe('when the fallback persistor has the file', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor, response + beforeEach(async function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(true) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + response = await migrationPersistor.promises.getFileStream( + bucket, + key, + options + ) + }) + + it('should return the file stream', function() { + expect(response).to.be.an.instanceOf(Stream.PassThrough) + }) + + it('should fetch the file from the primary persistor with the correct options', function() { + expect( + primaryPersistor.promises.getFileStream + ).to.have.been.calledWithExactly(bucket, key, options) + }) + + it('should fetch the file from the fallback persistor with the fallback bucket with the correct options', function() { + expect( + fallbackPersistor.promises.getFileStream + ).to.have.been.calledWithExactly(fallbackBucket, key, options) + }) + + it('should create one read stream', function() { + expect(fallbackPersistor.promises.getFileStream).to.have.been.calledOnce + }) + + it('should not send the file to the primary', function() { + expect(primaryPersistor.promises.sendStream).not.to.have.been.called + }) + }) + + describe('when the file should be copied to the primary', function() { + let primaryPersistor, + fallbackPersistor, + migrationPersistor, + returnedStream + beforeEach(async function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(true) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + Settings.filestore.fallback.copyOnMiss = true + returnedStream = await migrationPersistor.promises.getFileStream( + bucket, + key, + options + ) + }) + + it('should create one read stream', function() { + expect(fallbackPersistor.promises.getFileStream).to.have.been.calledOnce + }) + + it('should get the md5 hash from the 
source', function() { + expect( + fallbackPersistor.promises.getFileMd5Hash + ).to.have.been.calledWith(fallbackBucket, key) + }) + + it('should send a stream to the primary', function() { + expect( + primaryPersistor.promises.sendStream + ).to.have.been.calledWithExactly( + bucket, + key, + sinon.match.instanceOf(Stream.PassThrough), + md5 + ) + }) + + it('should send a stream to the client', function() { + expect(returnedStream).to.be.an.instanceOf(Stream.PassThrough) + }) + }) + + describe('when neither persistor has the file', function() { + it('rejects with a NotFoundError', async function() { + const migrationPersistor = MigrationPersistor( + newPersistor(false), + newPersistor(false) + ) + return expect( + migrationPersistor.promises.getFileStream(bucket, key) + ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) + }) + }) + + describe('when the primary persistor throws an unexpected error', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor, error + beforeEach(async function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(true) + primaryPersistor.promises.getFileStream = sinon + .stub() + .rejects(genericError) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + try { + await migrationPersistor.promises.getFileStream(bucket, key, options) + } catch (err) { + error = err + } + }) + + it('rejects with the error', function() { + expect(error).to.equal(genericError) + }) + + it('does not call the fallback', function() { + expect(fallbackPersistor.promises.getFileStream).not.to.have.been.called + }) + }) + + describe('when the fallback persistor throws an unexpected error', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor, error + beforeEach(async function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(false) + fallbackPersistor.promises.getFileStream = sinon + .stub() + .rejects(genericError) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + try { + await migrationPersistor.promises.getFileStream(bucket, key, options) + } catch (err) { + error = err + } + }) + + it('rejects with the error', function() { + expect(error).to.equal(genericError) + }) + + it('should have called the fallback', function() { + expect( + fallbackPersistor.promises.getFileStream + ).to.have.been.calledWith(fallbackBucket, key) + }) + }) + }) + + describe('sendStream', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor + beforeEach(function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(false) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + }) + + describe('when it works', function() { + beforeEach(async function() { + return migrationPersistor.promises.sendStream(bucket, key, fileStream) + }) + + it('should send the file to the primary persistor', function() { + expect( + primaryPersistor.promises.sendStream + ).to.have.been.calledWithExactly(bucket, key, fileStream) + }) + + it('should not send the file to the fallback persistor', function() { + expect(fallbackPersistor.promises.sendStream).not.to.have.been.called + }) + }) + + describe('when the primary persistor throws an error', function() { + it('returns the error', async function() { + primaryPersistor.promises.sendStream.rejects(notFoundError) + return expect( + migrationPersistor.promises.sendStream(bucket, key, fileStream) + 
).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) + }) + }) + }) + + describe('deleteFile', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor + beforeEach(function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(false) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + }) + + describe('when it works', function() { + beforeEach(async function() { + return migrationPersistor.promises.deleteFile(bucket, key) + }) + + it('should delete the file from the primary', function() { + expect( + primaryPersistor.promises.deleteFile + ).to.have.been.calledWithExactly(bucket, key) + }) + + it('should delete the file from the fallback', function() { + expect( + fallbackPersistor.promises.deleteFile + ).to.have.been.calledWithExactly(fallbackBucket, key) + }) + }) + + describe('when the primary persistor throws an error', function() { + let error + beforeEach(async function() { + primaryPersistor.promises.deleteFile.rejects(genericError) + try { + await migrationPersistor.promises.deleteFile(bucket, key) + } catch (err) { + error = err + } + }) + + it('should return the error', function() { + expect(error).to.equal(genericError) + }) + + it('should delete the file from the primary', function() { + expect( + primaryPersistor.promises.deleteFile + ).to.have.been.calledWithExactly(bucket, key) + }) + + it('should delete the file from the fallback', function() { + expect( + fallbackPersistor.promises.deleteFile + ).to.have.been.calledWithExactly(fallbackBucket, key) + }) + }) + + describe('when the fallback persistor throws an error', function() { + let error + beforeEach(async function() { + fallbackPersistor.promises.deleteFile.rejects(genericError) + try { + await migrationPersistor.promises.deleteFile(bucket, key) + } catch (err) { + error = err + } + }) + + it('should return the error', function() { + expect(error).to.equal(genericError) + }) + + it('should delete the file from the primary', function() { + expect( + primaryPersistor.promises.deleteFile + ).to.have.been.calledWithExactly(bucket, key) + }) + + it('should delete the file from the fallback', function() { + expect( + fallbackPersistor.promises.deleteFile + ).to.have.been.calledWithExactly(fallbackBucket, key) + }) + }) + }) + + describe('copyFile', function() { + describe('when the file exists on the primary', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor + beforeEach(async function() { + primaryPersistor = newPersistor(true) + fallbackPersistor = newPersistor(false) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + return migrationPersistor.promises.copyFile(bucket, key, destKey) + }) + + it('should call copyFile to copy the file', function() { + expect( + primaryPersistor.promises.copyFile + ).to.have.been.calledWithExactly(bucket, key, destKey) + }) + + it('should not try to read from the fallback', function() { + expect(fallbackPersistor.promises.getFileStream).not.to.have.been.called + }) + }) + + describe('when the file does not exist on the primary', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor + beforeEach(async function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(true) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + return migrationPersistor.promises.copyFile(bucket, key, destKey) + }) + + it('should call copyFile to copy the file', function() { + 
expect( + primaryPersistor.promises.copyFile + ).to.have.been.calledWithExactly(bucket, key, destKey) + }) + + it('should fetch the file from the fallback', function() { + expect( + fallbackPersistor.promises.getFileStream + ).not.to.have.been.calledWithExactly(fallbackBucket, key) + }) + + it('should get the md5 hash from the source', function() { + expect( + fallbackPersistor.promises.getFileMd5Hash + ).to.have.been.calledWith(fallbackBucket, key) + }) + + it('should send the file to the primary', function() { + expect( + primaryPersistor.promises.sendStream + ).to.have.been.calledWithExactly( + bucket, + destKey, + sinon.match.instanceOf(Stream.PassThrough), + md5 + ) + }) + }) + + describe('when the file does not exist on the fallback', function() { + let primaryPersistor, fallbackPersistor, migrationPersistor, error + beforeEach(async function() { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(false) + migrationPersistor = MigrationPersistor( + primaryPersistor, + fallbackPersistor + ) + try { + await migrationPersistor.promises.copyFile(bucket, key, destKey) + } catch (err) { + error = err + } + }) + + it('should call copyFile to copy the file', function() { + expect( + primaryPersistor.promises.copyFile + ).to.have.been.calledWithExactly(bucket, key, destKey) + }) + + it('should fetch the file from the fallback', function() { + expect( + fallbackPersistor.promises.getFileStream + ).not.to.have.been.calledWithExactly(fallbackBucket, key) + }) + + it('should return a not-found error', function() { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + }) + }) +}) diff --git a/services/filestore/test/unit/js/PersistorManagerTests.js b/services/filestore/test/unit/js/PersistorManagerTests.js index 0ecbb22078..cdc9de0f92 100644 --- a/services/filestore/test/unit/js/PersistorManagerTests.js +++ b/services/filestore/test/unit/js/PersistorManagerTests.js @@ -6,18 +6,14 @@ const SandboxedModule = require('sandboxed-module') const modulePath = '../../../app/js/PersistorManager.js' describe('PersistorManager', function() { - let PersistorManager, - FSPersistorManager, - S3PersistorManager, - settings, - requires + let PersistorManager, FSPersistor, S3Persistor, settings, requires beforeEach(function() { - FSPersistorManager = { - wrappedMethod: sinon.stub().returns('FSPersistorManager') + FSPersistor = { + wrappedMethod: sinon.stub().returns('FSPersistor') } - S3PersistorManager = { - wrappedMethod: sinon.stub().returns('S3PersistorManager') + S3Persistor = { + wrappedMethod: sinon.stub().returns('S3Persistor') } settings = { @@ -25,8 +21,8 @@ describe('PersistorManager', function() { } requires = { - './S3PersistorManager': S3PersistorManager, - './FSPersistorManager': FSPersistorManager, + './S3Persistor': S3Persistor, + './FSPersistor': FSPersistor, 'settings-sharelatex': settings, 'logger-sharelatex': { log() {}, @@ -40,7 +36,7 @@ describe('PersistorManager', function() { PersistorManager = SandboxedModule.require(modulePath, { requires }) expect(PersistorManager).to.respondTo('wrappedMethod') - expect(PersistorManager.wrappedMethod()).to.equal('S3PersistorManager') + expect(PersistorManager.wrappedMethod()).to.equal('S3Persistor') }) it("should implement the S3 wrapped method when 'aws-sdk' is configured", function() { @@ -48,7 +44,7 @@ describe('PersistorManager', function() { PersistorManager = SandboxedModule.require(modulePath, { requires }) expect(PersistorManager).to.respondTo('wrappedMethod') - 
expect(PersistorManager.wrappedMethod()).to.equal('S3PersistorManager') + expect(PersistorManager.wrappedMethod()).to.equal('S3Persistor') }) it('should implement the FS wrapped method when FS is configured', function() { @@ -56,7 +52,7 @@ describe('PersistorManager', function() { PersistorManager = SandboxedModule.require(modulePath, { requires }) expect(PersistorManager).to.respondTo('wrappedMethod') - expect(PersistorManager.wrappedMethod()).to.equal('FSPersistorManager') + expect(PersistorManager.wrappedMethod()).to.equal('FSPersistor') }) it('should throw an error when the backend is not configured', function() { diff --git a/services/filestore/test/unit/js/S3PersistorManagerTests.js b/services/filestore/test/unit/js/S3PersistorTests.js similarity index 78% rename from services/filestore/test/unit/js/S3PersistorManagerTests.js rename to services/filestore/test/unit/js/S3PersistorTests.js index daeac66d3f..9686deed5f 100644 --- a/services/filestore/test/unit/js/S3PersistorManagerTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -1,12 +1,12 @@ const sinon = require('sinon') const chai = require('chai') const { expect } = chai -const modulePath = '../../../app/js/S3PersistorManager.js' +const modulePath = '../../../app/js/S3Persistor.js' const SandboxedModule = require('sandboxed-module') const Errors = require('../../../app/js/Errors') -describe('S3PersistorManagerTests', function() { +describe('S3PersistorTests', function() { const defaultS3Key = 'frog' const defaultS3Secret = 'prince' const defaultS3Credentials = { @@ -26,21 +26,26 @@ describe('S3PersistorManagerTests', function() { { Key: 'hippo', Size: 22 } ] const filesSize = 33 + const md5 = 'ffffffff00000000ffffffff00000000' let Metrics, + Logger, S3, Fs, Meter, MeteredStream, ReadStream, - S3PersistorManager, + Stream, + S3Persistor, S3Client, S3ReadStream, S3NotFoundError, S3AccessDeniedError, FileNotFoundError, EmptyPromise, - settings + settings, + Hash, + crypto beforeEach(function() { settings = { @@ -56,6 +61,10 @@ describe('S3PersistorManagerTests', function() { } } + Stream = { + pipeline: sinon.stub().yields() + } + EmptyPromise = { promise: sinon.stub().resolves() } @@ -65,7 +74,11 @@ describe('S3PersistorManagerTests', function() { } ReadStream = { - pipe: sinon.stub().returns('readStream') + pipe: sinon.stub().returns('readStream'), + on: sinon + .stub() + .withArgs('end') + .yields() } FileNotFoundError = new Error('File not found') @@ -76,6 +89,7 @@ describe('S3PersistorManagerTests', function() { } MeteredStream = { + type: 'metered', on: sinon.stub(), bytes: objectSize } @@ -90,7 +104,7 @@ describe('S3PersistorManagerTests', function() { S3ReadStream = { on: sinon.stub(), - pipe: sinon.stub().returns('s3Stream'), + pipe: sinon.stub(), removeListener: sinon.stub() } S3ReadStream.on.withArgs('readable').yields() @@ -100,7 +114,8 @@ describe('S3PersistorManagerTests', function() { }), headObject: sinon.stub().returns({ promise: sinon.stub().resolves({ - ContentLength: objectSize + ContentLength: objectSize, + ETag: md5 }) }), listObjects: sinon.stub().returns({ @@ -108,21 +123,39 @@ describe('S3PersistorManagerTests', function() { Contents: files }) }), - upload: sinon.stub().returns(EmptyPromise), + upload: sinon + .stub() + .returns({ promise: sinon.stub().resolves({ ETag: `"${md5}"` }) }), copyObject: sinon.stub().returns(EmptyPromise), deleteObject: sinon.stub().returns(EmptyPromise), deleteObjects: sinon.stub().returns(EmptyPromise) } S3 = sinon.stub().returns(S3Client) - S3PersistorManager 
= SandboxedModule.require(modulePath, { + Hash = { + end: sinon.stub(), + read: sinon.stub().returns(md5), + setEncoding: sinon.stub() + } + crypto = { + createHash: sinon.stub().returns(Hash) + } + + Logger = { + warn: sinon.stub() + } + + S3Persistor = SandboxedModule.require(modulePath, { requires: { 'aws-sdk/clients/s3': S3, 'settings-sharelatex': settings, + 'logger-sharelatex': Logger, './Errors': Errors, fs: Fs, 'stream-meter': Meter, - 'metrics-sharelatex': Metrics + stream: Stream, + 'metrics-sharelatex': Metrics, + crypto }, globals: { console } }) @@ -133,11 +166,11 @@ describe('S3PersistorManagerTests', function() { let stream beforeEach(async function() { - stream = await S3PersistorManager.promises.getFileStream(bucket, key) + stream = await S3Persistor.promises.getFileStream(bucket, key) }) - it('returns a stream', function() { - expect(stream).to.equal('s3Stream') + it('returns a metered stream', function() { + expect(stream).to.equal(MeteredStream) }) it('sets the AWS client up with credentials from settings', function() { @@ -152,7 +185,10 @@ describe('S3PersistorManagerTests', function() { }) it('pipes the stream through the meter', function() { - expect(S3ReadStream.pipe).to.have.been.calledWith(MeteredStream) + expect(Stream.pipeline).to.have.been.calledWith( + S3ReadStream, + MeteredStream + ) }) it('records an ingress metric', function() { @@ -164,14 +200,14 @@ describe('S3PersistorManagerTests', function() { let stream beforeEach(async function() { - stream = await S3PersistorManager.promises.getFileStream(bucket, key, { + stream = await S3Persistor.promises.getFileStream(bucket, key, { start: 5, end: 10 }) }) - it('returns a stream', function() { - expect(stream).to.equal('s3Stream') + it('returns a metered stream', function() { + expect(stream).to.equal(MeteredStream) }) it('passes the byte range on to S3', function() { @@ -201,11 +237,11 @@ describe('S3PersistorManagerTests', function() { auth_secret: alternativeSecret } - stream = await S3PersistorManager.promises.getFileStream(bucket, key) + stream = await S3Persistor.promises.getFileStream(bucket, key) }) - it('returns a stream', function() { - expect(stream).to.equal('s3Stream') + it('returns a metered stream', function() { + expect(stream).to.equal(MeteredStream) }) it('sets the AWS client up with the alternative credentials', function() { @@ -220,16 +256,13 @@ describe('S3PersistorManagerTests', function() { }) it('caches the credentials', async function() { - stream = await S3PersistorManager.promises.getFileStream(bucket, key) + stream = await S3Persistor.promises.getFileStream(bucket, key) expect(S3).to.have.been.calledOnceWith(alternativeS3Credentials) }) it('uses the default credentials for an unknown bucket', async function() { - stream = await S3PersistorManager.promises.getFileStream( - 'anotherBucket', - key - ) + stream = await S3Persistor.promises.getFileStream('anotherBucket', key) expect(S3).to.have.been.calledTwice expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials) @@ -237,14 +270,8 @@ describe('S3PersistorManagerTests', function() { }) it('caches the default credentials', async function() { - stream = await S3PersistorManager.promises.getFileStream( - 'anotherBucket', - key - ) - stream = await S3PersistorManager.promises.getFileStream( - 'anotherBucket', - key - ) + stream = await S3Persistor.promises.getFileStream('anotherBucket', key) + stream = await S3Persistor.promises.getFileStream('anotherBucket', key) expect(S3).to.have.been.calledTwice 
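// A simplified sketch of the behaviour exercised by the credential tests above: a client is
// built per set of credentials and memoised, with bucket-specific credentials taking
// precedence over the default key/secret. The function name and settings shape are
// assumptions for illustration, not the real S3Persistor internals.
const S3 = require('aws-sdk/clients/s3')

const s3ClientCache = new Map()

function getClientForBucketSketch(settings, bucketName) {
  const bucketCreds = (settings.s3BucketCreds || {})[bucketName]
  const credentials = bucketCreds
    ? { accessKeyId: bucketCreds.auth_key, secretAccessKey: bucketCreds.auth_secret }
    : { accessKeyId: settings.s3.key, secretAccessKey: settings.s3.secret }

  // reuse one client per distinct credential set, as the "caches the credentials" tests expect
  const cacheKey = JSON.stringify(credentials)
  if (!s3ClientCache.has(cacheKey)) {
    s3ClientCache.set(cacheKey, new S3({ credentials }))
  }
  return s3ClientCache.get(cacheKey)
}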
expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials) @@ -256,7 +283,7 @@ describe('S3PersistorManagerTests', function() { delete settings.filestore.s3.secret await expect( - S3PersistorManager.promises.getFileStream('anotherBucket', key) + S3Persistor.promises.getFileStream('anotherBucket', key) ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.SettingsError) }) }) @@ -268,7 +295,7 @@ describe('S3PersistorManagerTests', function() { S3ReadStream.on = sinon.stub() S3ReadStream.on.withArgs('error').yields(S3NotFoundError) try { - stream = await S3PersistorManager.promises.getFileStream(bucket, key) + stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { error = err } @@ -282,12 +309,12 @@ describe('S3PersistorManagerTests', function() { expect(error).to.be.an.instanceOf(Errors.NotFoundError) }) - it('wraps the error from S3', function() { - expect(error.cause).to.equal(S3NotFoundError) + it('wraps the error', function() { + expect(error.cause).to.exist }) it('stores the bucket and key in the error', function() { - expect(error.info).to.deep.equal({ Bucket: bucket, Key: key }) + expect(error.info).to.include({ bucketName: bucket, key: key }) }) }) @@ -298,7 +325,7 @@ describe('S3PersistorManagerTests', function() { S3ReadStream.on = sinon.stub() S3ReadStream.on.withArgs('error').yields(S3AccessDeniedError) try { - stream = await S3PersistorManager.promises.getFileStream(bucket, key) + stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { error = err } @@ -312,12 +339,12 @@ describe('S3PersistorManagerTests', function() { expect(error).to.be.an.instanceOf(Errors.NotFoundError) }) - it('wraps the error from S3', function() { - expect(error.cause).to.equal(S3AccessDeniedError) + it('wraps the error', function() { + expect(error.cause).to.exist }) it('stores the bucket and key in the error', function() { - expect(error.info).to.deep.equal({ Bucket: bucket, Key: key }) + expect(error.info).to.include({ bucketName: bucket, key: key }) }) }) @@ -328,7 +355,7 @@ describe('S3PersistorManagerTests', function() { S3ReadStream.on = sinon.stub() S3ReadStream.on.withArgs('error').yields(genericError) try { - stream = await S3PersistorManager.promises.getFileStream(bucket, key) + stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { error = err } @@ -342,12 +369,12 @@ describe('S3PersistorManagerTests', function() { expect(error).to.be.an.instanceOf(Errors.ReadError) }) - it('wraps the error from S3', function() { - expect(error.cause).to.equal(genericError) + it('wraps the error', function() { + expect(error.cause).to.exist }) it('stores the bucket and key in the error', function() { - expect(error.info).to.deep.equal({ Bucket: bucket, Key: key }) + expect(error.info).to.include({ bucketName: bucket, key: key }) }) }) }) @@ -357,7 +384,7 @@ describe('S3PersistorManagerTests', function() { let size beforeEach(async function() { - size = await S3PersistorManager.promises.getFileSize(bucket, key) + size = await S3Persistor.promises.getFileSize(bucket, key) }) it('should return the object size', function() { @@ -380,7 +407,7 @@ describe('S3PersistorManagerTests', function() { promise: sinon.stub().rejects(S3NotFoundError) }) try { - await S3PersistorManager.promises.getFileSize(bucket, key) + await S3Persistor.promises.getFileSize(bucket, key) } catch (err) { error = err } @@ -403,7 +430,7 @@ describe('S3PersistorManagerTests', function() { promise: sinon.stub().rejects(genericError) }) try { - await 
S3PersistorManager.promises.getFileSize(bucket, key) + await S3Persistor.promises.getFileSize(bucket, key) } catch (err) { error = err } @@ -422,24 +449,62 @@ describe('S3PersistorManagerTests', function() { describe('sendStream', function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3PersistorManager.promises.sendStream(bucket, key, ReadStream) + return S3Persistor.promises.sendStream(bucket, key, ReadStream) }) it('should upload the stream', function() { expect(S3Client.upload).to.have.been.calledWith({ Bucket: bucket, Key: key, - Body: 'readStream' + Body: MeteredStream }) }) it('should meter the stream', function() { - expect(ReadStream.pipe).to.have.been.calledWith(MeteredStream) + expect(Stream.pipeline).to.have.been.calledWith( + ReadStream, + MeteredStream + ) }) it('should record an egress metric', function() { expect(Metrics.count).to.have.been.calledWith('s3.egress', objectSize) }) + + it('calculates the md5 hash of the file', function() { + expect(Stream.pipeline).to.have.been.calledWith(ReadStream, Hash) + }) + }) + + describe('when a hash is supploed', function() { + beforeEach(async function() { + return S3Persistor.promises.sendStream( + bucket, + key, + ReadStream, + 'aaaaaaaabbbbbbbbaaaaaaaabbbbbbbb' + ) + }) + + it('should not calculate the md5 hash of the file', function() { + expect(Stream.pipeline).not.to.have.been.calledWith( + sinon.match.any, + Hash + ) + }) + + it('sends the hash in base64', function() { + expect(S3Client.upload).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + Body: MeteredStream, + ContentMD5: 'qqqqqru7u7uqqqqqu7u7uw==' + }) + }) + + it('does not fetch the md5 hash of the uploaded file', function() { + expect(S3Client.headObject).not.to.have.been.called + }) }) describe('when the upload fails', function() { @@ -449,7 +514,7 @@ describe('S3PersistorManagerTests', function() { promise: sinon.stub().rejects(genericError) }) try { - await S3PersistorManager.promises.sendStream(bucket, key, ReadStream) + await S3Persistor.promises.sendStream(bucket, key, ReadStream) } catch (err) { error = err } @@ -464,7 +529,7 @@ describe('S3PersistorManagerTests', function() { describe('sendFile', function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3PersistorManager.promises.sendFile(bucket, key, filename) + return S3Persistor.promises.sendFile(bucket, key, filename) }) it('should create a read stream for the file', function() { @@ -475,7 +540,7 @@ describe('S3PersistorManagerTests', function() { expect(S3Client.upload).to.have.been.calledWith({ Bucket: bucket, Key: key, - Body: 'readStream' + Body: MeteredStream }) }) }) @@ -486,7 +551,7 @@ describe('S3PersistorManagerTests', function() { beforeEach(async function() { Fs.createReadStream = sinon.stub().throws(FileNotFoundError) try { - await S3PersistorManager.promises.sendFile(bucket, key, filename) + await S3Persistor.promises.sendFile(bucket, key, filename) } catch (err) { error = err } @@ -507,7 +572,7 @@ describe('S3PersistorManagerTests', function() { beforeEach(async function() { Fs.createReadStream = sinon.stub().throws(genericError) try { - await S3PersistorManager.promises.sendFile(bucket, key, filename) + await S3Persistor.promises.sendFile(bucket, key, filename) } catch (err) { error = err } @@ -526,7 +591,7 @@ describe('S3PersistorManagerTests', function() { describe('copyFile', function() { describe('with valid parameters', function() { beforeEach(async function() { - return 
S3PersistorManager.promises.copyFile(bucket, key, destKey) + return S3Persistor.promises.copyFile(bucket, key, destKey) }) it('should copy the object', function() { @@ -546,7 +611,7 @@ describe('S3PersistorManagerTests', function() { promise: sinon.stub().rejects(S3NotFoundError) }) try { - await S3PersistorManager.promises.copyFile(bucket, key, destKey) + await S3Persistor.promises.copyFile(bucket, key, destKey) } catch (err) { error = err } @@ -561,7 +626,7 @@ describe('S3PersistorManagerTests', function() { describe('deleteFile', function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3PersistorManager.promises.deleteFile(bucket, key) + return S3Persistor.promises.deleteFile(bucket, key) }) it('should delete the object', function() { @@ -580,7 +645,7 @@ describe('S3PersistorManagerTests', function() { promise: sinon.stub().rejects(S3NotFoundError) }) try { - await S3PersistorManager.promises.deleteFile(bucket, key) + await S3Persistor.promises.deleteFile(bucket, key) } catch (err) { error = err } @@ -595,7 +660,7 @@ describe('S3PersistorManagerTests', function() { describe('deleteDirectory', function() { describe('with valid parameters', function() { beforeEach(async function() { - return S3PersistorManager.promises.deleteDirectory(bucket, key) + return S3Persistor.promises.deleteDirectory(bucket, key) }) it('should list the objects in the directory', function() { @@ -621,7 +686,7 @@ describe('S3PersistorManagerTests', function() { S3Client.listObjects = sinon .stub() .returns({ promise: sinon.stub().resolves({ Contents: [] }) }) - return S3PersistorManager.promises.deleteDirectory(bucket, key) + return S3Persistor.promises.deleteDirectory(bucket, key) }) it('should list the objects in the directory', function() { @@ -644,7 +709,7 @@ describe('S3PersistorManagerTests', function() { .stub() .returns({ promise: sinon.stub().rejects(genericError) }) try { - await S3PersistorManager.promises.deleteDirectory(bucket, key) + await S3Persistor.promises.deleteDirectory(bucket, key) } catch (err) { error = err } @@ -671,7 +736,7 @@ describe('S3PersistorManagerTests', function() { .stub() .returns({ promise: sinon.stub().rejects(genericError) }) try { - await S3PersistorManager.promises.deleteDirectory(bucket, key) + await S3Persistor.promises.deleteDirectory(bucket, key) } catch (err) { error = err } @@ -692,7 +757,7 @@ describe('S3PersistorManagerTests', function() { let size beforeEach(async function() { - size = await S3PersistorManager.promises.directorySize(bucket, key) + size = await S3Persistor.promises.directorySize(bucket, key) }) it('should list the objects in the directory', function() { @@ -714,7 +779,7 @@ describe('S3PersistorManagerTests', function() { S3Client.listObjects = sinon .stub() .returns({ promise: sinon.stub().resolves({ Contents: [] }) }) - size = await S3PersistorManager.promises.directorySize(bucket, key) + size = await S3Persistor.promises.directorySize(bucket, key) }) it('should list the objects in the directory', function() { @@ -737,7 +802,7 @@ describe('S3PersistorManagerTests', function() { .stub() .returns({ promise: sinon.stub().rejects(genericError) }) try { - await S3PersistorManager.promises.directorySize(bucket, key) + await S3Persistor.promises.directorySize(bucket, key) } catch (err) { error = err } @@ -758,10 +823,7 @@ describe('S3PersistorManagerTests', function() { let exists beforeEach(async function() { - exists = await S3PersistorManager.promises.checkIfFileExists( - bucket, - key - ) + exists = await 
S3Persistor.promises.checkIfFileExists(bucket, key) }) it('should get the object header', function() { @@ -783,10 +845,7 @@ describe('S3PersistorManagerTests', function() { S3Client.headObject = sinon .stub() .returns({ promise: sinon.stub().rejects(S3NotFoundError) }) - exists = await S3PersistorManager.promises.checkIfFileExists( - bucket, - key - ) + exists = await S3Persistor.promises.checkIfFileExists(bucket, key) }) it('should get the object header', function() { @@ -809,7 +868,7 @@ describe('S3PersistorManagerTests', function() { .stub() .returns({ promise: sinon.stub().rejects(genericError) }) try { - await S3PersistorManager.promises.checkIfFileExists(bucket, key) + await S3Persistor.promises.checkIfFileExists(bucket, key) } catch (err) { error = err } From 5adfb3e2c09013507079a57ce8c00a10bdf09462 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Fri, 14 Feb 2020 13:29:30 +0000 Subject: [PATCH 422/555] Use large upload chunks --- services/filestore/app/js/S3Persistor.js | 2 +- services/filestore/test/unit/js/S3PersistorTests.js | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index 891d7be68e..dc2262c6d6 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -96,7 +96,7 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { } const response = await _getClientForBucket(bucketName) - .upload(uploadOptions) + .upload(uploadOptions, { partSize: 100 * 1024 * 1024 }) .promise() const destMd5 = _md5FromResponse(response) diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js index 9686deed5f..ac80fe4533 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -460,6 +460,12 @@ describe('S3PersistorTests', function() { }) }) + it('should upload files in a single part', function() { + expect(S3Client.upload).to.have.been.calledWith(sinon.match.any, { + partSize: 100 * 1024 * 1024 + }) + }) + it('should meter the stream', function() { expect(Stream.pipeline).to.have.been.calledWith( ReadStream, From 2f2a819b74969a60ce8d4f686c5e9a6a20477cb8 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Fri, 14 Feb 2020 13:23:22 +0000 Subject: [PATCH 423/555] Re-fetch file to calculate md5 if etag is not in correct format --- services/filestore/app/js/S3Persistor.js | 18 ++++------- .../test/unit/js/S3PersistorTests.js | 32 +++++++++++++++++++ 2 files changed, 39 insertions(+), 11 deletions(-) diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index dc2262c6d6..c1bfa8ec2f 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -98,7 +98,7 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { const response = await _getClientForBucket(bucketName) .upload(uploadOptions, { partSize: 100 * 1024 * 1024 }) .promise() - const destMd5 = _md5FromResponse(response) + const destMd5 = await _md5FromResponse(response) // if we didn't have an md5 hash, we should compare our computed one with S3's // as we couldn't tell S3 about it beforehand @@ -219,7 +219,7 @@ async function getFileMd5Hash(bucketName, key) { const response = await _getClientForBucket(bucketName) .headObject({ Bucket: bucketName, Key: key }) .promise() - const md5 = _md5FromResponse(response) + const md5 = await 
_md5FromResponse(response) return md5 } catch (err) { throw PersistorHelper.wrapError( @@ -364,16 +364,12 @@ function _buildClientOptions(bucketCredentials) { return options } -function _md5FromResponse(response) { - const md5 = (response.ETag || '').replace(/[ "]/g, '') +async function _md5FromResponse(response) { + let md5 = (response.ETag || '').replace(/[ "]/g, '') if (!md5.match(/^[a-f0-9]{32}$/)) { - throw new ReadError({ - message: 's3 etag not in md5-hash format', - info: { - md5, - eTag: response.ETag - } - }) + // the eTag isn't in md5 format so we need to calculate it ourselves + const stream = await getFileStream(response.Bucket, response.Key, {}) + md5 = await PersistorHelper.calculateStreamMd5(stream) } return md5 diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js index ac80fe4533..f0a075de3c 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -530,6 +530,38 @@ describe('S3PersistorTests', function() { expect(error).to.be.an.instanceOf(Errors.WriteError) }) }) + + describe("when the etag isn't a valid md5 hash", function() { + beforeEach(async function() { + S3Client.upload = sinon.stub().returns({ + promise: sinon.stub().resolves({ + ETag: 'somethingthatisntanmd5', + Bucket: bucket, + Key: key + }) + }) + + await S3Persistor.promises.sendStream(bucket, key, ReadStream) + }) + + it('should re-fetch the file to verify it', function() { + expect(S3Client.getObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key + }) + }) + + it('should meter the download', function() { + expect(Stream.pipeline).to.have.been.calledWith( + S3ReadStream, + MeteredStream + ) + }) + + it('should calculate the md5 hash from the file', function() { + expect(Stream.pipeline).to.have.been.calledWith(MeteredStream, Hash) + }) + }) }) describe('sendFile', function() { From 9e7eec7a19de392927d9bd3f4392782bf741b9fa Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 17 Feb 2020 12:13:31 +0000 Subject: [PATCH 424/555] Add acceptance test for large files --- .../test/acceptance/js/FilestoreTests.js | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index fd1baed474..7e57764cc4 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -368,6 +368,38 @@ describe('Filestore', function() { }) }) + describe('with a large file', function() { + let fileId, fileUrl, largeFileContent, error + + beforeEach(async function() { + fileId = Math.random() + fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` + + largeFileContent = '_wombat_'.repeat(1024 * 1024) // 8 megabytes + largeFileContent += Math.random() + + const writeStream = request.post(fileUrl) + const readStream = streamifier.createReadStream(largeFileContent) + // hack to consume the result to ensure the http request has been fully processed + const resultStream = fs.createWriteStream('/dev/null') + + try { + await pipeline(readStream, writeStream, resultStream) + } catch (err) { + error = err + } + }) + + it('should be able to get the file back', async function() { + const response = await rp.get(fileUrl) + expect(response.body).to.equal(largeFileContent) + }) + + it('should not throw an error', function() { + expect(error).not.to.exist + }) + }) + if (backend === 
'S3Persistor') { describe('with a file in a specific bucket', function() { let constantFileContent, fileId, fileUrl, bucketName From 7c4bf97a7feb3b5a991976bf87f3a61765abf963 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 17 Feb 2020 12:32:50 +0000 Subject: [PATCH 425/555] Avoid fetching file a second time when source md5 can't be fetched --- services/filestore/app/js/S3Persistor.js | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index c1bfa8ec2f..6e71d95b0d 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -98,7 +98,16 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { const response = await _getClientForBucket(bucketName) .upload(uploadOptions, { partSize: 100 * 1024 * 1024 }) .promise() - const destMd5 = await _md5FromResponse(response) + let destMd5 = _md5FromResponse(response) + if (!destMd5) { + // the eTag isn't in md5 format so we need to calculate it ourselves + const verifyStream = await getFileStream( + response.Bucket, + response.Key, + {} + ) + destMd5 = await PersistorHelper.calculateStreamMd5(verifyStream) + } // if we didn't have an md5 hash, we should compare our computed one with S3's // as we couldn't tell S3 about it beforehand @@ -219,8 +228,7 @@ async function getFileMd5Hash(bucketName, key) { const response = await _getClientForBucket(bucketName) .headObject({ Bucket: bucketName, Key: key }) .promise() - const md5 = await _md5FromResponse(response) - return md5 + return _md5FromResponse(response) } catch (err) { throw PersistorHelper.wrapError( err, @@ -364,12 +372,10 @@ function _buildClientOptions(bucketCredentials) { return options } -async function _md5FromResponse(response) { - let md5 = (response.ETag || '').replace(/[ "]/g, '') +function _md5FromResponse(response) { + const md5 = (response.ETag || '').replace(/[ "]/g, '') if (!md5.match(/^[a-f0-9]{32}$/)) { - // the eTag isn't in md5 format so we need to calculate it ourselves - const stream = await getFileStream(response.Bucket, response.Key, {}) - md5 = await PersistorHelper.calculateStreamMd5(stream) + return null } return md5 From 8acc9ef0aef985d0a4eed9f47c31e5808cd2aef8 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 17 Feb 2020 13:50:21 +0000 Subject: [PATCH 426/555] Make S3 partSize a setting --- services/filestore/app/js/S3Persistor.js | 2 +- services/filestore/config/settings.defaults.coffee | 1 + services/filestore/test/acceptance/js/FilestoreTests.js | 9 ++++++--- services/filestore/test/unit/js/S3PersistorTests.js | 3 ++- 4 files changed, 10 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index 6e71d95b0d..fc505ccfbb 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -96,7 +96,7 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { } const response = await _getClientForBucket(bucketName) - .upload(uploadOptions, { partSize: 100 * 1024 * 1024 }) + .upload(uploadOptions, { partSize: settings.filestore.s3.partSize }) .promise() let destMd5 = _md5FromResponse(response) if (!destMd5) { diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index bb124ae8e0..05a45f816d 100644 --- a/services/filestore/config/settings.defaults.coffee +++ 
b/services/filestore/config/settings.defaults.coffee @@ -38,6 +38,7 @@ settings = key: process.env['AWS_ACCESS_KEY_ID'] secret: process.env['AWS_SECRET_ACCESS_KEY'] endpoint: process.env['AWS_S3_ENDPOINT'] + partSize: process.env['S3_PARTSIZE'] or (100 * 1024 * 1024) stores: user_files: process.env['USER_FILES_BUCKET_NAME'] diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 7e57764cc4..ccef80bc01 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -57,7 +57,8 @@ const BackendSettings = { key: process.env.AWS_ACCESS_KEY_ID, secret: process.env.AWS_SECRET_ACCESS_KEY, endpoint: process.env.AWS_S3_ENDPOINT, - pathStyle: true + pathStyle: true, + partSize: 100 * 1024 * 1024 }, stores: { user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, @@ -71,7 +72,8 @@ const BackendSettings = { key: process.env.AWS_ACCESS_KEY_ID, secret: process.env.AWS_SECRET_ACCESS_KEY, endpoint: process.env.AWS_S3_ENDPOINT, - pathStyle: true + pathStyle: true, + partSize: 100 * 1024 * 1024 }, stores: { user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, @@ -102,7 +104,8 @@ const BackendSettings = { key: process.env.AWS_ACCESS_KEY_ID, secret: process.env.AWS_SECRET_ACCESS_KEY, endpoint: process.env.AWS_S3_ENDPOINT, - pathStyle: true + pathStyle: true, + partSize: 100 * 1024 * 1024 }, stores: { user_files: Path.resolve(__dirname, '../../../user_files'), diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js index f0a075de3c..07bda746bc 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -53,7 +53,8 @@ describe('S3PersistorTests', function() { backend: 's3', s3: { secret: defaultS3Secret, - key: defaultS3Key + key: defaultS3Key, + partSize: 100 * 1024 * 1024 }, stores: { user_files: 'sl_user_files' From 492f8abc9ff10e3d9be9845a78034abcd2dd3296 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 18 Feb 2020 10:24:29 +0000 Subject: [PATCH 427/555] Update config/settings.defaults.coffee Co-Authored-By: Jakob Ackermann --- services/filestore/config/settings.defaults.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 05a45f816d..4142be61f5 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -38,7 +38,7 @@ settings = key: process.env['AWS_ACCESS_KEY_ID'] secret: process.env['AWS_SECRET_ACCESS_KEY'] endpoint: process.env['AWS_S3_ENDPOINT'] - partSize: process.env['S3_PARTSIZE'] or (100 * 1024 * 1024) + partSize: process.env['AWS_S3_PARTSIZE'] or (100 * 1024 * 1024) stores: user_files: process.env['USER_FILES_BUCKET_NAME'] From 6e19a650d2b7219f3575f544b2c36581b221f4c8 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 27 Feb 2020 10:33:48 +0000 Subject: [PATCH 428/555] Write to error log on unhandled rejection/exception --- services/filestore/app.js | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/services/filestore/app.js b/services/filestore/app.js index e48e8ae3c8..278997b9aa 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -156,4 +156,13 @@ if (!module.parent) { }) } +process + .on('unhandledRejection', (reason, p) => { + logger.err(reason, 'Unhandled Rejection at Promise', p) 
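// ---------------------------------------------------------------------------
// Editor's note (illustrative aside, not part of patch 428 above): the two
// handlers added to app.js in this hunk cover different failure modes.
// 'unhandledRejection' fires for a promise that rejects with no .catch()
// attached; the service only logs it and keeps running. 'uncaughtException'
// fires for a synchronous error that escapes every try/catch; after logging,
// the process exits with a non-zero status, matching the process.exit(1) in
// the patch. A minimal standalone sketch using only Node built-ins, with
// console.error standing in for logger-sharelatex:

process.on('unhandledRejection', (reason, promise) => {
  // Log and carry on -- the event loop is still healthy.
  console.error('Unhandled Rejection at:', promise, 'reason:', reason)
})

process.on('uncaughtException', (err) => {
  // State may be corrupted after an uncaught throw, so log and bail out.
  console.error('Uncaught Exception:', err)
  process.exit(1)
})

Promise.reject(new Error('boom'))                  // triggers the first handler
setTimeout(() => { throw new Error('bang') }, 10)  // triggers the second
// ---------------------------------------------------------------------------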
+ }) + .on('uncaughtException', err => { + logger.err(err, 'Uncaught Exception thrown') + process.exit(1) + }) + module.exports = app From 7920570dd887889c983219b9289a26318eda3b1d Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Sun, 23 Feb 2020 15:42:04 +0000 Subject: [PATCH 429/555] [misc] test/unit: add missing require stubs for metrics and settings --- services/filestore/test/unit/js/ImageOptimiserTests.js | 5 ++++- services/filestore/test/unit/js/KeybuilderTests.js | 4 +++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/services/filestore/test/unit/js/ImageOptimiserTests.js b/services/filestore/test/unit/js/ImageOptimiserTests.js index e4bc967345..947400d0d8 100644 --- a/services/filestore/test/unit/js/ImageOptimiserTests.js +++ b/services/filestore/test/unit/js/ImageOptimiserTests.js @@ -19,7 +19,10 @@ describe('ImageOptimiser', function() { ImageOptimiser = SandboxedModule.require(modulePath, { requires: { './SafeExec': SafeExec, - 'logger-sharelatex': logger + 'logger-sharelatex': logger, + 'metrics-sharelatex': { + Timer: sinon.stub().returns({ done: sinon.stub() }) + } } }) }) diff --git a/services/filestore/test/unit/js/KeybuilderTests.js b/services/filestore/test/unit/js/KeybuilderTests.js index 9dcb38f74f..4364b668a3 100644 --- a/services/filestore/test/unit/js/KeybuilderTests.js +++ b/services/filestore/test/unit/js/KeybuilderTests.js @@ -7,7 +7,9 @@ describe('LocalFileWriter', function() { const key = 'wombat/potato' beforeEach(function() { - KeyBuilder = SandboxedModule.require(modulePath) + KeyBuilder = SandboxedModule.require(modulePath, { + requires: { 'settings-sharelatex': {} } + }) }) describe('cachedKey', function() { From 54e3b577728ac1334150a056498ee603da042a02 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Sun, 23 Feb 2020 15:43:26 +0000 Subject: [PATCH 430/555] [misc] test/unit: add missing globals that are lazy loaded --- services/filestore/test/unit/js/S3PersistorTests.js | 2 +- services/filestore/test/unit/js/SafeExecTests.js | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js index 07bda746bc..ac104e36f2 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -158,7 +158,7 @@ describe('S3PersistorTests', function() { 'metrics-sharelatex': Metrics, crypto }, - globals: { console } + globals: { console, Buffer } }) }) diff --git a/services/filestore/test/unit/js/SafeExecTests.js b/services/filestore/test/unit/js/SafeExecTests.js index 1092be00be..6b89c53c01 100644 --- a/services/filestore/test/unit/js/SafeExecTests.js +++ b/services/filestore/test/unit/js/SafeExecTests.js @@ -12,6 +12,7 @@ describe('SafeExec', function() { options = { timeout: 10 * 1000, killSignal: 'SIGTERM' } safeExec = SandboxedModule.require(modulePath, { + globals: { process }, requires: { 'settings-sharelatex': settings } From 94b8a7f89170b44c72a42d4a736cfb3d24968d7c Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Sun, 23 Feb 2020 15:44:16 +0000 Subject: [PATCH 431/555] [misc] test/unit: KeybuilderTests: use a unique suite label --- services/filestore/test/unit/js/KeybuilderTests.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/test/unit/js/KeybuilderTests.js b/services/filestore/test/unit/js/KeybuilderTests.js index 4364b668a3..774fc2f366 100644 --- a/services/filestore/test/unit/js/KeybuilderTests.js +++ 
b/services/filestore/test/unit/js/KeybuilderTests.js @@ -2,7 +2,7 @@ const SandboxedModule = require('sandboxed-module') const modulePath = '../../../app/js/KeyBuilder.js' -describe('LocalFileWriter', function() { +describe('KeybuilderTests', function() { let KeyBuilder const key = 'wombat/potato' From 6589aa6ae0d3ec9332fd8dcdd447eec9d34aca0a Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Sun, 23 Feb 2020 18:25:39 +0000 Subject: [PATCH 432/555] [misc] test/acceptance: harden the startup check for s3 Signed-off-by: Jakob Ackermann --- .../test/acceptance/js/FilestoreApp.js | 36 +++++++++++-------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 20564e2d40..eb34ad8302 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -4,11 +4,7 @@ const fs = require('fs') const Path = require('path') const { promisify } = require('util') const disrequire = require('disrequire') -const rp = require('request-promise-native').defaults({ - resolveWithFullResponse: true -}) - -const S3_TRIES = 30 +const AWS = require('aws-sdk') logger.logger.level('info') @@ -80,21 +76,31 @@ class FilestoreApp { return } - let s3Available = false + const s3 = new AWS.S3({ + accessKeyId: Settings.filestore.s3.key, + secretAccessKey: Settings.filestore.s3.secret, + endpoint: Settings.filestore.s3.endpoint, + s3ForcePathStyle: true, + signatureVersion: 'v4' + }) - while (tries < S3_TRIES && !s3Available) { + while (true) { try { - const response = await rp.get(`${Settings.filestore.s3.endpoint}/`) - if ([200, 404].includes(response.statusCode)) { - s3Available = true - } + return await s3 + .putObject({ + Key: 'startup', + Body: '42', + Bucket: Settings.filestore.stores.user_files + }) + .promise() } catch (err) { // swallow errors, as we may experience them until fake-s3 is running - } finally { - tries++ - if (!s3Available) { - await sleep(1000) + if (tries === 9) { + // throw just before hitting the 10s test timeout + throw err } + tries++ + await sleep(1000) } } } From 847f124d7b0ad9afd719568c1913047d3418fd57 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Sun, 23 Feb 2020 18:26:30 +0000 Subject: [PATCH 433/555] [misc] test/acceptance: skip the shutdown in case we did not start yet Signed-off-by: Jakob Ackermann --- services/filestore/test/acceptance/js/FilestoreApp.js | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index eb34ad8302..6bc4f32719 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -62,6 +62,7 @@ class FilestoreApp { } async stop() { + if (!this.server) return const closeServer = promisify(this.server.close).bind(this.server) try { await closeServer() From 516102e6fb691d6d65cc6c533b6749a0bf08feb0 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Fri, 28 Feb 2020 12:26:46 +0100 Subject: [PATCH 434/555] [misc] test/acceptance: do not hard code fake credentials --- services/filestore/test/acceptance/js/FilestoreTests.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index ccef80bc01..91fdc0f4c7 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ 
b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -415,8 +415,8 @@ describe('Filestore', function() { const s3ClientSettings = { credentials: { - accessKeyId: 'fake', - secretAccessKey: 'fake' + accessKeyId: process.env.AWS_ACCESS_KEY_ID, + secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY }, endpoint: process.env.AWS_S3_ENDPOINT, sslEnabled: false, From 2b9d0868c5211348fc533d88023496bc22b1d721 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Fri, 28 Feb 2020 12:27:06 +0100 Subject: [PATCH 435/555] [misc] test/acceptance: retrieve ingress metrics just before using it The upload request can bump the ingress metric. The content hash validation might require a full download in case the ETag field of the upload response is not a md5 sum. --- .../test/acceptance/js/FilestoreTests.js | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 91fdc0f4c7..c6a1e08444 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -148,12 +148,8 @@ describe('Filestore', function() { }) beforeEach(async function() { - // retrieve previous metrics from the app if (Settings.filestore.backend === 's3') { - ;[previousEgress, previousIngress] = await Promise.all([ - getMetric(filestoreUrl, 's3_egress'), - getMetric(filestoreUrl, 's3_ingress') - ]) + previousEgress = await getMetric(filestoreUrl, 's3_egress') } projectId = `acceptance_tests_${Math.random()}` }) @@ -195,6 +191,15 @@ describe('Filestore', function() { await pipeline(readStream, writeStream, resultStream) }) + beforeEach(async function retrievePreviousIngressMetrics() { + // The upload request can bump the ingress metric. + // The content hash validation might require a full download + // in case the ETag field of the upload response is not a md5 sum. 
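// ---------------------------------------------------------------------------
// Editor's note (illustrative aside, not part of patch 435 above): the extra
// download mentioned in the comment comes from patches 423/425 -- when S3
// returns an ETag that is not a plain 32-character hex MD5 (for example after
// a multipart upload, where the ETag has the form
// "<md5-of-part-md5s>-<part count>"), the persistor streams the object back
// and hashes it itself via PersistorHelper.calculateStreamMd5. That helper's
// internals are not shown in these patches; the sketch below is a hypothetical
// equivalent using only Node built-ins, with md5OfStream as a made-up name:

const crypto = require('crypto')
const Stream = require('stream')
const { promisify } = require('util')
const pipeline = promisify(Stream.pipeline)

async function md5OfStream(readStream) {
  // Pipe the object body through an MD5 hash stream and read the hex digest.
  const hash = crypto.createHash('md5')
  hash.setEncoding('hex')
  await pipeline(readStream, hash)
  return hash.read()
}
// ---------------------------------------------------------------------------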
+ if (Settings.filestore.backend === 's3') { + previousIngress = await getMetric(filestoreUrl, 's3_ingress') + } + }) + it('should return 404 for a non-existant id', async function() { const options = { uri: fileUrl + '___this_is_clearly_wrong___' } await expect( From b4e3d7de6205b3e7872c87d591280ed23988715e Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Fri, 10 Jan 2020 10:51:49 +0100 Subject: [PATCH 436/555] [config] add support for third party s3 which uses path-style buckets Signed-off-by: Jakob Ackermann --- services/filestore/config/settings.defaults.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 4142be61f5..251fb073b4 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -38,6 +38,7 @@ settings = key: process.env['AWS_ACCESS_KEY_ID'] secret: process.env['AWS_SECRET_ACCESS_KEY'] endpoint: process.env['AWS_S3_ENDPOINT'] + pathStyle: process.env['AWS_S3_PATH_STYLE'] partSize: process.env['AWS_S3_PARTSIZE'] or (100 * 1024 * 1024) stores: From e3011b25218d3dce26f38fae7b755741023d9ba1 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 28 Feb 2020 14:47:55 +0000 Subject: [PATCH 437/555] run npm update --- services/filestore/package-lock.json | 2724 +++++++++++++++----------- 1 file changed, 1611 insertions(+), 1113 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 64902d42af..2217f87c53 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -5,21 +5,21 @@ "requires": true, "dependencies": { "@babel/code-frame": { - "version": "7.5.5", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.5.5.tgz", - "integrity": "sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw==", + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz", + "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==", "dev": true, "requires": { - "@babel/highlight": "^7.0.0" + "@babel/highlight": "^7.8.3" } }, "@babel/generator": { - "version": "7.7.4", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.7.4.tgz", - "integrity": "sha512-m5qo2WgdOJeyYngKImbkyQrnUN1mPceaG5BV+G0E3gWsa4l/jCSryWJdM2x8OuGAOyh+3d5pVYfZWCiNFtynxg==", + "version": "7.8.6", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.8.6.tgz", + "integrity": "sha512-4bpOR5ZBz+wWcMeVtcf7FbjcFzCp+817z2/gHNncIRcM9MmKzUhtWCYAq27RAfUrAFwb+OCG1s9WEaVxfi6cjg==", "dev": true, "requires": { - "@babel/types": "^7.7.4", + "@babel/types": "^7.8.6", "jsesc": "^2.5.1", "lodash": "^4.17.13", "source-map": "^0.5.0" @@ -34,38 +34,38 @@ } }, "@babel/helper-function-name": { - "version": "7.7.4", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz", - "integrity": "sha512-AnkGIdiBhEuiwdoMnKm7jfPfqItZhgRaZfMg1XX3bS25INOnLPjPG1Ppnajh8eqgt5kPJnfqrRHqFqmjKDZLzQ==", + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.8.3.tgz", + "integrity": "sha512-BCxgX1BC2hD/oBlIFUgOCQDOPV8nSINxCwM3o93xP4P9Fq6aV5sgv2cOOITDMtCfQ+3PvHp3l689XZvAM9QyOA==", "dev": true, "requires": { - "@babel/helper-get-function-arity": "^7.7.4", - "@babel/template": "^7.7.4", - "@babel/types": "^7.7.4" + 
"@babel/helper-get-function-arity": "^7.8.3", + "@babel/template": "^7.8.3", + "@babel/types": "^7.8.3" } }, "@babel/helper-get-function-arity": { - "version": "7.7.4", - "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz", - "integrity": "sha512-QTGKEdCkjgzgfJ3bAyRwF4yyT3pg+vDgan8DSivq1eS0gwi+KGKE5x8kRcbeFTb/673mkO5SN1IZfmCfA5o+EA==", + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz", + "integrity": "sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA==", "dev": true, "requires": { - "@babel/types": "^7.7.4" + "@babel/types": "^7.8.3" } }, "@babel/helper-split-export-declaration": { - "version": "7.7.4", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz", - "integrity": "sha512-guAg1SXFcVr04Guk9eq0S4/rWS++sbmyqosJzVs8+1fH5NI+ZcmkaSkc7dmtAFbHFva6yRJnjW3yAcGxjueDug==", + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz", + "integrity": "sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA==", "dev": true, "requires": { - "@babel/types": "^7.7.4" + "@babel/types": "^7.8.3" } }, "@babel/highlight": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.5.0.tgz", - "integrity": "sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ==", + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.8.3.tgz", + "integrity": "sha512-PX4y5xQUvy0fnEVHrYOarRPXVWafSjTW9T0Hab8gVIawpl2Sj0ORyrygANq+KjcNlSSTw0YCLSNA8OyZ1I4yEg==", "dev": true, "requires": { "chalk": "^2.0.0", @@ -74,34 +74,34 @@ } }, "@babel/parser": { - "version": "7.7.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.7.5.tgz", - "integrity": "sha512-KNlOe9+/nk4i29g0VXgl8PEXIRms5xKLJeuZ6UptN0fHv+jDiriG+y94X6qAgWTR0h3KaoM1wK5G5h7MHFRSig==", + "version": "7.8.6", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.8.6.tgz", + "integrity": "sha512-trGNYSfwq5s0SgM1BMEB8hX3NDmO7EP2wsDGDexiaKMB92BaRpS+qZfpkMqUBhcsOTBwNy9B/jieo4ad/t/z2g==", "dev": true }, "@babel/template": { - "version": "7.7.4", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.7.4.tgz", - "integrity": "sha512-qUzihgVPguAzXCK7WXw8pqs6cEwi54s3E+HrejlkuWO6ivMKx9hZl3Y2fSXp9i5HgyWmj7RKP+ulaYnKM4yYxw==", + "version": "7.8.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.8.6.tgz", + "integrity": "sha512-zbMsPMy/v0PWFZEhQJ66bqjhH+z0JgMoBWuikXybgG3Gkd/3t5oQ1Rw2WQhnSrsOmsKXnZOx15tkC4qON/+JPg==", "dev": true, "requires": { - "@babel/code-frame": "^7.0.0", - "@babel/parser": "^7.7.4", - "@babel/types": "^7.7.4" + "@babel/code-frame": "^7.8.3", + "@babel/parser": "^7.8.6", + "@babel/types": "^7.8.6" } }, "@babel/traverse": { - "version": "7.7.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.7.4.tgz", - "integrity": "sha512-P1L58hQyupn8+ezVA2z5KBm4/Zr4lCC8dwKCMYzsa5jFMDMQAzaBNy9W5VjB+KAmBjb40U7a/H6ao+Xo+9saIw==", + "version": "7.8.6", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.8.6.tgz", + "integrity": "sha512-2B8l0db/DPi8iinITKuo7cbPznLCEk0kCxDoB9/N6gGNg/gxOXiR/IcymAFPiBwk5w6TtQ27w4wpElgp9btR9A==", "dev": true, "requires": { - "@babel/code-frame": 
"^7.5.5", - "@babel/generator": "^7.7.4", - "@babel/helper-function-name": "^7.7.4", - "@babel/helper-split-export-declaration": "^7.7.4", - "@babel/parser": "^7.7.4", - "@babel/types": "^7.7.4", + "@babel/code-frame": "^7.8.3", + "@babel/generator": "^7.8.6", + "@babel/helper-function-name": "^7.8.3", + "@babel/helper-split-export-declaration": "^7.8.3", + "@babel/parser": "^7.8.6", + "@babel/types": "^7.8.6", "debug": "^4.1.0", "globals": "^11.1.0", "lodash": "^4.17.13" @@ -116,12 +116,6 @@ "ms": "^2.1.1" } }, - "globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true - }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -131,9 +125,9 @@ } }, "@babel/types": { - "version": "7.7.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.7.4.tgz", - "integrity": "sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA==", + "version": "7.8.6", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.8.6.tgz", + "integrity": "sha512-wqz7pgWMIrht3gquyEFPVXeXCti72Rm8ep9b5tQKz9Yg9LzJA3HxosF1SB3Kc81KD1A3XBkkVYtJvCKS2Z/QrA==", "dev": true, "requires": { "esutils": "^2.0.2", @@ -142,27 +136,25 @@ } }, "@google-cloud/common": { - "version": "0.32.1", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", - "integrity": "sha1-ajLDQBcs6j22Z00ODjTnh0CgBz8=", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-2.4.0.tgz", + "integrity": "sha512-zWFjBS35eI9leAHhjfeOYlK5Plcuj/77EzstnrJIZbKgF/nkqjcQuGiMCpzCwOfPyUbz8ZaEOYgbHa759AKbjg==", "requires": { - "@google-cloud/projectify": "^0.3.3", - "@google-cloud/promisify": "^0.4.0", - "@types/request": "^2.48.1", + "@google-cloud/projectify": "^1.0.0", + "@google-cloud/promisify": "^1.0.0", "arrify": "^2.0.0", "duplexify": "^3.6.0", "ent": "^2.2.0", "extend": "^3.0.2", - "google-auth-library": "^3.1.1", - "pify": "^4.0.1", + "google-auth-library": "^5.5.0", "retry-request": "^4.0.0", - "teeny-request": "^3.11.3" + "teeny-request": "^6.0.0" } }, "@google-cloud/debug-agent": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.2.0.tgz", - "integrity": "sha1-2qdjWhaYpWY31dxXzhED536uKdM=", + "integrity": "sha512-fP87kYbS6aeDna08BivwQ1J260mwJGchRi99XdWCgqbRwuFac8ul0OT5i2wEeDSc5QaDX8ZuWQQ0igZvh1rTyQ==", "requires": { "@google-cloud/common": "^0.32.0", "@sindresorhus/is": "^0.15.0", @@ -180,17 +172,227 @@ "split": "^1.0.0" }, "dependencies": { + "@google-cloud/common": { + "version": "0.32.1", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", + "integrity": "sha512-bLdPzFvvBMtVkwsoBtygE9oUm3yrNmPa71gvOgucYI/GqvNP2tb6RYsDHPq98kvignhcgHGDI5wyNgxaCo8bKQ==", + "requires": { + "@google-cloud/projectify": "^0.3.3", + "@google-cloud/promisify": "^0.4.0", + "@types/request": "^2.48.1", + "arrify": "^2.0.0", + "duplexify": "^3.6.0", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^3.1.1", + "pify": "^4.0.1", + "retry-request": "^4.0.0", + "teeny-request": "^3.11.3" + } + }, + "@google-cloud/projectify": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", + "integrity": 
"sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" + }, + "@google-cloud/promisify": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz", + "integrity": "sha512-4yAHDC52TEMCNcMzVC8WlqnKKKq+Ssi2lXoUg9zWWkZ6U6tq9ZBRYLHHCRdfU+EU9YJsVmivwGcKYCjRGjnf4Q==" + }, + "agent-base": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", + "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", + "requires": { + "es6-promisify": "^5.0.0" + } + }, "coffeescript": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.4.1.tgz", - "integrity": "sha1-gV/TN98KNNSedKmKbr6pw+eTD3A=" + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", + "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" + }, + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "gaxios": { + "version": "1.8.4", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", + "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz", + "integrity": "sha512-Q6HrgfrCQeEircnNP3rCcEgiDv7eF9+1B+1MMgpE190+/+0mjQR8PxeOaRgxZWmdDAF9EIryHB9g1moPiw1SbQ==", + "requires": { + "gaxios": "^1.0.2", + "json-bigint": "^0.3.0" + } + }, + "google-auth-library": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", + "integrity": "sha512-cDQMzTotwyWMrg5jRO7q0A4TL/3GWBgO7I7q5xGKNiiFf9SmGY/OJ1YsLMgI2MVHHsEGyrqYnbnmV1AE+Z6DnQ==", + "requires": { + "base64-js": "^1.3.0", + "fast-text-encoding": "^1.0.0", + "gaxios": "^1.2.1", + "gcp-metadata": "^1.0.0", + "gtoken": "^2.3.2", + "https-proxy-agent": "^2.2.1", + "jws": "^3.1.5", + "lru-cache": "^5.0.0", + "semver": "^5.5.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "google-p12-pem": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", + "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", + "requires": { + "node-forge": "^0.8.0", + "pify": "^4.0.0" + } + }, + "gtoken": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", + "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", + "requires": { + "gaxios": "^1.0.4", + "google-p12-pem": "^1.0.0", + "jws": "^3.1.5", + "mime": "^2.2.0", + "pify": "^4.0.0" + } + }, + "https-proxy-agent": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", + "integrity": 
"sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", + "requires": { + "agent-base": "^4.3.0", + "debug": "^3.1.0" + } + }, + "jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "requires": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "requires": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "mime": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node-forge": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", + "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==" + }, + "teeny-request": { + "version": "3.11.3", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", + "integrity": "sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", + "requires": { + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.2.0", + "uuid": "^3.3.2" + } + }, + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" } } }, + "@google-cloud/logging": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.2.3.tgz", + "integrity": "sha512-MLAlYVBihCs0e581n9VUYOPJcrSpwSdL7KAjy2wgQidmRD9aWjvg97F22JMCtvrUiFBmAjiJVtn1JyNIXlv0Yw==", + "requires": { + "@google-cloud/common": "^2.2.2", + "@google-cloud/paginator": "^2.0.0", + "@google-cloud/projectify": "^1.0.0", + "@google-cloud/promisify": "^1.0.0", + "@opencensus/propagation-stackdriver": "0.0.20", + "arrify": "^2.0.0", + "dot-prop": "^5.1.0", + "eventid": "^1.0.0", + "extend": "^3.0.2", + "gcp-metadata": "^3.1.0", + "google-auth-library": "^5.2.2", + "google-gax": "^1.11.0", + "is": "^3.3.0", + "on-finished": "^2.3.0", + "pumpify": "^2.0.0", + "snakecase-keys": "^3.0.0", + "stream-events": "^1.0.4", + "through2": "^3.0.0", + "type-fest": "^0.11.0" + } + }, + "@google-cloud/logging-bunyan": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-2.0.3.tgz", + "integrity": "sha512-8n9MwsCRd4v8WZg17+d3m7qInud7lYTm5rpwXHY0/lzWEJYjeiztT09BiCYh56EEhHr+ynymJnzUDZKazkywlg==", + "requires": { + "@google-cloud/logging": "^7.0.0", + "google-auth-library": "^5.0.0" + } + }, + "@google-cloud/paginator": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-2.0.3.tgz", + "integrity": "sha512-kp/pkb2p/p0d8/SKUu4mOq8+HGwF8NPzHWkj+VKrIPQPyMRw8deZtrO/OcSiy9C/7bpfU5Txah5ltUNfPkgEXg==", + "requires": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + } + }, 
"@google-cloud/profiler": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-0.2.3.tgz", - "integrity": "sha1-Fj3738Mwuug1X+RuHlvgZTV7H1w=", + "integrity": "sha512-rNvtrFtIebIxZEJ/O0t8n7HciZGIXBo8DvHxWqAmsCaeLvkTtsaL6HmPkwxrNQ1IhbYWAxF+E/DwCiHyhKmgTg==", "requires": { "@google-cloud/common": "^0.26.0", "@types/console-log-level": "^1.4.0", @@ -212,7 +414,7 @@ "@google-cloud/common": { "version": "0.26.2", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz", - "integrity": "sha1-nFTiRxqEqgMelaJIJJduCA8lVkU=", + "integrity": "sha512-xJ2M/q3MrUbnYZuFlpF01caAlEhAUoRn0NXp93Hn3pkFpfSOG8YfbKbpBAHvcKVbBOAKVIwPsleNtuyuabUwLQ==", "requires": { "@google-cloud/projectify": "^0.3.2", "@google-cloud/promisify": "^0.3.0", @@ -228,20 +430,52 @@ "through2": "^3.0.0" } }, + "@google-cloud/projectify": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", + "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" + }, "@google-cloud/promisify": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz", - "integrity": "sha1-9kHm2USo4KBe4MsQkd+mAIm+zbo=" + "integrity": "sha512-QzB0/IMvB0eFxFK7Eqh+bfC8NLv3E9ScjWQrPOk6GgfNroxcVITdTlT8NRsRrcp5+QQJVPLkRqKG0PUdaWXmHw==" + }, + "agent-base": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", + "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", + "requires": { + "es6-promisify": "^5.0.0" + } }, "arrify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" }, + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "gaxios": { + "version": "1.8.4", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", + "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.3.0" + } + }, "gcp-metadata": { "version": "0.9.3", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz", - "integrity": "sha1-H510lfdGChRSZIHynhFZbdVj3SY=", + "integrity": "sha512-caV4S84xAjENtpezLCT/GILEAF5h/bC4cNqZFmt/tjTn8t+JBtTkQrgBrJu3857YdsnlM8rxX/PMcKGtE8hUlw==", "requires": { "gaxios": "^1.0.2", "json-bigint": "^0.3.0" @@ -250,7 +484,7 @@ "google-auth-library": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", - "integrity": "sha1-ejFdIDZ0Svavyth7IQ7mY4tA9Xs=", + "integrity": "sha512-FURxmo1hBVmcfLauuMRKOPYAPKht3dGuI2wjeJFalDUThO0HoYVjr4yxt5cgYSFm1dgUpmN9G/poa7ceTFAIiA==", "requires": { "axios": "^0.18.0", "gcp-metadata": "^0.7.0", @@ -264,7 +498,7 @@ "gcp-metadata": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz", - "integrity": "sha1-bDXbtSvaMqQnu5yY9UI33dG1QG8=", + "integrity": "sha512-ffjC09amcDWjh3VZdkDngIo7WoluyC5Ag9PAYxZbmQLOLNI8lvPtoKTSCyU54j2gwy5roZh6sSMTfkY2ct7K3g==", "requires": { "axios": "^0.18.0", "extend": "^3.0.1", @@ 
-273,58 +507,106 @@ } } }, - "nan": { - "version": "2.14.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", - "integrity": "sha1-eBj3IgJ7JFmobwKV1DTR/CM2xSw=" - }, - "readable-stream": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", - "integrity": "sha1-pRwmdUZY4KPCHb9ZFjvUW6b0R/w=", + "google-p12-pem": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", + "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" + "node-forge": "^0.8.0", + "pify": "^4.0.0" } }, + "gtoken": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", + "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", + "requires": { + "gaxios": "^1.0.4", + "google-p12-pem": "^1.0.0", + "jws": "^3.1.5", + "mime": "^2.2.0", + "pify": "^4.0.0" + } + }, + "https-proxy-agent": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", + "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", + "requires": { + "agent-base": "^4.3.0", + "debug": "^3.1.0" + } + }, + "jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "requires": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "requires": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "mime": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node-forge": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", + "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==" + }, "semver": { - "version": "5.7.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz", - "integrity": "sha1-eQp89v6lRZuslhELKbYEEtyP+Ws=" + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" }, - "string_decoder": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.2.0.tgz", - "integrity": "sha1-/obnOLGVRK/nBGkkOyoe6SQOro0=", + "teeny-request": { + "version": "3.11.3", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", + "integrity": "sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", "requires": { - "safe-buffer": "~5.1.0" + 
"https-proxy-agent": "^2.2.1", + "node-fetch": "^2.2.0", + "uuid": "^3.3.2" } }, - "through2": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz", - "integrity": "sha1-OSducTwzAu3544jdnIEt07glvVo=", - "requires": { - "readable-stream": "2 || 3" - } + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" } } }, "@google-cloud/projectify": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", - "integrity": "sha1-vekQPVCyCj6jM334xng6dm5w1B0=" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz", + "integrity": "sha512-ZdzQUN02eRsmTKfBj9FDL0KNDIFNjBn/d6tHQmA/+FImH5DO6ZV8E7FzxMgAUiVAUq41RFAkb25p1oHOZ8psfg==" }, "@google-cloud/promisify": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz", - "integrity": "sha1-T7/PTYW7ai5MzwWqY9KxDWyarZs=" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz", + "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==" }, "@google-cloud/trace-agent": { "version": "3.6.1", "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.6.1.tgz", - "integrity": "sha1-W+dEE5TQ6ldY8o25IqUAT/PwO+w=", + "integrity": "sha512-KDo85aPN4gSxJ7oEIOlKd7aGENZFXAM1kbIn1Ds+61gh/K1CQWSyepgJo3nUpAwH6D1ezDWV7Iaf8ueoITc8Uw==", "requires": { "@google-cloud/common": "^0.32.1", "builtin-modules": "^3.0.0", @@ -341,10 +623,224 @@ "uuid": "^3.0.1" }, "dependencies": { + "@google-cloud/common": { + "version": "0.32.1", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", + "integrity": "sha512-bLdPzFvvBMtVkwsoBtygE9oUm3yrNmPa71gvOgucYI/GqvNP2tb6RYsDHPq98kvignhcgHGDI5wyNgxaCo8bKQ==", + "requires": { + "@google-cloud/projectify": "^0.3.3", + "@google-cloud/promisify": "^0.4.0", + "@types/request": "^2.48.1", + "arrify": "^2.0.0", + "duplexify": "^3.6.0", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^3.1.1", + "pify": "^4.0.1", + "retry-request": "^4.0.0", + "teeny-request": "^3.11.3" + } + }, + "@google-cloud/projectify": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", + "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" + }, + "@google-cloud/promisify": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz", + "integrity": "sha512-4yAHDC52TEMCNcMzVC8WlqnKKKq+Ssi2lXoUg9zWWkZ6U6tq9ZBRYLHHCRdfU+EU9YJsVmivwGcKYCjRGjnf4Q==" + }, + "agent-base": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", + "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", + "requires": { + "es6-promisify": "^5.0.0" + } + }, + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "gaxios": { + "version": "1.8.4", + "resolved": 
"https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", + "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz", + "integrity": "sha512-Q6HrgfrCQeEircnNP3rCcEgiDv7eF9+1B+1MMgpE190+/+0mjQR8PxeOaRgxZWmdDAF9EIryHB9g1moPiw1SbQ==", + "requires": { + "gaxios": "^1.0.2", + "json-bigint": "^0.3.0" + } + }, + "google-auth-library": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", + "integrity": "sha512-cDQMzTotwyWMrg5jRO7q0A4TL/3GWBgO7I7q5xGKNiiFf9SmGY/OJ1YsLMgI2MVHHsEGyrqYnbnmV1AE+Z6DnQ==", + "requires": { + "base64-js": "^1.3.0", + "fast-text-encoding": "^1.0.0", + "gaxios": "^1.2.1", + "gcp-metadata": "^1.0.0", + "gtoken": "^2.3.2", + "https-proxy-agent": "^2.2.1", + "jws": "^3.1.5", + "lru-cache": "^5.0.0", + "semver": "^5.5.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "google-p12-pem": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", + "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", + "requires": { + "node-forge": "^0.8.0", + "pify": "^4.0.0" + } + }, + "gtoken": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", + "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", + "requires": { + "gaxios": "^1.0.4", + "google-p12-pem": "^1.0.0", + "jws": "^3.1.5", + "mime": "^2.2.0", + "pify": "^4.0.0" + } + }, + "https-proxy-agent": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", + "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", + "requires": { + "agent-base": "^4.3.0", + "debug": "^3.1.0" + } + }, + "jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "requires": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "requires": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "mime": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node-forge": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", + "integrity": 
"sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==" + }, + "teeny-request": { + "version": "3.11.3", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", + "integrity": "sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", + "requires": { + "https-proxy-agent": "^2.2.1", + "node-fetch": "^2.2.0", + "uuid": "^3.3.2" + } + }, "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha1-G0r0lV6zB3xQHCOHL8ZROBFYcTE=" + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + } + } + }, + "@grpc/grpc-js": { + "version": "0.6.18", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-0.6.18.tgz", + "integrity": "sha512-uAzv/tM8qpbf1vpx1xPMfcUMzbfdqJtdCYAqY/LsLeQQlnTb4vApylojr+wlCyr7bZeg3AFfHvtihnNOQQt/nA==", + "requires": { + "semver": "^6.2.0" + } + }, + "@grpc/proto-loader": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.3.tgz", + "integrity": "sha512-8qvUtGg77G2ZT2HqdqYoM/OY97gQd/0crSG34xNmZ4ZOsv3aQT/FQV9QfZPazTGna6MIoyUd+u6AxsoZjJ/VMQ==", + "requires": { + "lodash.camelcase": "^4.3.0", + "protobufjs": "^6.8.6" + } + }, + "@opencensus/core": { + "version": "0.0.20", + "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.20.tgz", + "integrity": "sha512-vqOuTd2yuMpKohp8TNNGUAPjWEGjlnGfB9Rh5e3DKqeyR94YgierNs4LbMqxKtsnwB8Dm2yoEtRuUgoe5vD9DA==", + "requires": { + "continuation-local-storage": "^3.2.1", + "log-driver": "^1.2.7", + "semver": "^6.0.0", + "shimmer": "^1.2.0", + "uuid": "^3.2.1" + }, + "dependencies": { + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + } + } + }, + "@opencensus/propagation-stackdriver": { + "version": "0.0.20", + "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.20.tgz", + "integrity": "sha512-P8yuHSLtce+yb+2EZjtTVqG7DQ48laC+IuOWi3X9q78s1Gni5F9+hmbmyP6Nb61jb5BEvXQX1s2rtRI6bayUWA==", + "requires": { + "@opencensus/core": "^0.0.20", + "hex2dec": "^1.0.1", + "uuid": "^3.2.1" + }, + "dependencies": { + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" } } }, @@ -361,12 +857,12 @@ "@protobufjs/base64": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", - "integrity": "sha1-TIVzDlm5ofHzSQR9vyQpYDS7JzU=" + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" }, "@protobufjs/codegen": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", - "integrity": "sha1-fvN/DQEPsCitGtWXIuUG2SYoFcs=" + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" }, "@protobufjs/eventemitter": { "version": "1.1.0", @@ -410,21 +906,21 @@ "@sindresorhus/is": { "version": "0.15.0", "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz", - "integrity": "sha1-lpFbqgXmpqHRN7rfSYTT/AWCC7Y=" + 
"integrity": "sha512-lu8BpxjAtRCAo5ifytTpCPCj99LF7o/2Myn+NXyNCBqvPYn7Pjd76AMmUB5l7XF1U6t0hcWrlEM5ESufW7wAeA==" }, "@sinonjs/commons": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.4.0.tgz", - "integrity": "sha1-ez7C2Wr0gdegMhJS57HJRyTsWng=", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.7.1.tgz", + "integrity": "sha512-Debi3Baff1Qu1Unc3mjJ96MgpbwTn43S1+9yJ0llWygPwDNu2aaWBD6yc9y/Z8XDRNhx7U+u2UDg2OGQXkclUQ==", "dev": true, "requires": { "type-detect": "4.0.8" } }, "@sinonjs/formatio": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.2.1.tgz", - "integrity": "sha1-UjEPL5vLxnvawYyUrUkBuV/eJn4=", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.2.2.tgz", + "integrity": "sha512-B8SEsgd8gArBLMD6zpRw3juQ2FVSsmdd7qlevyDqzS9WTCtvF55/gAL+h6gue8ZvPYcdiPdvueM/qm//9XzyTQ==", "dev": true, "requires": { "@sinonjs/commons": "^1", @@ -432,14 +928,14 @@ }, "dependencies": { "@sinonjs/samsam": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-3.3.2.tgz", - "integrity": "sha1-Y5QuPV6wt59t4775q/rRX7S2QBs=", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-3.3.3.tgz", + "integrity": "sha512-bKCMKZvWIjYD0BLGnNrxVuw4dkWCYsLqFOUWw8VgKF/+5Y+mE7LfHWPIYoDXowH+3a9LsWDMo0uAP8YDosPvHQ==", "dev": true, "requires": { - "@sinonjs/commons": "^1.0.2", + "@sinonjs/commons": "^1.3.0", "array-from": "^2.1.1", - "lodash": "^4.17.11" + "lodash": "^4.17.15" } } } @@ -447,7 +943,7 @@ "@sinonjs/samsam": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-2.1.3.tgz", - "integrity": "sha1-Ys8qm2JO3HlRNBNf43/Cro6ja+M=", + "integrity": "sha512-8zNeBkSKhU9a5cRNbpCKau2WWPfan+Q2zDlcXvXyhn9EsMqgYs4qzo0XHNVlXC6ABQL8fT6nV+zzo5RTHJzyXw==", "dev": true }, "@sinonjs/text-encoding": { @@ -456,20 +952,25 @@ "integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==", "dev": true }, + "@tootallnate/once": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.0.0.tgz", + "integrity": "sha512-KYyTT/T6ALPkIRd2Ge080X/BsXvy9O0hcWTtMWkPvwAwF99+vn6Dv4GzrFT/Nn1LePr+FFDbRXXlqmsy9lw2zA==" + }, "@types/caseless": { "version": "0.12.2", "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz", - "integrity": "sha1-9l09Y4ngHutFi9VNyPUrlalGO8g=" + "integrity": "sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==" }, "@types/console-log-level": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", - "integrity": "sha1-7/ccQa689RyLpa2LBdfVQkviuPM=" + "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ==" }, "@types/duplexify": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/@types/duplexify/-/duplexify-3.6.0.tgz", - "integrity": "sha1-38grZL06IWj1vSZESvFlvwI33Ng=", + "integrity": "sha512-5zOA53RUlzN74bvrSGwjudssD9F3a797sDZQkiYpUOxW+WHaXTCPz4/d5Dgi6FKnOqZ2CpaTo0DhgIfsXAOE/A==", "requires": { "@types/node": "*" } @@ -480,50 +981,62 @@ "integrity": "sha512-OCutwjDZ4aFS6PB1UZ988C4YgwlBHJd6wCeQqaLdmadZ/7e+w79+hbMUFC1QXDNCmdyoRfAFdm0RypzwR+Qpag==", "dev": true }, - "@types/form-data": { - "version": "2.2.1", - "resolved": 
"https://registry.npmjs.org/@types/form-data/-/form-data-2.2.1.tgz", - "integrity": "sha1-7is7jqoRwJOCiZU2BrdFtzjFSx4=", + "@types/fs-extra": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.0.tgz", + "integrity": "sha512-UoOfVEzAUpeSPmjm7h1uk5MH6KZma2z2O7a75onTGjnNvAvMVrPzPL/vBbT65iIGHWj6rokwfmYcmxmlSf2uwg==", "requires": { "@types/node": "*" } }, "@types/json-schema": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.3.tgz", - "integrity": "sha512-Il2DtDVRGDcqjDtE+rF8iqg1CArehSK84HZJCT7AMITlyXRBpuPhqGLDQMowraqqu1coEaimg4ZOqggt6L6L+A==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.4.tgz", + "integrity": "sha512-8+KAKzEvSUdeo+kmqnKrqgeE+LcA0tjYWFY7RPProVYwnqDjukzO+3b6dLD56rYX5TdWejnEOLJYOIeh4CXKuA==", "dev": true }, "@types/long": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz", - "integrity": "sha1-cZVR0jUtMBrIuB23Mqy2vcKNve8=" + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz", + "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w==" }, "@types/node": { - "version": "12.0.8", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.0.8.tgz", - "integrity": "sha1-VRRmvhGyrcPz1HFWdY9hC9n2sdg=" + "version": "10.17.16", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.16.tgz", + "integrity": "sha512-A4283YSA1OmnIivcpy/4nN86YlnKRiQp8PYwI2KdPCONEBN093QTb0gCtERtkLyVNGKKIGazTZ2nAmVzQU51zA==" }, "@types/request": { - "version": "2.48.1", - "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.1.tgz", - "integrity": "sha1-5ALWkapmcPu/8ZV7FfEnAjCrQvo=", + "version": "2.48.4", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.4.tgz", + "integrity": "sha512-W1t1MTKYR8PxICH+A4HgEIPuAC3sbljoEVfyZbeFJJDbr30guDspJri2XOaM2E+Un7ZjrihaDi7cf6fPa2tbgw==", "requires": { "@types/caseless": "*", - "@types/form-data": "*", "@types/node": "*", - "@types/tough-cookie": "*" + "@types/tough-cookie": "*", + "form-data": "^2.5.0" + }, + "dependencies": { + "form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + } } }, "@types/semver": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", - "integrity": "sha1-FGwqKe59O65L8vyydGNuJkyBPEU=" + "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" }, "@types/tough-cookie": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.5.tgz", - "integrity": "sha1-naRO11VxmZtlw3tgybK4jbVMWF0=" + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.6.tgz", + "integrity": "sha512-wHNBMnkoEBiRAd3s8KTKwIuO9biFtTf0LehITzBhSco+HQI0xkXZbLOD55SW3Aqw3oUkHstkm5SPv58yaAdFPQ==" }, "@typescript-eslint/experimental-utils": { "version": "1.13.0", @@ -581,60 +1094,86 @@ "abort-controller": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", - "integrity": "sha1-6vVNU7YrrkE46AnKIlyEOabvs5I=", + "integrity": 
"sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", "requires": { "event-target-shim": "^5.0.0" } }, "accepts": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz", - "integrity": "sha1-63d99gEXI6OxTopywIBcjoZ0a9I=", + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz", + "integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==", "requires": { - "mime-types": "~2.1.18", - "negotiator": "0.6.1" + "mime-types": "~2.1.24", + "negotiator": "0.6.2" } }, "acorn": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.1.1.tgz", - "integrity": "sha1-fSWuBbuK0fm2mRCOEJTs14hK3B8=" + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.0.tgz", + "integrity": "sha512-gac8OEcQ2Li1dxIEWGZzsp2BitJxwkwcOm0zHAJLcPJaVvm58FRnk6RkuLRpU1EujipU2ZFODv2P9DLMfnV8mw==" }, "acorn-jsx": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.1.0.tgz", - "integrity": "sha512-tMUqwBWfLFbJbizRmEcWSLw6HnFzfdJs2sOJEOwwtVPMoH/0Ay+E703oZz78VSXZiiDcZrQ5XKjPIUQixhmgVw==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.2.0.tgz", + "integrity": "sha512-HiUX/+K2YpkpJ+SzBffkM/AQ2YE03S0U1kjTLVpoJdhZMOWy8qvXVN9JdLqv2QsaQ6MPYQIuNmwD8zOiYUofLQ==", "dev": true }, "agent-base": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", - "integrity": "sha1-gWXwHENgCbzK0LHRIvBe13Dvxu4=", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.0.tgz", + "integrity": "sha512-j1Q7cSCqN+AwrmDd+pzgqc0/NpC655x2bUf5ZjRIO77DcNBFmh+OgRNzF6OKdCC9RSCb19fGd99+bhXFdkRNqw==", "requires": { - "es6-promisify": "^5.0.0" + "debug": "4" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } } }, "ajv": { - "version": "6.10.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.0.tgz", - "integrity": "sha1-kNDVRDnaWHzX6EO/twRfUL0ivfE=", + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.0.tgz", + "integrity": "sha512-D6gFiFA0RRLyUbvijN74DWAjXSFxWKaWP7mldxkVhyhAV3+SWA9HEJPHQ2c9soIeTFJqcSdFDGFgdqs1iUU2Hw==", "requires": { - "fast-deep-equal": "^2.0.1", + "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", - "dev": true + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.0.tgz", + "integrity": "sha512-EiYhwo0v255HUL6eDyuLrXEkTi7WwVCLAw+SeOQ7M7qdun1z1pum4DEm/nuqIVbPvi9RPPc9k9LbyBv6H0DwVg==", + "dev": true, + "requires": { + "type-fest": "^0.8.1" + }, + "dependencies": { + "type-fest": { + "version": "0.8.1", + "resolved": 
"https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true + } + } }, "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", "dev": true }, "ansi-styles": { @@ -667,13 +1206,14 @@ "dev": true }, "array-includes": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.0.tgz", - "integrity": "sha512-ONOEQoKrvXPKk7Su92Co0YMqYO32FfqJTzkKU9u2UpIXyYZIzLSvpdg4AwvSw4mSUW0czu6inK+zby6Oj6gDjQ==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.1.tgz", + "integrity": "sha512-c2VXaCHl7zPsvpkFsw4nxvFie4fh1ur9bpcgsVkIjqn0H/Xwdg+7fv3n2r/isyS8EBj5b06M9kHyZuIr4El6WQ==", "dev": true, "requires": { "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.0" + "es-abstract": "^1.17.0", + "is-string": "^1.0.5" } }, "array.prototype.flat": { @@ -689,12 +1229,12 @@ "arrify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", - "integrity": "sha1-yWVekzHgq81YjSp8rX6ZVvZnAfo=" + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==" }, "asn1": { "version": "0.2.4", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", - "integrity": "sha1-jSR136tVO7M+d7VOWeiAu4ziMTY=", + "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", "requires": { "safer-buffer": "~2.1.0" } @@ -707,7 +1247,7 @@ "assertion-error": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", - "integrity": "sha1-5gtrDo8wG9l+U3UhW9pAbIURjAs=", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", "dev": true }, "astral-regex": { @@ -724,16 +1264,16 @@ "async-listener": { "version": "0.6.10", "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz", - "integrity": "sha1-p8l6vlcLpgLXgic8DeYKUePhfLw=", + "integrity": "sha512-gpuo6xOyF4D5DE5WvyqZdPA3NGhiT6Qf07l7DCB0wwDEsLvDIbCr6j9S5aj5Ch96dLace5tXVzWBZkxU/c5ohw==", "requires": { "semver": "^5.3.0", "shimmer": "^1.1.0" }, "dependencies": { "semver": { - "version": "5.7.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz", - "integrity": "sha1-eQp89v6lRZuslhELKbYEEtyP+Ws=" + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" } } }, @@ -743,25 +1283,25 @@ "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" }, "aws-sdk": { - "version": "2.315.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.315.0.tgz", - "integrity": "sha1-fzkxYq8DjL73IjdERKm8muG9u+k=", + "version": "2.628.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.628.0.tgz", + "integrity": "sha512-k0yQqqgTuwCWqPvLe09vGGSJ88oKaWPPCC6uyxQKv7pPQcz9cFZjmjLe6QdGJZeCmeb3rGIZlG+N4Z+9LnwCNg==", "requires": { "buffer": "4.9.1", "events": "1.1.1", - "ieee754": "1.1.8", + "ieee754": "1.1.13", "jmespath": 
"0.15.0", "querystring": "0.2.0", "sax": "1.2.1", "url": "0.10.3", - "uuid": "3.1.0", + "uuid": "3.3.2", "xml2js": "0.4.19" }, "dependencies": { "uuid": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz", - "integrity": "sha1-PdPT55Crwk17DToDT/q6vijrvAQ=" + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" } } }, @@ -771,42 +1311,31 @@ "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" }, "aws4": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", - "integrity": "sha1-8OAD2cqef1nHpQiUXXsu+aBKVC8=" + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.9.1.tgz", + "integrity": "sha512-wMHVg2EOHaMRxbzgFJ9gtjOOCrI80OHLG14rxi28XwOW8ux6IiEbRCGGGqCtdAIg4FQCbW20k9RsT4y3gJlFug==" }, "axios": { "version": "0.18.1", "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz", - "integrity": "sha1-/z8N4ue10YDnV62YAA8Qgbh7zqM=", + "integrity": "sha512-0BfJq4NSfQXd+SkFdrvFbG7addhYSBA2mQwISr46pD6E5iqkWg02RAs8vyTT/j0RTnoYmeXauBuSv1qKwR179g==", "requires": { "follow-redirects": "1.5.10", "is-buffer": "^2.0.2" } }, "babel-eslint": { - "version": "10.0.3", - "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.0.3.tgz", - "integrity": "sha512-z3U7eMY6r/3f3/JB9mTsLjyxrv0Yb1zb8PCWCLpguxfCzBIZUwy23R1t/XKewP+8mEN2Ck8Dtr4q20z6ce6SoA==", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.1.0.tgz", + "integrity": "sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg==", "dev": true, "requires": { "@babel/code-frame": "^7.0.0", - "@babel/parser": "^7.0.0", - "@babel/traverse": "^7.0.0", - "@babel/types": "^7.0.0", + "@babel/parser": "^7.7.0", + "@babel/traverse": "^7.7.0", + "@babel/types": "^7.7.0", "eslint-visitor-keys": "^1.0.0", "resolve": "^1.12.0" - }, - "dependencies": { - "resolve": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.13.1.tgz", - "integrity": "sha512-CxqObCX8K8YtAhOBRg+lrcdn+LK+WYOS8tSjqSFbjtrI5PnS63QPhZl4+yKfrU9tdsbMu9Anr/amegT87M9Z6w==", - "dev": true, - "requires": { - "path-parse": "^1.0.6" - } - } } }, "balanced-match": { @@ -815,9 +1344,9 @@ "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" }, "base64-js": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz", - "integrity": "sha1-yrHmEY8FEJXli1KBrqjBzSK/wOM=" + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", + "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==" }, "bcrypt-pbkdf": { "version": "1.0.2", @@ -830,12 +1359,12 @@ "bignumber.js": { "version": "7.2.1", "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", - "integrity": "sha1-gMBIdZ2CaACAfEv9Uh5Q7bulel8=" + "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ==" }, "bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha1-EDU8npRTNLwFEabZCzj7x8nFBN8=", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", "requires": { "file-uri-to-path": "1.0.0" } @@ -846,20 +1375,20 @@ "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" }, "body-parser": 
{ - "version": "1.18.3", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.3.tgz", - "integrity": "sha1-WykhmP/dVTs6DyDe0FkrlWlVyLQ=", + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz", + "integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==", "requires": { - "bytes": "3.0.0", + "bytes": "3.1.0", "content-type": "~1.0.4", "debug": "2.6.9", "depd": "~1.1.2", - "http-errors": "~1.6.3", - "iconv-lite": "0.4.23", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", "on-finished": "~2.3.0", - "qs": "6.5.2", - "raw-body": "2.3.3", - "type-is": "~1.6.16" + "qs": "6.7.0", + "raw-body": "2.4.0", + "type-is": "~1.6.17" } }, "boolify": { @@ -871,7 +1400,7 @@ "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha1-PH/L9SnYcibz0vUrlm/1Jx60Qd0=", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -880,7 +1409,7 @@ "browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=" + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==" }, "buffer": { "version": "4.9.1", @@ -900,23 +1429,23 @@ "builtin-modules": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", - "integrity": "sha1-qtl8FRMet2tltQ7yCOdYTNdqdIQ=" + "integrity": "sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw==" }, "bunyan": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz", - "integrity": "sha1-X259RMQ7lS9WsPQTCeOrEjkbTi0=", - "dev": true, + "version": "1.8.12", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", + "integrity": "sha1-8VDw9nSKvdcq6uhPBEA74u8RN5c=", "requires": { - "dtrace-provider": "~0.6", + "dtrace-provider": "~0.8", + "moment": "^2.10.6", "mv": "~2", "safe-json-stringify": "~1" } }, "bytes": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", - "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", + "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" }, "callsites": { "version": "3.1.0", @@ -931,9 +1460,9 @@ "dev": true }, "camelcase-keys": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.1.1.tgz", - "integrity": "sha512-kEPCddRFChEzO0d6w61yh0WbBiSv9gBnfZWGfXRYPlGqIdIGef6HMR6pgqVSEWCYkrp8B0AtEpEXNY+Jx0xk1A==", + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.1.2.tgz", + "integrity": "sha512-QfFrU0CIw2oltVvpndW32kuJ/9YOJwUnmWrjlXt1nnJZHCaS9i6bfOpg9R4Lw8aZjStkJWM+jc0cdXjWBgVJSw==", "dev": true, "requires": { "camelcase": "^5.3.1", @@ -941,10 +1470,15 @@ "quick-lru": "^4.0.1" } }, + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + }, "chai": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz", - "integrity": 
"sha1-dgqnLPION5XoSxKHfODoNzeqKeU=", + "integrity": "sha512-XQU3bhBukrOsQCuwZndwGcCVQHyZi53fQ6Ys1Fym7E4olpIqqZZhhoFJoaKVvV17lWQoXYwgWN2nF5crA8J2jw==", "dev": true, "requires": { "assertion-error": "^1.1.0", @@ -988,12 +1522,12 @@ "dev": true }, "cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", "dev": true, "requires": { - "restore-cursor": "^2.0.0" + "restore-cursor": "^3.1.0" } }, "cli-width": { @@ -1058,6 +1592,19 @@ "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", "dev": true }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "commander": { + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", + "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==" + }, "common-tags": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", @@ -1072,7 +1619,7 @@ "console-log-level": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", - "integrity": "sha1-nFprue8e9lsFq6gwKLD/iUzfYwo=" + "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" }, "contains-path": { "version": "0.1.0", @@ -1081,28 +1628,31 @@ "dev": true }, "content-disposition": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", - "integrity": "sha1-DPaLud318r55YcOoUXjLhdunjLQ=" + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz", + "integrity": "sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==", + "requires": { + "safe-buffer": "5.1.2" + } }, "content-type": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "integrity": "sha1-4TjMdeBAxyexlm/l5fjJruJW/js=" + "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" }, "continuation-local-storage": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.2.1.tgz", - "integrity": "sha1-EfYT906RT+mzTJKtLSj+auHbf/s=", + "integrity": "sha512-jx44cconVqkCEEyLSKWwkvUXwO561jXMa3LPjTPsm5QR22PA0/mhe33FT4Xb5y74JDvt/Cq+5lm8S8rskLv9ZA==", "requires": { "async-listener": "^0.6.0", "emitter-listener": "^1.1.1" } }, "cookie": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", - "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz", + "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==" }, "cookie-signature": { "version": "1.0.6", @@ -1110,9 +1660,9 @@ "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" }, "core-js": { - "version": "3.5.0", - 
"resolved": "https://registry.npmjs.org/core-js/-/core-js-3.5.0.tgz", - "integrity": "sha512-Ifh3kj78gzQ7NAoJXeTu+XwzDld0QRIwjBLRqAMhuLhP3d2Av5wmgE9ycfnvK6NAEjTkQ1sDPeoEZAWO3Hx1Uw==", + "version": "3.6.4", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.4.tgz", + "integrity": "sha512-4paDGScNgZP2IXXilaffL9X7968RuvwlkK3xWtZRVqgd8SYNiVKRJvkFd1aqqEuPfN7E68ZHEp9hDj6lHj4Hyw==", "dev": true }, "core-util-is": { @@ -1141,6 +1691,11 @@ } } }, + "d64": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", + "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" + }, "dashdash": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", @@ -1152,7 +1707,7 @@ "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha1-XRKFFd8TT/Mn6QpMk/Tgd6U2NB8=", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } @@ -1166,7 +1721,7 @@ "deep-eql": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", - "integrity": "sha1-38lARACtHI/gI+faHfHBR8S0RN8=", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", "dev": true, "requires": { "type-detect": "^4.0.0" @@ -1190,7 +1745,12 @@ "delay": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", - "integrity": "sha1-7+6/uPVFV5yzlrOnIkQ+yW0UxQ4=" + "integrity": "sha512-Lwaf3zVFDMBop1yDuFZ19F9WyGcZcGacsbdlZtWjQmM50tOcMntm1njF/Nb/Vjij3KaSvCF+sEYGKrrjObu2NA==" + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" }, "depd": { "version": "1.1.2", @@ -1205,8 +1765,7 @@ "diff": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=", - "dev": true + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==" }, "disrequire": { "version": "1.1.0", @@ -1229,49 +1788,32 @@ "esutils": "^2.0.2" } }, + "dot-prop": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.2.0.tgz", + "integrity": "sha512-uEUyaDKoSQ1M4Oq8l45hSE26SnTxL6snNnqvK/VWx5wJhmff5z0FUVJDKDanor/6w3kzE3i7XZOk+7wC0EXr1A==", + "requires": { + "is-obj": "^2.0.0" + } + }, "dtrace-provider": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", - "integrity": "sha1-CweNVReTfYcxAUUtkUZzdVe3XlE=", - "dev": true, + "version": "0.8.8", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.8.tgz", + "integrity": "sha512-b7Z7cNtHPhH9EJhNNbbeqTcXB8LGFFZhq1PGgEvpeHlzd36bhbdTWoE/Ba/YguqpBSlAPKnARWhVlhunCMwfxg==", "optional": true, "requires": { - "nan": "^2.0.8" + "nan": "^2.14.0" } }, "duplexify": { "version": "3.7.1", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", - "integrity": "sha1-Kk31MX9sz9kfhtb9JdjYoQO4gwk=", + "integrity": "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==", "requires": { "end-of-stream": "^1.0.0", "inherits": "^2.0.1", "readable-stream": "^2.0.0", "stream-shift": "^1.0.0" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.6", - "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha1-sRwn2IuP8fvgcGQ8+UsMea4bCq8=", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha1-nPFhG6YmhdcDCunkujQUnDrwP8g=", - "requires": { - "safe-buffer": "~5.1.0" - } - } } }, "ecc-jsbn": { @@ -1286,7 +1828,7 @@ "ecdsa-sig-formatter": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", - "integrity": "sha1-rg8PothQRe8UqBfao86azQSJ5b8=", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", "requires": { "safe-buffer": "^5.0.1" } @@ -1299,20 +1841,26 @@ "emitter-listener": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz", - "integrity": "sha1-VrFA6PaZI3Wz18ssqxzHQy2WMug=", + "integrity": "sha512-Bt1sBAGFHY9DKY+4/2cV6izcKJUf5T7/gkdmkxzX/qv9CcGH8xSwVRW5mtX03SWJtRTWSOpzCuWN9rBFYZepZQ==", "requires": { "shimmer": "^1.2.0" } }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, "encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" }, "end-of-stream": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", - "integrity": "sha1-7SljTRm6ukY7bOa4CjchPqtx7EM=", + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "requires": { "once": "^1.4.0" } @@ -1332,22 +1880,22 @@ } }, "es-abstract": { - "version": "1.17.0-next.1", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.0-next.1.tgz", - "integrity": "sha512-7MmGr03N7Rnuid6+wyhD9sHNE2n4tFSwExnU2lQl3lIo2ShXWGePY80zYaoMOmILWv57H0amMjZGHNzzGG70Rw==", + "version": "1.17.4", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.4.tgz", + "integrity": "sha512-Ae3um/gb8F0mui/jPL+QiqmglkUsaQf7FwBEHYIFkztkneosu9imhqHpBzQ3h1vit8t5iQ74t6PEVvphBZiuiQ==", "dev": true, "requires": { "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", "has": "^1.0.3", "has-symbols": "^1.0.1", - "is-callable": "^1.1.4", - "is-regex": "^1.0.4", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", "object-inspect": "^1.7.0", "object-keys": "^1.1.1", "object.assign": "^4.1.0", - "string.prototype.trimleft": "^2.1.0", - "string.prototype.trimright": "^2.1.0" + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" } }, "es-to-primitive": { @@ -1364,7 +1912,7 @@ "es6-promise": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", - "integrity": "sha1-TrIVlMlyvEBVPSduUQU5FD21Pgo=" + "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==" }, "es6-promisify": { "version": "5.0.0", @@ -1385,9 +1933,9 @@ "integrity": 
"sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" }, "eslint": { - "version": "6.4.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.4.0.tgz", - "integrity": "sha512-WTVEzK3lSFoXUovDHEbkJqCVPEPwbhCq4trDktNI6ygs7aO41d4cDT0JFAT5MivzZeVLWlg7vHL+bgrQv/t3vA==", + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", + "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", "dev": true, "requires": { "@babel/code-frame": "^7.0.0", @@ -1397,19 +1945,19 @@ "debug": "^4.0.1", "doctrine": "^3.0.0", "eslint-scope": "^5.0.0", - "eslint-utils": "^1.4.2", + "eslint-utils": "^1.4.3", "eslint-visitor-keys": "^1.1.0", - "espree": "^6.1.1", + "espree": "^6.1.2", "esquery": "^1.0.1", "esutils": "^2.0.2", "file-entry-cache": "^5.0.1", "functional-red-black-tree": "^1.0.1", "glob-parent": "^5.0.0", - "globals": "^11.7.0", + "globals": "^12.1.0", "ignore": "^4.0.6", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", - "inquirer": "^6.4.1", + "inquirer": "^7.0.0", "is-glob": "^4.0.0", "js-yaml": "^3.13.1", "json-stable-stringify-without-jsonify": "^1.0.1", @@ -1418,7 +1966,7 @@ "minimatch": "^3.0.4", "mkdirp": "^0.5.1", "natural-compare": "^1.4.0", - "optionator": "^0.8.2", + "optionator": "^0.8.3", "progress": "^2.0.0", "regexpp": "^2.0.1", "semver": "^6.1.2", @@ -1438,24 +1986,33 @@ "ms": "^2.1.1" } }, + "globals": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-12.3.0.tgz", + "integrity": "sha512-wAfjdLgFsPZsklLJvOBUBmzYE8/CwhEqSBEMRXA3qxIiNtyqvjYurAtIfDh6chlEPUfmTY3MnZh5Hfh4q0UlIw==", + "dev": true, + "requires": { + "type-fest": "^0.8.1" + } + }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, - "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", "dev": true } } }, "eslint-config-prettier": { - "version": "6.7.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.7.0.tgz", - "integrity": "sha512-FamQVKM3jjUVwhG4hEMnbtsq7xOIDm+SY5iBPfR8gKsJoAB2IQnNF+bk1+8Fy44Nq7PPJaLvkRxILYdJWoguKQ==", + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.10.0.tgz", + "integrity": "sha512-AtndijGte1rPILInUdHjvKEGbIV06NuvPrqlIEaEaWtbtvJh464mDeyGMdZEQMsGvC0ZVkiex1fSNcC4HAbRGg==", "dev": true, "requires": { "get-stdin": "^6.0.0" @@ -1468,19 +2025,19 @@ "dev": true }, "eslint-import-resolver-node": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.2.tgz", - "integrity": "sha512-sfmTqJfPSizWu4aymbPr4Iidp5yKm8yDkHp+Ir3YiTHiiDfxh69mOUsmiqW6RZ9zRXFaF64GtYmN7e+8GHBv6Q==", + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.3.tgz", + "integrity": "sha512-b8crLDo0M5RSe5YG8Pu2DYBj71tSB6OvXkfzwbJU2w7y8P4/yo0MyF8jU26IEuEuHF2K5/gcAJE3LhQGqBBbVg==", "dev": true, "requires": { "debug": "^2.6.9", - "resolve": 
"^1.5.0" + "resolve": "^1.13.1" } }, "eslint-module-utils": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.5.0.tgz", - "integrity": "sha512-kCo8pZaNz2dsAW7nCUjuVoI11EBXXpIzfNxmaoLhXoRDOnqXLC4iSGVRdZPhOitfbdEfMEfKOiENaK6wDPZEGw==", + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.5.2.tgz", + "integrity": "sha512-LGScZ/JSlqGKiT8OC+cYRxseMjyqt6QO54nl281CK93unD89ijSeRV6An8Ci/2nvWVKe8K/Tqdm75RQoIOCr+Q==", "dev": true, "requires": { "debug": "^2.6.9", @@ -1518,9 +2075,9 @@ } }, "eslint-plugin-import": { - "version": "2.19.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.19.1.tgz", - "integrity": "sha512-x68131aKoCZlCae7rDXKSAQmbT5DQuManyXo2sK6fJJ0aK5CWAkv6A6HJZGgqC8IhjQxYPgo6/IY4Oz8AFsbBw==", + "version": "2.20.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.20.1.tgz", + "integrity": "sha512-qQHgFOTjguR+LnYRoToeZWT62XM55MBVXObHM6SKFd1VzDcX/vqT1kAz8ssqigh5eMj8qXcRoXXGZpPP6RfdCw==", "dev": true, "requires": { "array-includes": "^3.0.3", @@ -1546,25 +2103,28 @@ "esutils": "^2.0.2", "isarray": "^1.0.0" } - }, - "resolve": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.13.1.tgz", - "integrity": "sha512-CxqObCX8K8YtAhOBRg+lrcdn+LK+WYOS8tSjqSFbjtrI5PnS63QPhZl4+yKfrU9tdsbMu9Anr/amegT87M9Z6w==", - "dev": true, - "requires": { - "path-parse": "^1.0.6" - } } } }, "eslint-plugin-mocha": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-6.2.2.tgz", - "integrity": "sha512-oNhPzfkT6Q6CJ0HMVJ2KLxEWG97VWGTmuHOoRcDLE0U88ugUyFNV9wrT2XIt5cGtqc5W9k38m4xTN34L09KhBA==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-6.3.0.tgz", + "integrity": "sha512-Cd2roo8caAyG21oKaaNTj7cqeYRWW1I2B5SfpKRp0Ip1gkfwoR1Ow0IGlPWnNjzywdF4n+kHL8/9vM6zCJUxdg==", "dev": true, "requires": { - "ramda": "^0.26.1" + "eslint-utils": "^2.0.0", + "ramda": "^0.27.0" + }, + "dependencies": { + "eslint-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz", + "integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + } } }, "eslint-plugin-node": { @@ -1661,9 +2221,9 @@ "dev": true }, "esquery": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.0.1.tgz", - "integrity": "sha512-SmiyZ5zIWH9VM+SRUReLS5Q8a7GxtRdxEBVZpm98rJM7Sb+A9DVCndXfkeFUd3byderg+EbDkfnevfCwynWaNA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.1.0.tgz", + "integrity": "sha512-MxYW9xKmROWF672KqjO75sszsA8Mxhw06YFeS5VHlB98KDHbOSurm3ArsjO60Eaf3QmGMCP1yn+0JQkNLo/97Q==", "dev": true, "requires": { "estraverse": "^4.0.0" @@ -1698,7 +2258,23 @@ "event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", - "integrity": "sha1-XU0+vflYPWOlMzzi3rdICrKwV4k=" + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" + }, + "eventid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/eventid/-/eventid-1.0.0.tgz", + "integrity": "sha512-4upSDsvpxhWPsmw4fsJCp0zj8S7I0qh1lCDTmZXP8V3TtryQKDI8CgQPN+e5JakbWwzaAX3lrdp2b3KSoMSUpw==", + "requires": { + 
"d64": "^1.0.0", + "uuid": "^3.0.1" + }, + "dependencies": { + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + } + } }, "events": { "version": "1.1.1", @@ -1706,114 +2282,46 @@ "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=" }, "express": { - "version": "4.16.3", - "resolved": "https://registry.npmjs.org/express/-/express-4.16.3.tgz", - "integrity": "sha1-avilAjUNsyRuzEvs9rWjTSL37VM=", + "version": "4.17.1", + "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz", + "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==", "requires": { - "accepts": "~1.3.5", + "accepts": "~1.3.7", "array-flatten": "1.1.1", - "body-parser": "1.18.2", - "content-disposition": "0.5.2", + "body-parser": "1.19.0", + "content-disposition": "0.5.3", "content-type": "~1.0.4", - "cookie": "0.3.1", + "cookie": "0.4.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "~1.1.2", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", - "finalhandler": "1.1.1", + "finalhandler": "~1.1.2", "fresh": "0.5.2", "merge-descriptors": "1.0.1", "methods": "~1.1.2", "on-finished": "~2.3.0", - "parseurl": "~1.3.2", + "parseurl": "~1.3.3", "path-to-regexp": "0.1.7", - "proxy-addr": "~2.0.3", - "qs": "6.5.1", - "range-parser": "~1.2.0", - "safe-buffer": "5.1.1", - "send": "0.16.2", - "serve-static": "1.13.2", - "setprototypeof": "1.1.0", - "statuses": "~1.4.0", - "type-is": "~1.6.16", + "proxy-addr": "~2.0.5", + "qs": "6.7.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.1.2", + "send": "0.17.1", + "serve-static": "1.14.1", + "setprototypeof": "1.1.1", + "statuses": "~1.5.0", + "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" - }, - "dependencies": { - "body-parser": { - "version": "1.18.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.2.tgz", - "integrity": "sha1-h2eKGdhLR9hZuDGZvVm84iKxBFQ=", - "requires": { - "bytes": "3.0.0", - "content-type": "~1.0.4", - "debug": "2.6.9", - "depd": "~1.1.1", - "http-errors": "~1.6.2", - "iconv-lite": "0.4.19", - "on-finished": "~2.3.0", - "qs": "6.5.1", - "raw-body": "2.3.2", - "type-is": "~1.6.15" - } - }, - "iconv-lite": { - "version": "0.4.19", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz", - "integrity": "sha1-90aPYBNfXl2tM5nAqBvpoWA6CCs=" - }, - "qs": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", - "integrity": "sha1-NJzfbu+J7EXBLX1es/wMhwNDptg=" - }, - "raw-body": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.2.tgz", - "integrity": "sha1-vNYMd9Prk83gBQKVw/N5OJvIj4k=", - "requires": { - "bytes": "3.0.0", - "http-errors": "1.6.2", - "iconv-lite": "0.4.19", - "unpipe": "1.0.0" - }, - "dependencies": { - "depd": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.1.tgz", - "integrity": "sha1-V4O04cRZ8G+lyif5kfPQbnoxA1k=" - }, - "http-errors": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.2.tgz", - "integrity": "sha1-CgAsyFcHGSp+eUbO7cERVfYOxzY=", - "requires": { - "depd": "1.1.1", - "inherits": "2.0.3", - "setprototypeof": "1.0.3", - "statuses": ">= 1.3.1 < 2" - } - }, - "setprototypeof": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.3.tgz", - 
"integrity": "sha1-ZlZ+NwQ+608E2RvWWMDL77VbjgQ=" - } - } - }, - "statuses": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", - "integrity": "sha1-u3PURtonlhBu/MG2AaJT1sRr0Ic=" - } } }, "extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha1-+LETa0Bx+9jrFAr/hYsQGewpFfo=" + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, "external-editor": { "version": "3.1.0", @@ -1824,17 +2332,6 @@ "chardet": "^0.7.0", "iconv-lite": "^0.4.24", "tmp": "^0.0.33" - }, - "dependencies": { - "iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "dev": true, - "requires": { - "safer-buffer": ">= 2.1.2 < 3" - } - } } }, "extsprintf": { @@ -1843,9 +2340,9 @@ "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" }, "fast-deep-equal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", - "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz", + "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA==" }, "fast-diff": { "version": "1.2.0", @@ -1854,9 +2351,9 @@ "dev": true }, "fast-json-stable-stringify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" }, "fast-levenshtein": { "version": "2.0.6", @@ -1867,12 +2364,12 @@ "fast-text-encoding": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz", - "integrity": "sha1-PlzoKTQJz6pxd6cbnKhOGx5vJe8=" + "integrity": "sha512-R9bHCvweUxxwkDwhjav5vxpFvdPGlVngtqmx4pIZfSUhM/Q4NiIUHB456BAf+Q1Nwu3HEZYONtu+Rya+af4jiQ==" }, "figures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", "dev": true, "requires": { "escape-string-regexp": "^1.0.5" @@ -1890,27 +2387,20 @@ "file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha1-VTp7hEb/b2hDWcRF8eN6BdrMM90=" + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" }, "finalhandler": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.1.tgz", - "integrity": "sha1-7r9O2EAHnIP0JJA4ydcDAIMBsQU=", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", + "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==", "requires": { "debug": "2.6.9", "encodeurl": 
"~1.0.2", "escape-html": "~1.0.3", "on-finished": "~2.3.0", - "parseurl": "~1.3.2", - "statuses": "~1.4.0", + "parseurl": "~1.3.3", + "statuses": "~1.5.0", "unpipe": "~1.0.0" - }, - "dependencies": { - "statuses": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", - "integrity": "sha1-u3PURtonlhBu/MG2AaJT1sRr0Ic=" - } } }, "find-up": { @@ -1938,20 +2428,6 @@ "write": "1.0.3" }, "dependencies": { - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, "rimraf": { "version": "2.6.3", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", @@ -1972,7 +2448,7 @@ "follow-redirects": { "version": "1.5.10", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", - "integrity": "sha1-e3qfmuov3/NnhqlP9kPtB/T/Xio=", + "integrity": "sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==", "requires": { "debug": "=3.1.0" }, @@ -1980,13 +2456,28 @@ "debug": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha1-W7WgZyYotkFJVmuhaBnmFRjGcmE=", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", "requires": { "ms": "2.0.0" } } } }, + "forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + }, + "form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, "forwarded": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", @@ -2025,22 +2516,23 @@ "dev": true }, "gaxios": { - "version": "1.8.4", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", - "integrity": "sha1-4Iw0/pPAqbZ6Ure556ZOZDX5ozk=", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.3.1.tgz", + "integrity": "sha512-DQOesWEx59/bm63lTX0uHDDXpGTW9oKqNsoigwCoRe2lOb5rFqxzHjLTa6aqEBecLcz69dHLw7rbS068z1fvIQ==", "requires": { "abort-controller": "^3.0.0", "extend": "^3.0.2", - "https-proxy-agent": "^2.2.1", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", "node-fetch": "^2.3.0" } }, "gcp-metadata": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz", - "integrity": "sha1-UhJEAin6CZ/C98KlzcuVV16bLKY=", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-3.4.0.tgz", + "integrity": "sha512-fizmBtCXHp8b7FZuzbgKaixO8DzsSYoEVmMgZIna7x8t6cfBF3eqirODWYxVbgmasA5qudCAKiszfB7yVwroIQ==", "requires": { - "gaxios": "^1.0.2", + "gaxios": "^2.1.0", "json-bigint": "^0.3.0" } }, @@ -2104,58 +2596,76 @@ "dev": true }, "google-auth-library": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", - "integrity": "sha1-/y+IzVzSEYpXvT1a08CTyIN/w1A=", + 
"version": "5.10.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", + "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", "requires": { + "arrify": "^2.0.0", "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", "fast-text-encoding": "^1.0.0", - "gaxios": "^1.2.1", - "gcp-metadata": "^1.0.0", - "gtoken": "^2.3.2", - "https-proxy-agent": "^2.2.1", - "jws": "^3.1.5", - "lru-cache": "^5.0.0", - "semver": "^5.5.0" - }, - "dependencies": { - "semver": { - "version": "5.7.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz", - "integrity": "sha1-eQp89v6lRZuslhELKbYEEtyP+Ws=" - } + "gaxios": "^2.1.0", + "gcp-metadata": "^3.4.0", + "gtoken": "^4.1.0", + "jws": "^4.0.0", + "lru-cache": "^5.0.0" + } + }, + "google-gax": { + "version": "1.14.2", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-1.14.2.tgz", + "integrity": "sha512-Nde+FdqALbV3QgMA4KlkxOHfrj9busnZ3EECwy/1gDJm9vhKGwDLWzErqRU5g80OoGSAMgyY7DWIfqz7ina4Jw==", + "requires": { + "@grpc/grpc-js": "^0.6.18", + "@grpc/proto-loader": "^0.5.1", + "@types/fs-extra": "^8.0.1", + "@types/long": "^4.0.0", + "abort-controller": "^3.0.0", + "duplexify": "^3.6.0", + "google-auth-library": "^5.0.0", + "is-stream-ended": "^0.1.4", + "lodash.at": "^4.6.0", + "lodash.has": "^4.5.2", + "node-fetch": "^2.6.0", + "protobufjs": "^6.8.8", + "retry-request": "^4.0.0", + "semver": "^6.0.0", + "walkdir": "^0.4.0" } }, "google-p12-pem": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", - "integrity": "sha1-t3+4M6Lrn388aJ4uVPCVJ293dgU=", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-2.0.4.tgz", + "integrity": "sha512-S4blHBQWZRnEW44OcR7TL9WR+QCqByRvhNDZ/uuQfpxywfupikf/miba8js1jZi6ZOGv5slgSuoshCWh6EMDzg==", "requires": { - "node-forge": "^0.8.0", - "pify": "^4.0.0" + "node-forge": "^0.9.0" } }, "graceful-fs": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", - "integrity": "sha1-Dovf5NHduIVNZOBOp8AOKgJuVlg=" + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==" + }, + "growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==" }, "gtoken": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", - "integrity": "sha1-in/hVcXODEtxyIbPsoKpBg2UpkE=", + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-4.1.4.tgz", + "integrity": "sha512-VxirzD0SWoFUo5p8RDP8Jt2AGyOmyYcT/pOUgDKJCK+iSw0TMqwrVfY37RXTNmoKwrzmDHSk0GMT9FsgVmnVSA==", "requires": { - "gaxios": "^1.0.4", - "google-p12-pem": "^1.0.0", - "jws": "^3.1.5", - "mime": "^2.2.0", - "pify": "^4.0.0" + "gaxios": "^2.1.0", + "google-p12-pem": "^2.0.0", + "jws": "^4.0.0", + "mime": "^2.2.0" }, "dependencies": { "mime": { "version": "2.4.4", "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", - "integrity": "sha1-vXuRE1/GsBzePpuuM9ZZtj2IV+U=" + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" } } }, @@ -2167,7 +2677,7 @@ "har-validator": { "version": "5.1.3", "resolved": 
"https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", - "integrity": "sha1-HvievT5JllV2de7ZiTEQ3DUPoIA=", + "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", "requires": { "ajv": "^6.5.5", "har-schema": "^2.0.0" @@ -2216,30 +2726,59 @@ "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=" }, "heapdump": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.9.tgz", - "integrity": "sha1-A8dOsN9dZ74Jgug0KbqcnSs7f3g=" + "version": "0.3.15", + "resolved": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.15.tgz", + "integrity": "sha512-n8aSFscI9r3gfhOcAECAtXFaQ1uy4QSke6bnaL+iymYZ/dWs9cqDqHM+rALfsHUwukUbxsdlECZ0pKmJdQ/4OA==", + "requires": { + "nan": "^2.13.2" + } }, "hex2dec": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", - "integrity": "sha1-jhzkvvNqdPfVcjw/swkMKGAHczg=" + "integrity": "sha512-Yu+q/XWr2fFQ11tHxPq4p4EiNkb2y+lAacJNhAdRXVfRIcDH6gi7htWFnnlIzvqHMHoWeIsfXlNAjZInpAOJDA==" }, "hosted-git-info": { - "version": "2.8.5", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz", - "integrity": "sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg==", + "version": "2.8.7", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.7.tgz", + "integrity": "sha512-ChkjQtKJ3GI6SsI4O5jwr8q8EPrWCnxuc4Tbx+vRI5x6mDOpjKKltNo1lRlszw3xwgTOSns1ZRBiMmmwpcvLxg==", "dev": true }, "http-errors": { - "version": "1.6.3", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", - "integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=", + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz", + "integrity": "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==", "requires": { "depd": "~1.1.2", "inherits": "2.0.3", - "setprototypeof": "1.1.0", - "statuses": ">= 1.4.0 < 2" + "setprototypeof": "1.1.1", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.0" + } + }, + "http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "requires": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } } }, "http-signature": { @@ -2253,18 +2792,18 @@ } }, "https-proxy-agent": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz", - "integrity": "sha1-UVUpcPoE1yPgTFbQQXjD+SWSu8A=", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", + "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", "requires": { - "agent-base": "^4.1.0", - "debug": "^3.1.0" + "agent-base": "6", + "debug": "4" }, "dependencies": { "debug": { - 
"version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha1-6D0X3hbYp++3cX7b5fsQE17uYps=", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", "requires": { "ms": "^2.1.1" } @@ -2272,22 +2811,22 @@ "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha1-0J0fNXtEP0kzgqjrPM0YOHKuYAk=" + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" } } }, "iconv-lite": { - "version": "0.4.23", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz", - "integrity": "sha1-KXhx9jvlB63Pv8pxXQzQ7thOmmM=", + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", "requires": { "safer-buffer": ">= 2.1.2 < 3" } }, "ieee754": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz", - "integrity": "sha1-vjPUCsEO8ZJnAfbwii2G+/0a0+Q=" + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", + "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==" }, "ignore": { "version": "4.0.6", @@ -2332,35 +2871,35 @@ "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" }, "inquirer": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", - "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.4.tgz", + "integrity": "sha512-Bu5Td5+j11sCkqfqmUTiwv+tWisMtP0L7Q8WrqA2C/BbBhy1YTdFrvjjlrKq8oagA/tLQBski2Gcx/Sqyi2qSQ==", "dev": true, "requires": { - "ansi-escapes": "^3.2.0", + "ansi-escapes": "^4.2.1", "chalk": "^2.4.2", - "cli-cursor": "^2.1.0", + "cli-cursor": "^3.1.0", "cli-width": "^2.0.0", "external-editor": "^3.0.3", - "figures": "^2.0.0", - "lodash": "^4.17.12", - "mute-stream": "0.0.7", + "figures": "^3.0.0", + "lodash": "^4.17.15", + "mute-stream": "0.0.8", "run-async": "^2.2.0", - "rxjs": "^6.4.0", - "string-width": "^2.1.0", + "rxjs": "^6.5.3", + "string-width": "^4.1.0", "strip-ansi": "^5.1.0", "through": "^2.3.6" } }, "ipaddr.js": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.8.0.tgz", - "integrity": "sha1-6qM9bd16zo9/b+DJygRA5wZzix4=" + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" }, "is": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz", - "integrity": "sha1-Yc/23TxBk9uUo9YlggcrROVkXXk=" + "integrity": "sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg==" }, "is-arrayish": { "version": "0.2.1", @@ -2369,20 +2908,20 @@ "dev": true }, "is-buffer": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.3.tgz", - "integrity": "sha1-Ts8/z3ScvR5HJonhCaxmJhol5yU=" + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", + "integrity": 
"sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==" }, "is-callable": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", - "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==", + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", "dev": true }, "is-date-object": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz", - "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", "dev": true }, "is-extglob": { @@ -2392,9 +2931,9 @@ "dev": true }, "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true }, "is-glob": { @@ -2406,6 +2945,11 @@ "is-extglob": "^2.1.1" } }, + "is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" + }, "is-promise": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", @@ -2421,6 +2965,22 @@ "has": "^1.0.3" } }, + "is-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", + "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==" + }, + "is-stream-ended": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/is-stream-ended/-/is-stream-ended-0.1.4.tgz", + "integrity": "sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw==" + }, + "is-string": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "dev": true + }, "is-symbol": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", @@ -2499,7 +3059,7 @@ "json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha1-afaofZUTq4u4/mO9sJecRI5oRmA=" + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" }, "json-stable-stringify-without-jsonify": { "version": "1.0.1", @@ -2532,15 +3092,15 @@ } }, "just-extend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.0.2.tgz", - "integrity": "sha512-FrLwOgm+iXrPV+5zDU6Jqu4gCRXbWEQg2O3SKONsWE4w7AXFRkryS53bpWdaL9cNol+AmR3AEYz6kn+o0fCPnw==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.1.0.tgz", + "integrity": 
"sha512-ApcjaOdVTJ7y4r08xI5wIqpvwS48Q0PBG4DJROcEkH1f8MdAiNFyFxz3xoL0LWAVwjrwPYZdVHHxhRHcx/uGLA==", "dev": true }, "jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha1-dDwymFy56YZVUw1TZBtmyGRbA5o=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", "requires": { "buffer-equal-constant-time": "1.0.1", "ecdsa-sig-formatter": "1.0.11", @@ -2548,11 +3108,11 @@ } }, "jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha1-ABCZ82OUaMlBQADpmZX6UvtHgwQ=", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", "requires": { - "jwa": "^1.4.1", + "jwa": "^2.0.0", "safe-buffer": "^5.0.1" } }, @@ -2609,12 +3169,27 @@ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" }, + "lodash.at": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", + "integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g=" + }, + "lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" + }, "lodash.get": { "version": "4.4.2", "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", "dev": true }, + "lodash.has": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", + "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=" + }, "lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", @@ -2638,78 +3213,33 @@ "integrity": "sha1-vyJJiGzlFM2hEvrpIYzcBlIR/Jw=", "dev": true }, + "log-driver": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz", + "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==" + }, "logger-sharelatex": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.7.0.tgz", - "integrity": "sha1-XuMje84im1rITZ7SLoXa6eI3/HQ=", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.0.tgz", + "integrity": "sha512-yVTuha82047IiMOQLgQHCZGKkJo6I2+2KtiFKpgkIooR2yZaoTEvAeoMwBesSDSpGUpvUJ/+9UI+PmRyc+PQKQ==", "requires": { + "@google-cloud/logging-bunyan": "^2.0.0", + "@overleaf/o-error": "^2.0.0", "bunyan": "1.8.12", "raven": "1.1.3", - "request": "2.88.0" + "request": "2.88.0", + "yn": "^3.1.1" }, "dependencies": { - "bunyan": { - "version": "1.8.12", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", - "integrity": "sha1-8VDw9nSKvdcq6uhPBEA74u8RN5c=", - "requires": { - "dtrace-provider": "~0.8", - "moment": "^2.10.6", - "mv": "~2", - "safe-json-stringify": "~1" - } - }, - "caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" - }, - "combined-stream": { - "version": "1.0.8", - "resolved": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha1-w9RaizT9cwYxoRCoolIGgrMdWn8=", - "requires": { - "delayed-stream": "~1.0.0" - } - }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" - }, - "dtrace-provider": { - "version": "0.8.7", - "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.7.tgz", - "integrity": "sha1-3JObTT4GIM/gwc2APQ0tftBP/QQ=", - "optional": true, - "requires": { - "nan": "^2.10.0" - } - }, - "forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" - }, - "form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha1-3M5SwF9kTymManq5Nr1yTO/786Y=", - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - } - }, - "oauth-sign": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": "sha1-R6ewFrqmi1+g7PPe4IqFxnmsZFU=" + "qs": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" }, "request": { "version": "2.88.0", "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "integrity": "sha1-nC/KT301tZLv5Xx/ClXoEFIST+8=", + "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", "requires": { "aws-sign2": "~0.7.0", "aws4": "^1.8.0", @@ -2733,30 +3263,17 @@ "uuid": "^3.3.2" } }, - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha1-mR7GnSluAxN0fVm9/St0XDX4go0=" - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", - "requires": { - "safe-buffer": "^5.0.1" - } - }, "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha1-G0r0lV6zB3xQHCOHL8ZROBFYcTE=" + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" } } }, "loglevel": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.6.6.tgz", - "integrity": "sha512-Sgr5lbboAUBo3eXCSPL4/KoVz3ROKquOjcctxmHIt+vol2DrqTQe3SwkKKuYhEiWB5kYa13YyopJ69deJ1irzQ==", + "version": "1.6.7", + "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.6.7.tgz", + "integrity": "sha512-cY2eLFrQSAfVPhCgH1s7JI73tMbg9YC3v3+ZHVW67sBS7UxWzNEk/ZBbSfLykBWHp33dqqtOv82gjhKEi81T/A==", "dev": true }, "loglevel-colored-level-prefix": { @@ -2814,18 +3331,18 @@ "lolex": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/lolex/-/lolex-3.1.0.tgz", - "integrity": "sha1-Gn/rL+/XWz46f3nw4RDZR24pRDQ=", + "integrity": "sha512-zFo5MgCJ0rZ7gQg69S4pqBsLURbFw11X68C18OcJjJQbqaXm2NoTrGl1IMM3TIz0/BnN1tIs2tzmmqvCsOMMjw==", "dev": true }, "long": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", - "integrity": "sha1-mntxz7fTYaGU6lVSQckvdGjVvyg=" + "integrity": 
"sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" }, "lru-cache": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha1-HaJ+ZxAnGUdpXa9oSOhH8B2EuSA=", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "requires": { "yallist": "^3.0.2" } @@ -2865,8 +3382,7 @@ "map-obj": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", - "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==", - "dev": true + "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==" }, "media-typer": { "version": "0.3.0", @@ -2912,9 +3428,9 @@ "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" }, "metrics-sharelatex": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.2.0.tgz", - "integrity": "sha1-RM9oy9FuUQYgfrZ+PvkAhaQWwqk=", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.4.0.tgz", + "integrity": "sha512-FbIRRhReVCEM4ETzh+qVMm3lP33zSSAdrHfSTtegkcB7GGi1kYs+Qt1/dXFawUA8pIZRQTtsfxiS1nZamiSwHg==", "requires": { "@google-cloud/debug-agent": "^3.0.0", "@google-cloud/profiler": "^0.2.3", @@ -2922,7 +3438,8 @@ "coffee-script": "1.6.0", "lynx": "~0.1.1", "prom-client": "^11.1.3", - "underscore": "~1.6.0" + "underscore": "~1.6.0", + "yn": "^3.1.1" }, "dependencies": { "underscore": { @@ -2933,33 +3450,33 @@ } }, "mime": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz", - "integrity": "sha1-Eh+evEnjdm8xGnbh+hyAA8SwOqY=" + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" }, "mime-db": { - "version": "1.36.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.36.0.tgz", - "integrity": "sha1-UCBHjbPH/pOq17vMTc+GnEM2M5c=" + "version": "1.43.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz", + "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ==" }, "mime-types": { - "version": "2.1.20", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.20.tgz", - "integrity": "sha1-kwy3GdVx6QNzhSD4RwkRVIyizBk=", + "version": "2.1.26", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", + "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", "requires": { - "mime-db": "~1.36.0" + "mime-db": "1.43.0" } }, "mimic-fn": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", "dev": true }, "minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "requires": { 
"brace-expansion": "^1.1.7" } @@ -2973,7 +3490,6 @@ "version": "0.5.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", - "dev": true, "requires": { "minimist": "0.0.8" } @@ -2981,7 +3497,7 @@ "mocha": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", - "integrity": "sha1-bYrlCPWRZ/lA8rWzxKYSrlDJCuY=", + "integrity": "sha512-2IUgKDhc3J7Uug+FxMXuqIyYzH7gJjXECKe/w43IGgQHTSj3InJi+yAA7T24L9bQMRKiUEHxEX37G5JpVUGLcQ==", "requires": { "browser-stdout": "1.3.1", "commander": "2.15.1", @@ -2996,28 +3512,18 @@ "supports-color": "5.4.0" }, "dependencies": { - "commander": { - "version": "2.15.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", - "integrity": "sha1-30boZ9D8Kuxmo0ZitAapzK//Ww8=" - }, "debug": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha1-W7WgZyYotkFJVmuhaBnmFRjGcmE=", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", "requires": { "ms": "2.0.0" } }, - "diff": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "integrity": "sha1-gAwN0eCov7yVg1wgKtIg/jF+WhI=" - }, "glob": { "version": "7.1.2", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", - "integrity": "sha1-wZyd+aAocC1nhhI4SmVSQExjbRU=", + "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==", "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -3026,27 +3532,6 @@ "once": "^1.3.0", "path-is-absolute": "^1.0.0" } - }, - "growl": { - "version": "1.10.5", - "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", - "integrity": "sha1-8nNdwig2dPpnR4sQGBBZNVw2nl4=" - }, - "mkdirp": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", - "requires": { - "minimist": "0.0.8" - } - }, - "supports-color": { - "version": "5.4.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", - "integrity": "sha1-HGszdALCE3YF7+GfEP7DkPb6q1Q=", - "requires": { - "has-flag": "^3.0.0" - } } } }, @@ -3058,7 +3543,7 @@ "moment": { "version": "2.24.0", "resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz", - "integrity": "sha1-DQVdU/UFKqZTyfbraLtdEr9cK1s=", + "integrity": "sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg==", "optional": true }, "ms": { @@ -3067,9 +3552,9 @@ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, "mute-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", "dev": true }, "mv": { @@ -3083,13 +3568,17 @@ "rimraf": "~2.4.0" }, "dependencies": { - "mkdirp": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "glob": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", "optional": true, "requires": { - "minimist": "0.0.8" + "inflight": "^1.0.4", + "inherits": "2", 
+ "minimatch": "2 || 3", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" } }, "rimraf": { @@ -3099,30 +3588,14 @@ "optional": true, "requires": { "glob": "^6.0.1" - }, - "dependencies": { - "glob": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", - "optional": true, - "requires": { - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "2 || 3", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - } } } } }, "nan": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.11.0.tgz", - "integrity": "sha1-V042Dk2VSrFpZuwQLAwEn9lhoJk=", - "optional": true + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", + "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" }, "natural-compare": { "version": "1.4.0", @@ -3137,9 +3610,9 @@ "optional": true }, "negotiator": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", - "integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=" + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", + "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==" }, "nice-try": { "version": "1.0.5", @@ -3148,15 +3621,15 @@ "dev": true }, "nise": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/nise/-/nise-1.5.0.tgz", - "integrity": "sha1-0D6g5sG3XGOAFao1he3cEylJpQ0=", + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/nise/-/nise-1.5.3.tgz", + "integrity": "sha512-Ymbac/94xeIrMf59REBPOv0thr+CJVFMhrlAkW/gjCIE58BGQdCj0x7KRCb3yz+Ga2Rz3E9XXSvUyyxqqhjQAQ==", "dev": true, "requires": { - "@sinonjs/formatio": "^3.1.0", + "@sinonjs/formatio": "^3.2.1", "@sinonjs/text-encoding": "^0.7.1", "just-extend": "^4.0.2", - "lolex": "^4.1.0", + "lolex": "^5.0.1", "path-to-regexp": "^1.7.0" }, "dependencies": { @@ -3167,15 +3640,18 @@ "dev": true }, "lolex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/lolex/-/lolex-4.1.0.tgz", - "integrity": "sha1-7N17hlOTkdgjeUejQZqorJdfD+E=", - "dev": true + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-5.1.2.tgz", + "integrity": "sha512-h4hmjAvHTmd+25JSwrtTIuwbKdwg5NzZVRMLn9saij4SZaepCrTCxPr35H/3bjwfMJtN+t3CX8672UIkglz28A==", + "dev": true, + "requires": { + "@sinonjs/commons": "^1.7.0" + } }, "path-to-regexp": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", - "integrity": "sha1-Wf3g9DW62suhA6hOnTvGTpa5k30=", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", + "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", "dev": true, "requires": { "isarray": "0.0.1" @@ -3186,12 +3662,12 @@ "node-fetch": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", - "integrity": "sha1-5jNFY4bUqlWGP2dqerDaqP3ssP0=" + "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==" }, "node-forge": { - "version": "0.8.4", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.4.tgz", - "integrity": "sha1-1nOGYrZhvhnicR7wGqOxghLxMDA=" + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.1.tgz", + "integrity": 
"sha512-G6RlQt5Sb4GMBzXvhfkeFmbqR6MzhtnT7VTHuLadjkii3rdYHNdw0m8zA4BTxVIh68FicCQ2NSUANpsqkr9jvQ==" }, "node-uuid": { "version": "1.4.8", @@ -3218,6 +3694,11 @@ } } }, + "oauth-sign": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" + }, "object-inspect": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", @@ -3271,12 +3752,12 @@ } }, "onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", + "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", "dev": true, "requires": { - "mimic-fn": "^1.0.0" + "mimic-fn": "^2.1.0" } }, "optionator": { @@ -3300,9 +3781,9 @@ "dev": true }, "p-limit": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", - "integrity": "sha1-QXyZQeYCepq8ulCS3SkE4lW1+8I=", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.2.tgz", + "integrity": "sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==", "requires": { "p-try": "^2.0.0" } @@ -3336,7 +3817,7 @@ "p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha1-yyhoVA4xPWHeWPr741zpAE1VQOY=" + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" }, "parent-module": { "version": "1.0.1", @@ -3348,9 +3829,9 @@ } }, "parse-duration": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.1.tgz", - "integrity": "sha1-ExFN3JiRwezSgANiRFVN5DZHoiY=" + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.2.tgz", + "integrity": "sha512-0qfMZyjOUFBeEIvJ5EayfXJqaEXxQ+Oj2b7tWJM3hvEXvXsYCk05EDVI23oYnEw2NaFYUWdABEVPBvBMh8L/pA==" }, "parse-json": { "version": "2.2.0", @@ -3364,12 +3845,12 @@ "parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", - "integrity": "sha1-NIVlp1PUOR+lJAKZVrFyy3dTCX0=" + "integrity": "sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==" }, "parseurl": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", - "integrity": "sha1-/CidTtiZMRlGDBViUyYs3I3mW/M=" + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" }, "path-exists": { "version": "3.0.0", @@ -3397,7 +3878,7 @@ "path-parse": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha1-1i27VnlAXXLEc37FhgDp3c8G0kw=" + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" }, "path-to-regexp": { "version": "0.1.7", @@ -3435,7 +3916,7 @@ "pify": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha1-SyzSXFDVmHNcUCkiJP2MbfQeMjE=" + "integrity": 
"sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==" }, "pkg-dir": { "version": "2.0.0", @@ -3591,26 +4072,6 @@ "escape-string-regexp": "^1.0.5" } }, - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true - }, "inquirer": { "version": "6.5.2", "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", @@ -3878,26 +4339,6 @@ "integrity": "sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==", "dev": true }, - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true - }, "ignore": { "version": "5.1.4", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.4.tgz", @@ -4072,15 +4513,15 @@ "pretty-ms": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz", - "integrity": "sha1-Mbr0G5T9AiJwmKqgO9YmCOsNbpI=", + "integrity": "sha512-qG66ahoLCwpLXD09ZPHSCbUWYTqdosB7SMP4OffgTgL2PBKXMuUsrk5Bwg8q4qPkjTXsKBMr+YK3Ltd/6F9s/Q==", "requires": { "parse-ms": "^2.0.0" } }, "process-nextick-args": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", - "integrity": "sha1-o31zL0JxtKsa0HDTVQjoKQeI/6o=" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, "progress": { "version": "2.0.3", @@ -4089,9 +4530,9 @@ "dev": true }, "prom-client": { - "version": "11.5.1", - "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.1.tgz", - "integrity": "sha1-FcZsrN7EUwELz68EEJvMNOa92pw=", + "version": "11.5.3", + "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.3.tgz", + "integrity": "sha512-iz22FmTbtkyL2vt0MdDFY+kWof+S9UB/NACxSn2aJcewtw+EERsen0urSkZ2WrHseNdydsvcxCTAnPcSMZZv4Q==", "requires": { "tdigest": "^0.1.1" } @@ -4099,7 +4540,7 @@ "protobufjs": { "version": "6.8.8", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", - "integrity": "sha1-yLTxKC/XqQ5vWxCe0RyEr4KQjnw=", + "integrity": "sha512-AAmHtD5pXgZfi7GMpllpO3q1Xw1OYldr+dMUlAnffGTAhqkg72WdmSY71uKBF/JuyiKs8psYbtKrhi0ASCD8qw==", "requires": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -4114,28 +4555,63 @@ "@types/long": "^4.0.0", 
"@types/node": "^10.1.0", "long": "^4.0.0" - }, - "dependencies": { - "@types/node": { - "version": "10.14.9", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.14.9.tgz", - "integrity": "sha1-Lo1ngDnSeUPOU6GRM4YTMif9kGY=" - } } }, "proxy-addr": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz", - "integrity": "sha1-7PxzO/Iv+Mb0B/onUye5q2fki5M=", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz", + "integrity": "sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw==", "requires": { "forwarded": "~0.1.2", - "ipaddr.js": "1.8.0" + "ipaddr.js": "1.9.1" } }, "psl": { - "version": "1.1.32", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.32.tgz", - "integrity": "sha1-PxMnF88vnBaXJLK2yvNzz2lBmNs=" + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.7.0.tgz", + "integrity": "sha512-5NsSEDv8zY70ScRnOTn7bK7eanl2MvFrOrS/R6x+dBt5g1ghnj9Zv90kO8GwT8gxcu2ANyFprnFYB85IogIJOQ==" + }, + "pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "pumpify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pumpify/-/pumpify-2.0.1.tgz", + "integrity": "sha512-m7KOje7jZxrmutanlkS1daj1dS6z6BgslzOXmcSEpIlCxM3VJH7lG5QLeck/6hgF6F4crFf01UtQmNsJfweTAw==", + "requires": { + "duplexify": "^4.1.1", + "inherits": "^2.0.3", + "pump": "^3.0.0" + }, + "dependencies": { + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } }, "punycode": { "version": "1.3.2", @@ -4143,9 +4619,9 @@ "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" }, "qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha1-yzroBuh0BERYTvFUzo7pjUA/PjY=" + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", + "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==" }, "querystring": { "version": "0.2.0", @@ -4159,15 +4635,15 @@ "dev": true }, "ramda": { - "version": "0.26.1", - "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.26.1.tgz", - "integrity": "sha512-hLWjpy7EnsDBb0p+Z3B7rPi3GDeRG5ZtiI33kJhTt+ORCd38AbAIjB/9zRIUoeTbE/AVX5ZkU7m6bznsvrf8eQ==", + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.27.0.tgz", + "integrity": "sha512-pVzZdDpWwWqEVVLshWUHjNwuVP7SfcmPraYuqocJp1yo2U1R7P+5QAfDhdItkuoGqIBnBYrtPp7rEPqDn9HlZA==", "dev": true }, "range-parser": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", - "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=" + 
"version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" }, "raven": { "version": "1.1.3", @@ -4181,6 +4657,11 @@ "uuid": "3.0.0" }, "dependencies": { + "cookie": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", + "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" + }, "uuid": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", @@ -4189,13 +4670,13 @@ } }, "raw-body": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.3.tgz", - "integrity": "sha1-GzJOzmtXBuFThVvBFIxlu39uoMM=", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", + "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==", "requires": { - "bytes": "3.0.0", - "http-errors": "1.6.3", - "iconv-lite": "0.4.23", + "bytes": "3.1.0", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", "unpipe": "1.0.0" } }, @@ -4220,6 +4701,20 @@ "read-pkg": "^2.0.0" } }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, "regexpp": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", @@ -4227,9 +4722,9 @@ "dev": true }, "request": { - "version": "2.88.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", + "version": "2.88.2", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", + "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", "requires": { "aws-sign2": "~0.7.0", "aws4": "^1.8.0", @@ -4238,7 +4733,7 @@ "extend": "~3.0.2", "forever-agent": "~0.6.1", "form-data": "~2.3.2", - "har-validator": "~5.1.0", + "har-validator": "~5.1.3", "http-signature": "~1.2.0", "is-typedarray": "~1.0.0", "isstream": "~0.1.2", @@ -4248,66 +4743,34 @@ "performance-now": "^2.1.0", "qs": "~6.5.2", "safe-buffer": "^5.1.2", - "tough-cookie": "~2.4.3", + "tough-cookie": "~2.5.0", "tunnel-agent": "^0.6.0", "uuid": "^3.3.2" }, "dependencies": { - "caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" }, - "combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "qs": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": 
"sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" + }, + "tough-cookie": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", "requires": { - "delayed-stream": "~1.0.0" - } - }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" - }, - "forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" - }, - "form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - } - }, - "oauth-sign": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" - }, - "safe-buffer": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", - "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==" - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", - "requires": { - "safe-buffer": "^5.0.1" + "psl": "^1.1.28", + "punycode": "^2.1.1" } }, "uuid": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz", - "integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ==" + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" } } }, @@ -4336,19 +4799,19 @@ "dev": true }, "require-in-the-middle": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-4.0.0.tgz", - "integrity": "sha1-PHUoik7EgM30S8d950T4q+WFQFs=", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-4.0.1.tgz", + "integrity": "sha512-EfkM2zANyGkrfIExsECMeNn/uzjvHrE9h36yLXSavmrDiH4tgDNvltAmEKnt4PNLbqKPHZz+uszW2wTKrLUX0w==", "requires": { "debug": "^4.1.1", "module-details-from-path": "^1.0.3", - "resolve": "^1.10.0" + "resolve": "^1.12.0" }, "dependencies": { "debug": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha1-O3ImAlUQnGtYnO4FDx1RYTlmR5E=", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", "requires": { "ms": "^2.1.1" } @@ -4356,7 +4819,7 @@ "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha1-0J0fNXtEP0kzgqjrPM0YOHKuYAk=" + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" } } }, @@ -4379,9 +4842,9 @@ "dev": true }, "resolve": { - "version": "1.11.0", - "resolved": 
"https://registry.npmjs.org/resolve/-/resolve-1.11.0.tgz", - "integrity": "sha1-QBSHC6KWF2uGND1Qtg87UGCc4jI=", + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", + "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", "requires": { "path-parse": "^1.0.6" } @@ -4393,26 +4856,42 @@ "dev": true }, "restore-cursor": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", "dev": true, "requires": { - "onetime": "^2.0.0", + "onetime": "^5.1.0", "signal-exit": "^3.0.2" } }, "retry-axios": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz", - "integrity": "sha1-V1fID1hbTMTEmGqi/9R6YMbTXhM=" + "integrity": "sha512-jp4YlI0qyDFfXiXGhkCOliBN1G7fRH03Nqy8YdShzGqbY5/9S2x/IR6C88ls2DFkbWuL3ASkP7QD3pVrNpPgwQ==" }, "retry-request": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.0.0.tgz", - "integrity": "sha1-XDZhZiebPhDp16oTJ0RnoFy2kpA=", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.1.1.tgz", + "integrity": "sha512-BINDzVtLI2BDukjWmjAIRZ0oglnCAkpP2vQjM3jdLhmT62h0xnQgciPwBRDAvHqpkPT2Wo1XuUyLyn6nbGrZQQ==", "requires": { - "through2": "^2.0.0" + "debug": "^4.1.1", + "through2": "^3.0.1" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } } }, "rimraf": { @@ -4421,38 +4900,38 @@ "integrity": "sha1-5Dm+Kq7jJzIZUnMPmaiSnk/FBYI=" }, "run-async": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz", - "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.0.tgz", + "integrity": "sha512-xJTbh/d7Lm7SBhc1tNvTpeCHaEzoyxPrqNlvSdMfBTYwaY++UJFyXUOxAtsRUXjlqOfj8luNaR9vjCh4KeV+pg==", "dev": true, "requires": { "is-promise": "^2.1.0" } }, "rxjs": { - "version": "6.5.3", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.3.tgz", - "integrity": "sha512-wuYsAYYFdWTAnAaPoKGNhfpWwKZbJW+HgAJ+mImp+Epl7BG8oNWBCTyRM8gba9k4lk8BgWdoYm21Mo/RYhhbgA==", + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.4.tgz", + "integrity": "sha512-naMQXcgEo3csAEGvw/NydRA0fuS2nDZJiw1YUWFKU7aPPAPGZEsD4Iimit96qwCieH6y614MCLYwdkrWx7z/7Q==", "dev": true, "requires": { "tslib": "^1.9.0" } }, "safe-buffer": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", - "integrity": "sha1-iTMSr2myEj3vcfV4iQAWce6yyFM=" + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, 
"safe-json-stringify": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", - "integrity": "sha1-NW5EvJjx+TzkXfFLzXwBzahuCv0=", + "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", "optional": true }, "safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha1-RPoWGwGHuVSd2Eu5GAL5vYOFzWo=" + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "sandboxed-module": { "version": "2.0.3", @@ -4470,14 +4949,14 @@ "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" }, "semver": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.1.tgz", - "integrity": "sha1-U/U9qbMLIQPNTxXqs6GOy8shDJs=" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" }, "send": { - "version": "0.16.2", - "resolved": "https://registry.npmjs.org/send/-/send-0.16.2.tgz", - "integrity": "sha1-bsyh4PjBVtFBWXVZhI32RzCmu8E=", + "version": "0.17.1", + "resolved": "https://registry.npmjs.org/send/-/send-0.17.1.tgz", + "integrity": "sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==", "requires": { "debug": "2.6.9", "depd": "~1.1.2", @@ -4486,30 +4965,30 @@ "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", - "http-errors": "~1.6.2", - "mime": "1.4.1", - "ms": "2.0.0", + "http-errors": "~1.7.2", + "mime": "1.6.0", + "ms": "2.1.1", "on-finished": "~2.3.0", - "range-parser": "~1.2.0", - "statuses": "~1.4.0" + "range-parser": "~1.2.1", + "statuses": "~1.5.0" }, "dependencies": { - "statuses": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", - "integrity": "sha1-u3PURtonlhBu/MG2AaJT1sRr0Ic=" + "ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" } } }, "serve-static": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.2.tgz", - "integrity": "sha1-CV6Ecv1bRiN9tQzkhqQ/S4bGzsE=", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz", + "integrity": "sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==", "requires": { "encodeurl": "~1.0.2", "escape-html": "~1.0.3", - "parseurl": "~1.3.2", - "send": "0.16.2" + "parseurl": "~1.3.3", + "send": "0.17.1" } }, "set-blocking": { @@ -4519,23 +4998,16 @@ "dev": true }, "setprototypeof": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", - "integrity": "sha1-0L2FU2iHtv58DYGMuWLZ2RxU5lY=" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", + "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" }, "settings-sharelatex": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/settings-sharelatex/-/settings-sharelatex-1.1.0.tgz", - "integrity": "sha1-Tv4vUpPbjxwVlnEEx5BfqHD/mS0=", + "integrity": "sha512-f7D+0lnlohoteSn6IKTH72NE+JnAdMWTKwQglAuimZWTID2FRRItZSGeYMTRpvEnaQApkoVwRp//WRMsiddnqw==", 
"requires": { "coffee-script": "1.6.0" - }, - "dependencies": { - "coffee-script": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" - } } }, "shebang-command": { @@ -4556,7 +5028,7 @@ "shimmer": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", - "integrity": "sha1-YQhZ994ye1h+/r9QH7QxF/mv8zc=" + "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" }, "signal-exit": { "version": "3.0.2", @@ -4567,7 +5039,7 @@ "sinon": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/sinon/-/sinon-7.1.1.tgz", - "integrity": "sha1-EgLzF6oU2Ty5tp/1C2vUnA4F/8k=", + "integrity": "sha512-iYagtjLVt1vN3zZY7D8oH7dkjNJEjLjyuzy8daX5+3bbQl8gaohrheB9VfH1O3L6LKuue5WTJvFluHiuZ9y3nQ==", "dev": true, "requires": { "@sinonjs/commons": "^1.2.0", @@ -4579,12 +5051,23 @@ "nise": "^1.4.6", "supports-color": "^5.5.0", "type-detect": "^4.0.8" + }, + "dependencies": { + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } } }, "sinon-chai": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/sinon-chai/-/sinon-chai-3.3.0.tgz", - "integrity": "sha512-r2JhDY7gbbmh5z3Q62pNbrjxZdOAjpsqW/8yxAZRSqLZqowmfGZPGUZPFf3UX36NLis0cv8VEM5IJh9HgkSOAA==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/sinon-chai/-/sinon-chai-3.5.0.tgz", + "integrity": "sha512-IifbusYiQBpUxxFJkR3wTU68xzBN0+bxCScEaKMjBvAQERg6FnTTc1F17rseLb1tjmkJ23730AXpFI0c47FgAg==", "dev": true }, "slice-ansi": { @@ -4606,10 +5089,19 @@ } } }, + "snakecase-keys": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.1.2.tgz", + "integrity": "sha512-NrzHj8ctStnd1LYx3+L4buS7yildFum7WAbQQxkhPCNi3Qeqv7hoBne2c9n++HWxDG9Nv23pNEyyLCITZTv24Q==", + "requires": { + "map-obj": "^4.0.0", + "to-snake-case": "^1.0.0" + } + }, "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha1-dHIq8y6WFOnCh6jQu95IteLxomM=" + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, "spdx-correct": { "version": "3.1.0", @@ -4646,7 +5138,7 @@ "split": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", - "integrity": "sha1-YFvZvjA6pZ+zX5Ip++oN3snqB9k=", + "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", "requires": { "through": "2" } @@ -4660,7 +5152,7 @@ "sshpk": { "version": "1.16.1", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", - "integrity": "sha1-+2YcC+8ps520B2nuOfpwCT1vaHc=", + "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", "requires": { "asn1": "~0.2.3", "assert-plus": "^1.0.0", @@ -4698,42 +5190,26 @@ "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz", "integrity": "sha1-GBwI1bs2kARfaUAbmuanoM8zE/w=" }, + "stream-events": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", + "integrity": 
"sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", + "requires": { + "stubs": "^3.0.0" + } + }, "stream-meter": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/stream-meter/-/stream-meter-1.0.4.tgz", "integrity": "sha1-Uq+Vql6nYKJJFxZwTb/5D3Ov3R0=", "requires": { "readable-stream": "^2.1.4" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "requires": { - "safe-buffer": "~5.1.0" - } - } } }, "stream-shift": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", - "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", + "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==" }, "streamifier": { "version": "0.1.1", @@ -4742,30 +5218,31 @@ "dev": true }, "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", "dev": true, "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" }, "dependencies": { "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", "dev": true, "requires": { - "ansi-regex": "^3.0.0" + "ansi-regex": "^5.0.0" } } } }, "string.prototype.trimleft": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz", - "integrity": "sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz", + "integrity": "sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag==", "dev": true, "requires": { "define-properties": "^1.1.3", @@ -4773,15 +5250,23 @@ } }, "string.prototype.trimright": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz", - "integrity": 
"sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz", + "integrity": "sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g==", "dev": true, "requires": { "define-properties": "^1.1.3", "function-bind": "^1.1.1" } }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + }, "strip-ansi": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", @@ -4811,11 +5296,15 @@ "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==", "dev": true }, + "stubs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", + "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls=" + }, "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha1-4uaaRKyHcveKHsCzW2id9lMO/I8=", - "dev": true, + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", + "integrity": "sha512-zjaXglF5nnWpsq470jSv6P9DwPvgLkuapYmfDm3JWOm0vkNTVF2tI4UrN2r6jH1qM/uc/WtxYY1hYoA2dOKj5w==", "requires": { "has-flag": "^3.0.0" } @@ -4832,18 +5321,6 @@ "string-width": "^3.0.0" }, "dependencies": { - "ajv": { - "version": "6.10.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz", - "integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==", - "dev": true, - "requires": { - "fast-deep-equal": "^2.0.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, "emoji-regex": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", @@ -4878,19 +5355,21 @@ } }, "teeny-request": { - "version": "3.11.3", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", - "integrity": "sha1-M1xin3ZF5dZZk2LfLzIwxMvCOlU=", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-6.0.2.tgz", + "integrity": "sha512-B6fxA0fSnY/bul06NggdN1nywtr5U5Uvt96pHfTi8pi4MNe6++VUWcAAFBrcMeha94s+gULwA5WvagoSZ+AcYg==", "requires": { - "https-proxy-agent": "^2.2.1", + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", "node-fetch": "^2.2.0", + "stream-events": "^1.0.5", "uuid": "^3.3.2" }, "dependencies": { "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha1-G0r0lV6zB3xQHCOHL8ZROBFYcTE=" + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" } } }, @@ -4906,36 +5385,11 @@ "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" }, "through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha1-AcHjnrMdB8t9A6lqcIIyYLIxMs0=", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz", + "integrity": 
"sha512-M96dvTalPT3YbYLaKaCuwu+j06D/8Jfib0o/PxbVt6Amhv3dUAtW6rTV1jPgJSBG83I/e04Y6xkVdVhSRhi0ww==", "requires": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha1-sRwn2IuP8fvgcGQ8+UsMea4bCq8=", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha1-nPFhG6YmhdcDCunkujQUnDrwP8g=", - "requires": { - "safe-buffer": "~5.1.0" - } - } + "readable-stream": "2 || 3" } }, "tmp": { @@ -4953,10 +5407,36 @@ "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", "dev": true }, + "to-no-case": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", + "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" + }, + "to-snake-case": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", + "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", + "requires": { + "to-space-case": "^1.0.0" + } + }, + "to-space-case": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", + "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", + "requires": { + "to-no-case": "^1.0.0" + } + }, + "toidentifier": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", + "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" + }, "tough-cookie": { "version": "2.4.3", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", - "integrity": "sha1-U/Nto/R3g7CSWvoG/587FlKA94E=", + "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", "requires": { "psl": "^1.1.24", "punycode": "^1.4.1" @@ -4970,11 +5450,19 @@ } }, "tslib": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.10.0.tgz", - "integrity": "sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==", + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.1.tgz", + "integrity": "sha512-aZW88SY8kQbU7gpV19lN24LtXh/yD4ZZg6qieAJDDg+YBsJcSmLGK9QpnUjAKVG/xefmvJGd1WUmfpT/g6AJGA==", "dev": true }, + "tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "requires": { + "safe-buffer": "^5.0.1" + } + }, "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", @@ -4992,22 +5480,27 @@ "type-detect": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", - "integrity": "sha1-dkb7XxiHHPu3dJ5pvTmmOI63RQw=", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", "dev": true }, + "type-fest": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", + "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==" + }, "type-is": { - 
"version": "1.6.16", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz", - "integrity": "sha1-+JzjQVQcZysl7nrjxz3uOyvlAZQ=", + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", "requires": { "media-typer": "0.3.0", - "mime-types": "~2.1.18" + "mime-types": "~2.1.24" } }, "typescript": { - "version": "3.7.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.3.tgz", - "integrity": "sha512-Mcr/Qk7hXqFBXMN7p7Lusj1ktCBydylfQM/FZCk5glCNQJrCUKPkMHdo9R0MTFWsC/4kPFvDS0fDPvukfCkFsw==", + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.2.tgz", + "integrity": "sha512-EgOVgL/4xfVrCMbhYKUQTdF37SQn4Iw73H5BgCrF1Abdun7Kwy/QZsE/ssAy0y4LxBbvua3PIbFsbRczWWnDdQ==", "dev": true }, "underscore": { @@ -5023,7 +5516,7 @@ "uri-js": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", - "integrity": "sha1-lMVA4f93KVbiKZUHwBCupsiDjrA=", + "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", "requires": { "punycode": "^2.1.0" }, @@ -5031,7 +5524,7 @@ "punycode": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha1-tYsBCsQMIsVldhbI0sLALHv0eew=" + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" } } }, @@ -5159,6 +5652,11 @@ } } }, + "walkdir": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/walkdir/-/walkdir-0.4.1.tgz", + "integrity": "sha512-3eBwRyEln6E1MSzcxcVpQIhRG8Q1jLvEqRmCZqS3dsfXEDR/AhOF4d+jHg1qvDCpYaVRZjENPQyrVxAkQqxPgQ==" + }, "which": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", @@ -5244,11 +5742,6 @@ "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=" }, - "xtend": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", - "integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=" - }, "y18n": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", @@ -5256,9 +5749,9 @@ "dev": true }, "yallist": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", - "integrity": "sha1-tLBJ4xS+VF486AIjbWzSLNkcPek=" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" }, "yargs": { "version": "13.3.0", @@ -5340,6 +5833,11 @@ "camelcase": "^5.0.0", "decamelize": "^1.2.0" } + }, + "yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==" } } } From ebabe526321ff6ea4bad69f1fc24b26569f8ce29 Mon Sep 17 00:00:00 2001 From: Brian Gough Date: Fri, 28 Feb 2020 15:27:29 +0000 Subject: [PATCH 438/555] fix settings unit test --- services/filestore/test/unit/js/SettingsTests.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/services/filestore/test/unit/js/SettingsTests.js b/services/filestore/test/unit/js/SettingsTests.js index 4563449fde..91981c7de8 100644 --- a/services/filestore/test/unit/js/SettingsTests.js +++ 
b/services/filestore/test/unit/js/SettingsTests.js @@ -1,5 +1,6 @@ const chai = require('chai') const { expect } = chai +const SandboxedModule = require('sandboxed-module') describe('Settings', function() { describe('s3', function() { @@ -11,8 +12,9 @@ describe('Settings', function() { } } process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3Settings) - - const settings = require('settings-sharelatex') + const settings = SandboxedModule.require('settings-sharelatex', { + globals: { console } + }) expect(settings.filestore.s3BucketCreds).to.deep.equal(s3Settings) }) }) From c13d7f4197b0b60818e14f96b7846c21463bd4bc Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 2 Mar 2020 11:15:56 +0000 Subject: [PATCH 439/555] Bump aws-sdk version in package.json --- services/filestore/package-lock.json | 430 +++++++++++++-------------- services/filestore/package.json | 2 +- 2 files changed, 216 insertions(+), 216 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 2217f87c53..f8b4a70297 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -28,7 +28,7 @@ "source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", "dev": true } } @@ -451,7 +451,7 @@ "arrify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" + "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==" }, "debug": { "version": "3.2.6", @@ -852,7 +852,7 @@ "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", - "integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78=" + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" }, "@protobufjs/base64": { "version": "1.1.2", @@ -867,12 +867,12 @@ "@protobufjs/eventemitter": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", - "integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A=" + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" }, "@protobufjs/fetch": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", - "integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", "requires": { "@protobufjs/aspromise": "^1.1.1", "@protobufjs/inquire": "^1.1.0" @@ -881,27 +881,27 @@ "@protobufjs/float": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", - "integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=" + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" }, "@protobufjs/inquire": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", - "integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=" + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" }, "@protobufjs/path": { "version": "1.1.2", "resolved": 
"https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", - "integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=" + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" }, "@protobufjs/pool": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", - "integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=" + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" }, "@protobufjs/utf8": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", - "integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=" + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" }, "@sindresorhus/is": { "version": "0.15.0", @@ -1197,12 +1197,12 @@ "array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" }, "array-from": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz", - "integrity": "sha1-z+nYwmYoudxa7MYqn12PHzUsEZU=", + "integrity": "sha512-GQTc6Uupx1FCavi5mPzBvVT7nEOeWMmUA9P95wpfpW1XwMSKs+KaymD5C2Up7KAUKg/mYwbsUYzdZWcoajlNZg==", "dev": true }, "array-includes": { @@ -1242,7 +1242,7 @@ "assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" }, "assertion-error": { "version": "1.1.0", @@ -1259,7 +1259,7 @@ "async": { "version": "0.2.10", "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", - "integrity": "sha1-trvgsGdLnXGXCMo43owjfLUmw9E=" + "integrity": "sha512-eAkdoKxU6/LkKDBzLpT+t6Ff5EtfSF4wx1WfJiPEEV7WNLnDaRXk0oVysiEPm262roaachGexwUv94WhSgN5TQ==" }, "async-listener": { "version": "0.6.10", @@ -1280,7 +1280,7 @@ "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "aws-sdk": { "version": "2.628.0", @@ -1308,7 +1308,7 @@ "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" + "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==" }, "aws4": { "version": "1.9.1", @@ -1341,7 +1341,7 @@ "balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" + "integrity": "sha512-9Y0g0Q8rmSt+H33DfKv7FOc3v+iRI+o1lbzt8jGcIosYW37IIW/2XVYq5NPdmaD5NQ59Nk26Kl/vZbwW9Fr8vg==" }, "base64-js": { "version": "1.3.1", @@ -1351,7 +1351,7 @@ "bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", "requires": { "tweetnacl": "^0.14.3" } @@ -1372,7 +1372,7 @@ "bintrees": { "version": "1.0.1", 
"resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", - "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" + "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" }, "body-parser": { "version": "1.19.0", @@ -1394,7 +1394,7 @@ "boolify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/boolify/-/boolify-1.0.1.tgz", - "integrity": "sha1-tcCeF8rNET0Rt7s+04TMASmU2Gs=", + "integrity": "sha512-ma2q0Tc760dW54CdOyJjhrg/a54317o1zYADQJFgperNGKIKgAUGIcKnuMiff8z57+yGlrGNEt4lPgZfCgTJgA==", "dev": true }, "brace-expansion": { @@ -1414,7 +1414,7 @@ "buffer": { "version": "4.9.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", - "integrity": "sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=", + "integrity": "sha512-DNK4ruAqtyHaN8Zne7PkBTO+dD1Lr0YfTduMqlIyjvQIoztBkUxrvL+hKeLW8NXFKHOq/2upkxuoS9znQ9bW9A==", "requires": { "base64-js": "^1.0.2", "ieee754": "^1.1.4", @@ -1424,7 +1424,7 @@ "buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", - "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=" + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" }, "builtin-modules": { "version": "3.1.0", @@ -1434,7 +1434,7 @@ "bunyan": { "version": "1.8.12", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", - "integrity": "sha1-8VDw9nSKvdcq6uhPBEA74u8RN5c=", + "integrity": "sha512-dmDUbGHeGcvCDLRFOscZkwx1ZO/aFz3bJOCi5nCgzdhFGPxwK+y5AcDBnqagNGlJZ7lje/l6JUEz9mQcutttdg==", "requires": { "dtrace-provider": "~0.8", "moment": "^2.10.6", @@ -1473,7 +1473,7 @@ "caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==" }, "chai": { "version": "4.2.0", @@ -1518,7 +1518,7 @@ "check-error": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", - "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "integrity": "sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==", "dev": true }, "cli-cursor": { @@ -1533,7 +1533,7 @@ "cli-width": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", - "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=", + "integrity": "sha512-EJLbKSuvHTrVRynOXCYFTbQKZOFXWNe3/6DN1yrEH3TuuZT1x4dMQnCHnfCrBUUiGjO63enEIfaB17VaRl2d4A==", "dev": true }, "cliui": { @@ -1556,7 +1556,7 @@ "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", "dev": true }, "string-width": { @@ -1575,7 +1575,7 @@ "coffee-script": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M=" + "integrity": "sha512-Tx8itEfCsQp8RbLDFt7qwjqXycAx2g6SI7//4PPUR2j6meLmNifYm6zKrNDcU1+Q/GWRhjhEZk7DaLG1TfIzGA==" }, "color-convert": { "version": "1.9.3", @@ -1589,7 +1589,7 @@ "color-name": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": 
"sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", "dev": true }, "combined-stream": { @@ -1614,7 +1614,7 @@ "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, "console-log-level": { "version": "1.4.1", @@ -1624,7 +1624,7 @@ "contains-path": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", - "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", + "integrity": "sha512-OKZnPGeMQy2RPaUIBPFFd71iNf4791H12MCRuVQDnzGRwCYNYmTDy5pdafo2SLAcEMKzTOQnLWG4QdcjeJUMEg==", "dev": true }, "content-disposition": { @@ -1657,7 +1657,7 @@ "cookie-signature": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" }, "core-js": { "version": "3.6.4", @@ -1668,7 +1668,7 @@ "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" }, "cross-spawn": { "version": "6.0.5", @@ -1694,12 +1694,12 @@ "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" + "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" }, "dashdash": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", "requires": { "assert-plus": "^1.0.0" } @@ -1715,7 +1715,7 @@ "decamelize": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", "dev": true }, "deep-eql": { @@ -1730,7 +1730,7 @@ "deep-is": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", - "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", + "integrity": "sha512-GtxAN4HvBachZzm4OnWqc45ESpUCMwkYcsjnsPs23FwJbsO+k4t0k9bQCgOmzIlpHO28+WPK/KRbRk0DDHuuDw==", "dev": true }, "define-properties": { @@ -1750,17 +1750,17 @@ "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, "depd": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" + "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==" }, "destroy": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", - "integrity": 
"sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" + "integrity": "sha512-3NdhDuEXnfun/z7x9GOElY49LoqVHoGScmOKwmxhsS8N5Y+Z8KyPPDnaSzqWgYt/ji4mqwfTS34Htrk0zPIXVg==" }, "diff": { "version": "3.5.0", @@ -1819,7 +1819,7 @@ "ecc-jsbn": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", "requires": { "jsbn": "~0.1.0", "safer-buffer": "^2.1.0" @@ -1836,7 +1836,7 @@ "ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, "emitter-listener": { "version": "1.1.2", @@ -1855,7 +1855,7 @@ "encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==" }, "end-of-stream": { "version": "1.4.4", @@ -1868,7 +1868,7 @@ "ent": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", - "integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=" + "integrity": "sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==" }, "error-ex": { "version": "1.3.2", @@ -1917,7 +1917,7 @@ "es6-promisify": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", + "integrity": "sha512-C+d6UdsYDk0lMebHNR4S2NybQMMngAOnOwYBQjTOiv0MkoJMP0Myw2mgpDLBcpfCmRLxyFqYhS/CfOENq4SJhQ==", "requires": { "es6-promise": "^4.0.3" } @@ -1925,12 +1925,12 @@ "escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" }, "escape-string-regexp": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" }, "eslint": { "version": "6.8.0", @@ -2097,7 +2097,7 @@ "doctrine": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", - "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", + "integrity": "sha512-lsGyRuYr4/PIB0txi+Fy2xOMI2dGaTguCaotzFGkVZuKR5usKfcRWIFKNM3QNrU7hh/+w2bwTW+ZeXPK5l8uVg==", "dev": true, "requires": { "esutils": "^2.0.2", @@ -2253,7 +2253,7 @@ "etag": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==" }, "event-target-shim": { "version": "5.0.1", @@ -2279,7 +2279,7 @@ "events": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", - "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=" + "integrity": "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==" }, "express": { "version": "4.17.1", @@ -2337,7 +2337,7 @@ "extsprintf": { 
"version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" + "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==" }, "fast-deep-equal": { "version": "3.1.1", @@ -2358,7 +2358,7 @@ "fast-levenshtein": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", "dev": true }, "fast-text-encoding": { @@ -2406,7 +2406,7 @@ "find-up": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "integrity": "sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==", "dev": true, "requires": { "locate-path": "^2.0.0" @@ -2415,7 +2415,7 @@ "findit2": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", - "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" + "integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==" }, "flat-cache": { "version": "2.0.1", @@ -2466,7 +2466,7 @@ "forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==" }, "form-data": { "version": "2.3.3", @@ -2481,17 +2481,17 @@ "forwarded": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", - "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" + "integrity": "sha512-Ua9xNhH0b8pwE3yRbFfXJvfdWF0UHNCdeyb2sbi9Ul/M+r3PTdrz7Cv4SCfZRMjmzEM9PhraqfZFbGTIg3OMyA==" }, "fresh": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==" }, "fs-extra": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-1.0.0.tgz", - "integrity": "sha1-zTzl9+fLYUWIP8rjGR6Yd/hYeVA=", + "integrity": "sha512-VerQV6vEKuhDWD2HGOybV6v5I73syoc/cXAbKlgTC7M/oFVEtklWlp9QH2Ijw3IaWDOQcMkldSPa7zXy79Z/UQ==", "requires": { "graceful-fs": "^4.1.2", "jsonfile": "^2.1.0", @@ -2501,7 +2501,7 @@ "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, "function-bind": { "version": "1.1.1", @@ -2512,7 +2512,7 @@ "functional-red-black-tree": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", + "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", "dev": true }, "gaxios": { @@ -2545,7 +2545,7 @@ "get-func-name": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", - "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "integrity": 
"sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==", "dev": true }, "get-stdin": { @@ -2557,7 +2557,7 @@ "getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", "requires": { "assert-plus": "^1.0.0" } @@ -2565,7 +2565,7 @@ "gettemporaryfilepath": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/gettemporaryfilepath/-/gettemporaryfilepath-0.0.1.tgz", - "integrity": "sha1-uKLHAUu1zUFTTpg7XKFgo3RwhGk=" + "integrity": "sha512-7avwQWP8MP42u7mtc+KjCRuUE3nafRJPuGaZaySD9NN1KEbfVTfSAywP4KOkK8gaxhdOxx11ZTWH28DwjAF70Q==" }, "glob": { "version": "7.1.6", @@ -2672,7 +2672,7 @@ "har-schema": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" + "integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==" }, "har-validator": { "version": "5.1.3", @@ -2695,7 +2695,7 @@ "has-ansi": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", - "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", + "integrity": "sha512-C8vBJ8DwUCx19vhm7urhTuUsr4/IyP6l4VzNQDv+ryHQObW3TTTp9yB68WpYgRe2bbaGuZ/se74IqFeVnMnLZg==", "dev": true, "requires": { "ansi-regex": "^2.0.0" @@ -2704,7 +2704,7 @@ "ansi-regex": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "integrity": "sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==", "dev": true } } @@ -2712,7 +2712,7 @@ "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, "has-symbols": { "version": "1.0.1", @@ -2723,7 +2723,7 @@ "he": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=" + "integrity": "sha512-z/GDPjlRMNOa2XJiB4em8wJpuuBfrFOlYKTZxtpkdr1uPdibHI8rYA3MY0KDObpVyaes0e/aunid/t88ZI2EKA==" }, "heapdump": { "version": "0.3.15", @@ -2784,7 +2784,7 @@ "http-signature": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==", "requires": { "assert-plus": "^1.0.0", "jsprim": "^1.2.2", @@ -2847,7 +2847,7 @@ "imurmurhash": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", "dev": true }, "indent-string": { @@ -2859,7 +2859,7 @@ "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", "requires": { "once": "^1.3.0", "wrappy": "1" @@ -2868,7 +2868,7 @@ "inherits": { 
"version": "2.0.3", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==" }, "inquirer": { "version": "7.0.4", @@ -2904,7 +2904,7 @@ "is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", "dev": true }, "is-buffer": { @@ -2927,7 +2927,7 @@ "is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "dev": true }, "is-fullwidth-code-point": { @@ -2953,7 +2953,7 @@ "is-promise": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", - "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=", + "integrity": "sha512-NECAi6wp6CgMesHuVUEK8JwjCvm/tvnn5pCbB42JOHp3mgUizN0nagXu4HEqQZBkieGEQ+jVcMKWqoVd6CDbLQ==", "dev": true }, "is-regex": { @@ -2993,28 +2993,28 @@ "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" }, "isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" }, "isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true }, "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==" }, "jmespath": { "version": "0.15.0", "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", - "integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=" + "integrity": "sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w==" }, "js-tokens": { "version": "4.0.0", @@ -3035,7 +3035,7 @@ "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==" }, "jsesc": { "version": "2.5.2", @@ -3046,7 +3046,7 @@ "json-bigint": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", - "integrity": "sha1-DM2RLEuCcNBfBW+9E4FLU9OCWx4=", + "integrity": "sha512-u+c/u/F+JNPUekHCFyGVycRPyh9UHD5iUhSyIAn10kxbDTJxijwAbT6XHaONEOXuGGfmWUSroheXgHcml4gLgg==", "requires": { "bignumber.js": "^7.0.0" } @@ -3054,7 +3054,7 @@ "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": 
"sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + "integrity": "sha512-a3xHnILGMtk+hDOqNwHzF6e2fNbiMrXZvxKQiEv2MlgQP+pjIOzqAmKYD2mDpXYE/44M7g+n9p2bKkYWDUcXCQ==" }, "json-schema-traverse": { "version": "0.4.1", @@ -3064,18 +3064,18 @@ "json-stable-stringify-without-jsonify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", "dev": true }, "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" }, "jsonfile": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", - "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", + "integrity": "sha512-PKllAqbgLgxHaj8TElYymKCAgrASebJrWpTnEkOaTowt23VKXXN0sUeriJ+eh7y6ufb/CC5ap11pz71/cM0hUw==", "requires": { "graceful-fs": "^4.1.6" } @@ -3083,7 +3083,7 @@ "jsprim": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "integrity": "sha512-4Dj8Rf+fQ+/Pn7C5qeEX02op1WfOss3PKTE9Nsop3Dx+6UPxlm1dr/og7o2cRa5hNN07CACr4NFzRLtj/rjWog==", "requires": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", @@ -3119,7 +3119,7 @@ "klaw": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz", - "integrity": "sha1-QIhDO0azsbolnXh4XY6W9zugJDk=", + "integrity": "sha512-TED5xi9gGQjGpNnvRWknrwAB1eL5GciPfVFOt3Vk1OJCVDQbzuSfrF3hkUQKlsgKrG1F+0t5W0m+Fje1jIt8rw==", "requires": { "graceful-fs": "^4.1.9" } @@ -3127,7 +3127,7 @@ "levn": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", - "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", "dev": true, "requires": { "prelude-ls": "~1.1.2", @@ -3137,7 +3137,7 @@ "load-json-file": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", - "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", + "integrity": "sha512-3p6ZOGNbiX4CdvEd1VcE6yi78UrGNpjHO33noGwHCnT/o2fyllJDepsm8+mFFv/DvtwFHht5HIHSyOy5a+ChVQ==", "dev": true, "requires": { "graceful-fs": "^4.1.2", @@ -3149,7 +3149,7 @@ "pify": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", "dev": true } } @@ -3157,7 +3157,7 @@ "locate-path": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "integrity": "sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==", "dev": true, "requires": { "p-locate": "^2.0.0", @@ -3172,28 +3172,28 @@ "lodash.at": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", - "integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g=" + "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" }, "lodash.camelcase": { 
"version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" }, "lodash.get": { "version": "4.4.2", "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", - "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", + "integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==", "dev": true }, "lodash.has": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", - "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=" + "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" }, "lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", - "integrity": "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", "dev": true }, "lodash.merge": { @@ -3205,12 +3205,12 @@ "lodash.pickby": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", - "integrity": "sha1-feoh2MGNdwOifHBMFdO4SmfjOv8=" + "integrity": "sha512-AZV+GsS/6ckvPOVQPXSiFFacKvKB4kOQu6ynt9wz0F3LO4R9Ij4K1ddYsIytDpSgLz88JHd9P+oaLeej5/Sl7Q==" }, "lodash.unescape": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/lodash.unescape/-/lodash.unescape-4.0.1.tgz", - "integrity": "sha1-vyJJiGzlFM2hEvrpIYzcBlIR/Jw=", + "integrity": "sha512-DhhGRshNS1aX6s5YdBE3njCCouPgnG29ebyHvImlZzXZf2SHgt+J08DHgytTPnpywNbO1Y8mNUFyQuIDBq2JZg==", "dev": true }, "log-driver": { @@ -3279,7 +3279,7 @@ "loglevel-colored-level-prefix": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/loglevel-colored-level-prefix/-/loglevel-colored-level-prefix-1.0.0.tgz", - "integrity": "sha1-akAhj9x64V/HbD0PPmdsRlOIYD4=", + "integrity": "sha512-u45Wcxxc+SdAlh4yeF/uKlC1SPUPCy0gullSNKXod5I4bmifzk+Q4lSLExNEVn19tGaJipbZ4V4jbFn79/6mVA==", "dev": true, "requires": { "chalk": "^1.1.3", @@ -3289,19 +3289,19 @@ "ansi-regex": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "integrity": "sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==", "dev": true }, "ansi-styles": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", - "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", + "integrity": "sha512-kmCevFghRiWM7HB5zTPULl4r9bVFSWjz62MhqizDGUrq2NWuNMQyuv4tHHoKJHs69M/MF64lEcHdYIocrdWQYA==", "dev": true }, "chalk": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "integrity": "sha512-U3lRVLMSlsCfjqYPbLyVv11M9CPW4I728d6TCKMAOJueEeB9/8o+eSsMnxPJD+Q+K909sdESg7C+tIkoH6on1A==", "dev": true, "requires": { "ansi-styles": "^2.2.1", @@ -3314,7 +3314,7 @@ "strip-ansi": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "integrity": "sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==", "dev": true, "requires": { "ansi-regex": "^2.0.0" @@ -3323,7 +3323,7 @@ "supports-color": { "version": "2.0.0", 
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", + "integrity": "sha512-KKNVtd6pCYgPIKU4cp2733HWYCpplQhddZLBUryaAHou723x+FRzQ5Df824Fj+IyyuiQTRoub4SnIFfIcrp70g==", "dev": true } } @@ -3350,12 +3350,12 @@ "lsmod": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", - "integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks=" + "integrity": "sha512-Y+6V75r+mGWzWEPr9h6PFmStielICu5JBHLUg18jCsD2VFmEfgHbq/EgnY4inElsUD9eKL9id1qp34w46rSIKQ==" }, "lynx": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", - "integrity": "sha1-Mxjc7xaQi4KG6Bisz9sxzXQkj50=", + "integrity": "sha512-JI52N0NwK2b/Md0TFPdPtUBI46kjyJXF7+q08l2yvQ56q6QA8s7ZjZQQRoxFpS2jDXNf/B0p8ID+OIKcTsZwzw==", "requires": { "mersenne": "~0.0.3", "statsd-parser": "~0.0.4" @@ -3373,7 +3373,7 @@ "minimist": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", - "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", + "integrity": "sha512-7Wl+Jz+IGWuSdgsQEJ4JunV0si/iMhg42MnQQG6h1R6TNeVenp4U9x5CC5v/gYqz/fENLQITAWXidNtVL0NNbw==", "dev": true, "optional": true } @@ -3387,17 +3387,17 @@ "media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==" }, "merge-descriptors": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" + "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" }, "mersenne": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", - "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" + "integrity": "sha512-XoSUL+nF8hMTKGQxUs8r3Btdsf1yuKKBdCCGbh3YXgCXuVKishpZv1CNc385w9s8t4Ynwc5h61BwW/FCVulkbg==" }, "messageformat": { "version": "2.3.0", @@ -3425,7 +3425,7 @@ "methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" }, "metrics-sharelatex": { "version": "2.4.0", @@ -3445,7 +3445,7 @@ "underscore": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", - "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" + "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" } } }, @@ -3484,12 +3484,12 @@ "minimist": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" + "integrity": "sha512-miQKw5Hv4NS1Psg2517mV4e4dYNaO3++hjAvLOAzKqZ61rH8NS1SK+vbfBWZ5PY/Me/bEWhUwqMghEW5Fb9T7Q==" }, "mkdirp": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "integrity": "sha512-SknJC52obPfGQPnjIkXbmA6+5H15E+fR+E4iR2oQ3zzCLbd7/ONua69R/Gw7AgkTLsRG+r5fzksYwWe1AgTyWA==", "requires": { "minimist": "0.0.8" } @@ -3538,7 +3538,7 @@ "module-details-from-path": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", - "integrity": 
"sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" + "integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==" }, "moment": { "version": "2.24.0", @@ -3549,7 +3549,7 @@ "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "mute-stream": { "version": "0.0.8", @@ -3560,7 +3560,7 @@ "mv": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", - "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=", + "integrity": "sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg==", "optional": true, "requires": { "mkdirp": "~0.5.1", @@ -3571,7 +3571,7 @@ "glob": { "version": "6.0.4", "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", + "integrity": "sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A==", "optional": true, "requires": { "inflight": "^1.0.4", @@ -3584,7 +3584,7 @@ "rimraf": { "version": "2.4.5", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", - "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=", + "integrity": "sha512-J5xnxTyqaiw06JjMftq7L9ouA448dw/E7dKghkP9WpKNuwmARNNg+Gk8/u5ryb9N/Yo2+z3MCwuqFK/+qPOPfQ==", "optional": true, "requires": { "glob": "^6.0.1" @@ -3600,13 +3600,13 @@ "natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "dev": true }, "ncp": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", - "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=", + "integrity": "sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA==", "optional": true }, "negotiator": { @@ -3636,7 +3636,7 @@ "isarray": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=", + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", "dev": true }, "lolex": { @@ -3672,7 +3672,7 @@ "node-uuid": { "version": "1.4.8", "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", - "integrity": "sha1-sEDrCSOWivq/jTL7HxfxFn/auQc=" + "integrity": "sha512-TkCET/3rr9mUuRp+CpO7qfgT++aAxfDRaalQhwPFzI9BY/2rCDn6OfpZOVggi1AXfTPpfkTrg5f5WQx5G1uLxA==" }, "normalize-package-data": { "version": "2.5.0", @@ -3738,7 +3738,7 @@ "on-finished": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", + "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", "requires": { "ee-first": "1.1.1" } @@ -3746,7 +3746,7 @@ "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "requires": { "wrappy": "1" } @@ -3777,7 +3777,7 @@ "os-tmpdir": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==", "dev": true }, "p-limit": { @@ -3791,7 +3791,7 @@ "p-locate": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "integrity": "sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==", "dev": true, "requires": { "p-limit": "^1.1.0" @@ -3809,7 +3809,7 @@ "p-try": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "integrity": "sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==", "dev": true } } @@ -3836,7 +3836,7 @@ "parse-json": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "integrity": "sha512-QR/GGaKCkhwk1ePQNYDRKYZ3mwU9ypsKhB0XyFnLQdomyEqk3e8wpW3V5Jp88zbxK4n5ST1nqo+g9juTpownhQ==", "dev": true, "requires": { "error-ex": "^1.2.0" @@ -3855,24 +3855,24 @@ "path-exists": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", "dev": true }, "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" }, "path-is-inside": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", + "integrity": "sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==", "dev": true }, "path-key": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", "dev": true }, "path-parse": { @@ -3883,12 +3883,12 @@ "path-to-regexp": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" + "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, "path-type": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "integrity": "sha512-dUnb5dXUf+kzhC/W/F4e5/SkluXIFf5VUHolW1Eg1irn1hGWjPGdsRcvYJ1nD6lhk8Ir7VM0bHJKsYTx8Jx9OQ==", "dev": true, "requires": { "pify": "^2.0.0" @@ -3897,7 +3897,7 @@ "pify": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", "dev": true } } @@ -3905,13 +3905,13 @@ "pathval": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz", - "integrity": 
"sha1-uULm1L3mUwBe9rcTYd74cn0GReA=", + "integrity": "sha512-qZ181q3ICkag/+lv1X6frDUF84pqCm30qild3LGbD84n0AC75CYwnWsQRDlpz7zDkU5NVcmhHh4LjXK0goLYZA==", "dev": true }, "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" }, "pify": { "version": "4.0.1", @@ -3921,7 +3921,7 @@ "pkg-dir": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", - "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "integrity": "sha512-ojakdnUgL5pzJYWw2AIDEupaQCX5OPbM688ZevubICjdIX01PRSYKqm33fJoCOJBRseYCTUlQRnBNX+Pchaejw==", "dev": true, "requires": { "find-up": "^2.1.0" @@ -3930,7 +3930,7 @@ "pngcrush": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/pngcrush/-/pngcrush-0.0.3.tgz", - "integrity": "sha1-v2dW6s2h+rNJwHdo6AXMEA0o+Tc=", + "integrity": "sha512-RVaPWGv0PUUzGeSQJHH78rw2ks8NxKbFn8uENFM+/3bfsUs39MaFDG+eul5902gH97zZLQ0zd0h2yb0YBaMKDw==", "requires": { "gettemporaryfilepath": "=0.0.1" } @@ -3938,7 +3938,7 @@ "prelude-ls": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", "dev": true }, "prettier": { @@ -3977,13 +3977,13 @@ "ansi-regex": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "integrity": "sha512-wFUFA5bg5dviipbQQ32yOQhl6gcJaJXiHE7dvR8VYPG97+J/GNC5FKGepKdEDUFeXRzDxPF1X/Btc8L+v7oqIQ==", "dev": true }, "cli-cursor": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "integrity": "sha512-8lgKz8LmCRYZZQDpRyT2m5rKJ08TnU4tR9FFFW2rxpxR1FzWi4PQ/NfyODchAatHaUgnSPVcx/R5w6NuTBzFiw==", "dev": true, "requires": { "restore-cursor": "^2.0.0" @@ -4066,7 +4066,7 @@ "figures": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "integrity": "sha512-Oa2M9atig69ZkfwiApY8F2Yy+tzMbazyvqv21R0NsSC8floSOC09BbT1ITWAdoMGQvJ/aZnR1KMwdx9tvHnTNA==", "dev": true, "requires": { "escape-string-regexp": "^1.0.5" @@ -4113,7 +4113,7 @@ "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", "dev": true }, "mimic-fn": { @@ -4131,13 +4131,13 @@ "mute-stream": { "version": "0.0.7", "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "integrity": "sha512-r65nCZhrbXXb6dXOACihYApHw2Q6pV0M3V0PSxd74N0+D8nzAdEAITq2oAjA1jVnKI+tGvEBUpqiMh0+rW6zDQ==", "dev": true }, "onetime": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "integrity": "sha512-oyyPpiMaKARvvcgip+JV+7zci5L8D1W9RZIz2l1o08AM3pfspitVWnPt3mzHcBPp12oYMTy0pqrFs/C+m3EwsQ==", "dev": true, "requires": { "mimic-fn": "^1.0.0" @@ -4146,7 +4146,7 @@ "restore-cursor": { 
"version": "2.0.0", "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "integrity": "sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q==", "dev": true, "requires": { "onetime": "^2.0.0", @@ -4172,7 +4172,7 @@ "strip-ansi": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "integrity": "sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==", "dev": true, "requires": { "ansi-regex": "^3.0.0" @@ -4181,7 +4181,7 @@ "strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", "dev": true } } @@ -4220,13 +4220,13 @@ "ansi-regex": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "integrity": "sha512-wFUFA5bg5dviipbQQ32yOQhl6gcJaJXiHE7dvR8VYPG97+J/GNC5FKGepKdEDUFeXRzDxPF1X/Btc8L+v7oqIQ==", "dev": true }, "cli-cursor": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "integrity": "sha512-8lgKz8LmCRYZZQDpRyT2m5rKJ08TnU4tR9FFFW2rxpxR1FzWi4PQ/NfyODchAatHaUgnSPVcx/R5w6NuTBzFiw==", "dev": true, "requires": { "restore-cursor": "^2.0.0" @@ -4317,7 +4317,7 @@ "figures": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "integrity": "sha512-Oa2M9atig69ZkfwiApY8F2Yy+tzMbazyvqv21R0NsSC8floSOC09BbT1ITWAdoMGQvJ/aZnR1KMwdx9tvHnTNA==", "dev": true, "requires": { "escape-string-regexp": "^1.0.5" @@ -4386,7 +4386,7 @@ "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", "dev": true }, "locate-path": { @@ -4413,13 +4413,13 @@ "mute-stream": { "version": "0.0.7", "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "integrity": "sha512-r65nCZhrbXXb6dXOACihYApHw2Q6pV0M3V0PSxd74N0+D8nzAdEAITq2oAjA1jVnKI+tGvEBUpqiMh0+rW6zDQ==", "dev": true }, "onetime": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "integrity": "sha512-oyyPpiMaKARvvcgip+JV+7zci5L8D1W9RZIz2l1o08AM3pfspitVWnPt3mzHcBPp12oYMTy0pqrFs/C+m3EwsQ==", "dev": true, "requires": { "mimic-fn": "^1.0.0" @@ -4443,7 +4443,7 @@ "restore-cursor": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "integrity": "sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q==", "dev": true, "requires": { "onetime": "^2.0.0", @@ -4469,7 +4469,7 @@ "strip-ansi": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": 
"sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "integrity": "sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==", "dev": true, "requires": { "ansi-regex": "^3.0.0" @@ -4478,7 +4478,7 @@ "strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", "dev": true } } @@ -4505,7 +4505,7 @@ "ansi-regex": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "integrity": "sha512-wFUFA5bg5dviipbQQ32yOQhl6gcJaJXiHE7dvR8VYPG97+J/GNC5FKGepKdEDUFeXRzDxPF1X/Btc8L+v7oqIQ==", "dev": true } } @@ -4616,7 +4616,7 @@ "punycode": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" + "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==" }, "qs": { "version": "6.7.0", @@ -4626,7 +4626,7 @@ "querystring": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" + "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==" }, "quick-lru": { "version": "4.0.1", @@ -4648,7 +4648,7 @@ "raven": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", - "integrity": "sha1-QnPBrm005CMPUbLAEEGjK5Iygio=", + "integrity": "sha512-RYov4wAaflZasWiCrZuizd3jNXxCOkW1WrXgWsGVb8kRpdHNZ+vPY27R6RhVtqzWp+DG9a5l6iP0QUPK4EgzaQ==", "requires": { "cookie": "0.3.1", "json-stringify-safe": "5.0.1", @@ -4660,12 +4660,12 @@ "cookie": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", - "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" + "integrity": "sha512-+IJOX0OqlHCszo2mBUq+SrEbCj6w7Kpffqx60zYbPTFaO4+yYgRjHwcZNpWvaTylDHaV7PPmBHzSecZiMhtPgw==" }, "uuid": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", - "integrity": "sha1-Zyj8BFnEUNeWqZwxg3VpvfZy1yg=" + "integrity": "sha512-rqE1LoOVLv3QrZMjb4NkF5UWlkurCfPyItVnFPNKDDGkHw4dQUdE4zMcLqx28+0Kcf3+bnUk4PisaiRJT4aiaQ==" } } }, @@ -4683,7 +4683,7 @@ "read-pkg": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", - "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", + "integrity": "sha512-eFIBOPW7FGjzBuk3hdXEuNSiTZS/xEMlH49HxMyzb0hyPfu4EhVjT2DH32K1hSSmVq4sebAWnZuuY5auISUTGA==", "dev": true, "requires": { "load-json-file": "^2.0.0", @@ -4694,7 +4694,7 @@ "read-pkg-up": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", - "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", + "integrity": "sha512-1orxQfbWGUiTn9XsPlChs6rLie/AV9jwZTGmu2NZw/CUDJQchXJFYE0Fq5j7+n558T1JhDWLdhyd1Zj+wLY//w==", "dev": true, "requires": { "find-up": "^2.0.0", @@ -4795,7 +4795,7 @@ "require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": true }, "require-in-the-middle": { @@ -4826,7 +4826,7 @@ "require-like": { "version": "0.1.2", 
"resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", - "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=", + "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==", "dev": true }, "require-main-filename": { @@ -4838,7 +4838,7 @@ "require-relative": { "version": "0.8.7", "resolved": "https://registry.npmjs.org/require-relative/-/require-relative-0.8.7.tgz", - "integrity": "sha1-eZlTn8ngR6N5KPoZb44VY9q9Nt4=", + "integrity": "sha512-AKGr4qvHiryxRb19m3PsLRGuKVAbJLUD7E6eOaHkfKhwc+vSgVOCY5xNvm9EkolBKTOf0GrQAZKLimOCz81Khg==", "dev": true }, "resolve": { @@ -4897,7 +4897,7 @@ "rimraf": { "version": "2.2.8", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz", - "integrity": "sha1-5Dm+Kq7jJzIZUnMPmaiSnk/FBYI=" + "integrity": "sha512-R5KMKHnPAQaZMqLOsyuyUmcIjSeDm+73eoqQpaXA7AZ22BL+6C+1mcUscgOsNd8WVlJuvlgAPsegcx7pjlV0Dg==" }, "run-async": { "version": "2.4.0", @@ -4936,7 +4936,7 @@ "sandboxed-module": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz", - "integrity": "sha1-x+VFkzm7y6KMUwPusz9ug4e/upY=", + "integrity": "sha512-wXiA6ULoGjCDwjn6evQF/Qi+oe77P+aCxizUktLBBKdqNbTxwec4GySJcS+O7iZFhme2ex04m+14KgknKKqFsw==", "dev": true, "requires": { "require-like": "0.1.2", @@ -4946,7 +4946,7 @@ "sax": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", - "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" + "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==" }, "semver": { "version": "6.3.0", @@ -4994,7 +4994,7 @@ "set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", "dev": true }, "setprototypeof": { @@ -5013,7 +5013,7 @@ "shebang-command": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", "dev": true, "requires": { "shebang-regex": "^1.0.0" @@ -5022,7 +5022,7 @@ "shebang-regex": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", "dev": true }, "shimmer": { @@ -5033,7 +5033,7 @@ "signal-exit": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", - "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", + "integrity": "sha512-meQNNykwecVxdu1RlYMKpQx4+wefIYpmxi6gexo/KAbwquJrBUrBmKYJrE8KFkVQAAVWEnwNdu21PgrD77J3xA==", "dev": true }, "sinon": { @@ -5084,7 +5084,7 @@ "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", "dev": true } } @@ -5146,7 +5146,7 @@ "sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": 
"sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", "dev": true }, "sshpk": { @@ -5168,27 +5168,27 @@ "stack-trace": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", - "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=" + "integrity": "sha512-vjUc6sfgtgY0dxCdnc40mK6Oftjo9+2K8H/NG81TMhgL392FtiPA9tn9RLyTxXmTLPJPjF3VyzFp6bsWFLisMQ==" }, "statsd-parser": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", - "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" + "integrity": "sha512-7XO+ur89EalMXXFQaydsczB8sclr5nDsNIoUu0IzJx1pIbHUhO3LtpSzBwetIuU9DyTLMiVaJBMtWS/Nb2KR4g==" }, "statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", - "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" + "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==" }, "stealthy-require": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", - "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=" + "integrity": "sha512-ZnWpYnYugiOVEY5GkcuJK1io5V8QmNYChG62gSit9pQVGErXtrKuPC55ITaVSukmMta5qpMU7vqLt2Lnni4f/g==" }, "stream-buffers": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-0.2.6.tgz", - "integrity": "sha1-GBwI1bs2kARfaUAbmuanoM8zE/w=" + "integrity": "sha512-ZRpmWyuCdg0TtNKk8bEqvm13oQvXMmzXDsfD4cBgcx5LouborvU5pm3JMkdTP3HcszyUI08AM1dHMXA5r2g6Sg==" }, "stream-events": { "version": "1.0.5", @@ -5201,7 +5201,7 @@ "stream-meter": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/stream-meter/-/stream-meter-1.0.4.tgz", - "integrity": "sha1-Uq+Vql6nYKJJFxZwTb/5D3Ov3R0=", + "integrity": "sha512-4sOEtrbgFotXwnEuzzsQBYEV1elAeFSO8rSGeTwabuX1RRn/kEq9JVH7I0MRBhKVRR0sJkr0M0QCH7yOLf9fhQ==", "requires": { "readable-stream": "^2.1.4" } @@ -5214,7 +5214,7 @@ "streamifier": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/streamifier/-/streamifier-0.1.1.tgz", - "integrity": "sha1-l+mNj6TRBdYqJpHR3AfoINuN/E8=", + "integrity": "sha512-zDgl+muIlWzXNsXeyUfOk9dChMjlpkq0DRsxujtYPgyJ676yQ8jEm6zzaaWHFDg5BNcLuif0eD2MTyJdZqXpdg==", "dev": true }, "string-width": { @@ -5287,7 +5287,7 @@ "strip-bom": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", "dev": true }, "strip-json-comments": { @@ -5299,7 +5299,7 @@ "stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", - "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls=" + "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" }, "supports-color": { "version": "5.4.0", @@ -5330,7 +5330,7 @@ "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", "dev": true }, "string-width": { @@ -5349,7 +5349,7 @@ "tdigest": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", - "integrity": 
"sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=", + "integrity": "sha512-CXcDY/NIgIbKZPx5H4JJNpq6JwJhU5Z4+yWj4ZghDc7/9nVajiRlPPyMXRePPPlBfcayUqtoCXjo7/Hm82ecUA==", "requires": { "bintrees": "1.0.1" } @@ -5376,13 +5376,13 @@ "text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", "dev": true }, "through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" }, "through2": { "version": "3.0.1", @@ -5404,18 +5404,18 @@ "to-fast-properties": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", "dev": true }, "to-no-case": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" + "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" }, "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", + "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", "requires": { "to-space-case": "^1.0.0" } @@ -5423,7 +5423,7 @@ "to-space-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", + "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", "requires": { "to-no-case": "^1.0.0" } @@ -5445,7 +5445,7 @@ "punycode": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" + "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==" } } }, @@ -5458,7 +5458,7 @@ "tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", "requires": { "safe-buffer": "^5.0.1" } @@ -5466,12 +5466,12 @@ "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, "type-check": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", - "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", "dev": true, "requires": { "prelude-ls": "~1.1.2" @@ -5506,12 +5506,12 @@ "underscore": { "version": "1.5.2", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz", - "integrity": 
"sha1-EzXF5PXm0zu7SwBrqMhqAPVW3gg=" + "integrity": "sha512-yejOFsRnTJs0N9CK5Apzf6maDO2djxGoLLrlZlvGs2o9ZQuhIhDL18rtFyy4FBIbOkzA6+4hDgXbgz5EvDQCXQ==" }, "unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==" }, "uri-js": { "version": "4.2.2", @@ -5531,7 +5531,7 @@ "url": { "version": "0.10.3", "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", - "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", + "integrity": "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==", "requires": { "punycode": "1.3.2", "querystring": "0.2.0" @@ -5540,12 +5540,12 @@ "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, "utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==" }, "v8-compile-cache": { "version": "2.1.0", @@ -5566,12 +5566,12 @@ "vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==" }, "verror": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", "requires": { "assert-plus": "^1.0.0", "core-util-is": "1.0.2", @@ -5601,7 +5601,7 @@ "acorn-jsx": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-3.0.1.tgz", - "integrity": "sha1-r9+UiPsezvyDSPb7IvRk4ypYs2s=", + "integrity": "sha512-AU7pnZkguthwBjKgCg6998ByQNIMjbuDQZ8bb78QAFZwPfmKia8AIzgY/gWgqCjnht8JLdXmB4YxA0KaV60ncQ==", "dev": true, "requires": { "acorn": "^3.0.4" @@ -5610,7 +5610,7 @@ "acorn": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-3.3.0.tgz", - "integrity": "sha1-ReN/s56No/JbruP/U2niu18iAXo=", + "integrity": "sha512-OLUyIIZ7mF5oaAUT1w0TFqQS81q3saT46x8t7ukpPjMNk+nbs4ZHhs7ToV8EWnLYLepjETXd4XaCE4uxkMeqUw==", "dev": true } } @@ -5669,7 +5669,7 @@ "which-module": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", - "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "integrity": "sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==", "dev": true }, "word-wrap": { @@ -5698,7 +5698,7 @@ "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", "dev": true }, "string-width": { @@ -5717,7 +5717,7 @@ "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": 
"sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "write": { "version": "1.0.3", @@ -5740,7 +5740,7 @@ "xmlbuilder": { "version": "9.0.7", "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", - "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=" + "integrity": "sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ==" }, "y18n": { "version": "4.0.0", @@ -5789,7 +5789,7 @@ "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", "dev": true }, "locate-path": { diff --git a/services/filestore/package.json b/services/filestore/package.json index 6f1dde0e8a..4a5d72abb5 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -22,7 +22,7 @@ "dependencies": { "@overleaf/o-error": "^2.1.0", "async": "~0.2.10", - "aws-sdk": "^2.1.39", + "aws-sdk": "^2.628.0", "body-parser": "^1.2.0", "express": "^4.2.0", "fs-extra": "^1.0.0", From 0db4a17a143198c7eb6f90ad9d9815028347c64e Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Fri, 10 Jan 2020 01:13:32 +0100 Subject: [PATCH 440/555] [HealthCheckController] use fs.copyFile instead of fs-extra.copy the fs-extra method has a HUGE overhead of JS code and also syscalls for no particular benefit in this case: just copy the tiny.pdf file. Here is an overview of the major operations: paths are relative to https://github.com/jprichardson/node-fs-extra/blob/1.0.0 We start in /lib/copy/copy.js - sys: check that the source file exists - sys: check that the source file has an existing parent directory?! Continue in /lib/copy/ncp.js - sys: more stat calls on both source and dest to determine permissions - read/write streams to pipe the file content through the process - sys: chmod on the destination to match the source permissions What we actually need is a call to the binding and let node/the os figure out the best way to copy the contents. 
Signed-off-by: Jakob Ackermann --- .../filestore/app/js/HealthCheckController.js | 4 +-- services/filestore/package-lock.json | 29 ++----------------- services/filestore/package.json | 1 - 3 files changed, 4 insertions(+), 30 deletions(-) diff --git a/services/filestore/app/js/HealthCheckController.js b/services/filestore/app/js/HealthCheckController.js index a52d02a444..0a4b10387e 100644 --- a/services/filestore/app/js/HealthCheckController.js +++ b/services/filestore/app/js/HealthCheckController.js @@ -1,4 +1,4 @@ -const fs = require('fs-extra') +const fs = require('fs') const path = require('path') const Settings = require('settings-sharelatex') const streamBuffers = require('stream-buffers') @@ -6,7 +6,7 @@ const { promisify } = require('util') const Stream = require('stream') const pipeline = promisify(Stream.pipeline) -const fsCopy = promisify(fs.copy) +const fsCopy = promisify(fs.copyFile) const fsUnlink = promisify(fs.unlink) const { HealthCheckError } = require('./Errors') diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index f8b4a70297..2fd623af1c 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -2488,16 +2488,6 @@ "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==" }, - "fs-extra": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-1.0.0.tgz", - "integrity": "sha512-VerQV6vEKuhDWD2HGOybV6v5I73syoc/cXAbKlgTC7M/oFVEtklWlp9QH2Ijw3IaWDOQcMkldSPa7zXy79Z/UQ==", - "requires": { - "graceful-fs": "^4.1.2", - "jsonfile": "^2.1.0", - "klaw": "^1.0.0" - } - }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -2644,7 +2634,8 @@ "graceful-fs": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", - "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==" + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", + "dev": true }, "growl": { "version": "1.10.5", @@ -3072,14 +3063,6 @@ "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" }, - "jsonfile": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", - "integrity": "sha512-PKllAqbgLgxHaj8TElYymKCAgrASebJrWpTnEkOaTowt23VKXXN0sUeriJ+eh7y6ufb/CC5ap11pz71/cM0hUw==", - "requires": { - "graceful-fs": "^4.1.6" - } - }, "jsprim": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", @@ -3116,14 +3099,6 @@ "safe-buffer": "^5.0.1" } }, - "klaw": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz", - "integrity": "sha512-TED5xi9gGQjGpNnvRWknrwAB1eL5GciPfVFOt3Vk1OJCVDQbzuSfrF3hkUQKlsgKrG1F+0t5W0m+Fje1jIt8rw==", - "requires": { - "graceful-fs": "^4.1.9" - } - }, "levn": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 4a5d72abb5..45d7eed1c3 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -25,7 +25,6 @@ "aws-sdk": "^2.628.0", "body-parser": "^1.2.0", "express": 
"^4.2.0", - "fs-extra": "^1.0.0", "glob": "^7.1.6", "heapdump": "^0.3.2", "logger-sharelatex": "^1.7.0", From 32557ab1d7fe941a54a99afe4acd95dd600ce5f2 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Fri, 10 Jan 2020 10:18:55 +0000 Subject: [PATCH 441/555] [SafeExec] replace _.once with lodash.once --- services/filestore/app/js/SafeExec.js | 4 ++-- services/filestore/package-lock.json | 10 +++++----- services/filestore/package.json | 4 ++-- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/services/filestore/app/js/SafeExec.js b/services/filestore/app/js/SafeExec.js index 5ed0f18425..a9d1398441 100644 --- a/services/filestore/app/js/SafeExec.js +++ b/services/filestore/app/js/SafeExec.js @@ -1,4 +1,4 @@ -const _ = require('underscore') +const lodashOnce = require('lodash.once') const childProcess = require('child_process') const Settings = require('settings-sharelatex') const { ConversionsDisabledError, FailedCommandError } = require('./Errors') @@ -28,7 +28,7 @@ function safeExec(command, options, callback) { let killTimer - const cleanup = _.once(function(err) { + const cleanup = lodashOnce(function(err) { if (killTimer) { clearTimeout(killTimer) } diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 2fd623af1c..6356c89d30 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -3177,6 +3177,11 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, + "lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=" + }, "lodash.pickby": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", @@ -5478,11 +5483,6 @@ "integrity": "sha512-EgOVgL/4xfVrCMbhYKUQTdF37SQn4Iw73H5BgCrF1Abdun7Kwy/QZsE/ssAy0y4LxBbvua3PIbFsbRczWWnDdQ==", "dev": true }, - "underscore": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz", - "integrity": "sha512-yejOFsRnTJs0N9CK5Apzf6maDO2djxGoLLrlZlvGs2o9ZQuhIhDL18rtFyy4FBIbOkzA6+4hDgXbgz5EvDQCXQ==" - }, "unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 45d7eed1c3..ad7fd42bb6 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -27,6 +27,7 @@ "express": "^4.2.0", "glob": "^7.1.6", "heapdump": "^0.3.2", + "lodash.once": "^4.1.1", "logger-sharelatex": "^1.7.0", "metrics-sharelatex": "^2.2.0", "mocha": "5.2.0", @@ -38,8 +39,7 @@ "rimraf": "2.2.8", "settings-sharelatex": "^1.1.0", "stream-buffers": "~0.2.5", - "stream-meter": "^1.0.4", - "underscore": "~1.5.2" + "stream-meter": "^1.0.4" }, "devDependencies": { "babel-eslint": "^10.0.3", From 6f27f7a1bffcb5dde5da4adb57091038692d543a Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Sun, 23 Feb 2020 16:27:55 +0100 Subject: [PATCH 442/555] [misc] drop the /heapdump route and related heapdump package --- services/filestore/README.md | 1 - services/filestore/app.js | 12 ------------ services/filestore/package-lock.json | 8 -------- services/filestore/package.json | 1 - 4 files changed, 22 deletions(-) diff --git a/services/filestore/README.md b/services/filestore/README.md index 2772b71494..3ee6cadff6 100644 --- a/services/filestore/README.md +++ b/services/filestore/README.md @@ -10,7 +10,6 @@ 
filestore acts as a proxy between the CLSIs and (currently) Amazon S3 storage, p * `/project/:project_id/public/:public_file_id` * `/project/:project_id/size` * `/bucket/:bucket/key/*` -* `/heapdump` * `/shutdown` * `/status` - returns `filestore sharelatex up` or `server is being shut down` (HTTP 500) * `/health_check` diff --git a/services/filestore/app.js b/services/filestore/app.js index 278997b9aa..ea2c2ca1d8 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -122,18 +122,6 @@ app.get( fileController.getFile ) -app.get('/heapdump', (req, res, next) => - require('heapdump').writeSnapshot( - '/tmp/' + Date.now() + '.filestore.heapsnapshot', - (err, filename) => { - if (err) { - return next(err) - } - res.send(filename) - } - ) -) - app.get('/status', function(req, res) { res.send('filestore sharelatex up') }) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 6356c89d30..fa85d92a20 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -2716,14 +2716,6 @@ "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", "integrity": "sha512-z/GDPjlRMNOa2XJiB4em8wJpuuBfrFOlYKTZxtpkdr1uPdibHI8rYA3MY0KDObpVyaes0e/aunid/t88ZI2EKA==" }, - "heapdump": { - "version": "0.3.15", - "resolved": "https://registry.npmjs.org/heapdump/-/heapdump-0.3.15.tgz", - "integrity": "sha512-n8aSFscI9r3gfhOcAECAtXFaQ1uy4QSke6bnaL+iymYZ/dWs9cqDqHM+rALfsHUwukUbxsdlECZ0pKmJdQ/4OA==", - "requires": { - "nan": "^2.13.2" - } - }, "hex2dec": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index ad7fd42bb6..c40f2a65f6 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -26,7 +26,6 @@ "body-parser": "^1.2.0", "express": "^4.2.0", "glob": "^7.1.6", - "heapdump": "^0.3.2", "lodash.once": "^4.1.1", "logger-sharelatex": "^1.7.0", "metrics-sharelatex": "^2.2.0", From 3c61e53918026545e1313dfa5cb590dde6c5090a Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Sun, 23 Feb 2020 18:41:14 +0000 Subject: [PATCH 443/555] [misc] move mocha to the devDependencies --- services/filestore/package-lock.json | 25 ++++++++++++++++++------- services/filestore/package.json | 2 +- 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index fa85d92a20..12b57d5975 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -1409,7 +1409,8 @@ "browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==" + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true }, "buffer": { "version": "4.9.1", @@ -1603,7 +1604,8 @@ "commander": { "version": "2.15.1", "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", - "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==" + "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==", + "dev": true }, "common-tags": { "version": "1.8.0", @@ -1765,7 +1767,8 @@ "diff": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "integrity": 
"sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==" + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "dev": true }, "disrequire": { "version": "1.1.0", @@ -1930,7 +1933,8 @@ "escape-string-regexp": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true }, "eslint": { "version": "6.8.0", @@ -2640,7 +2644,8 @@ "growl": { "version": "1.10.5", "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", - "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==" + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true }, "gtoken": { "version": "4.1.4", @@ -2703,7 +2708,8 @@ "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true }, "has-symbols": { "version": "1.0.1", @@ -2714,7 +2720,8 @@ "he": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "integrity": "sha512-z/GDPjlRMNOa2XJiB4em8wJpuuBfrFOlYKTZxtpkdr1uPdibHI8rYA3MY0KDObpVyaes0e/aunid/t88ZI2EKA==" + "integrity": "sha512-z/GDPjlRMNOa2XJiB4em8wJpuuBfrFOlYKTZxtpkdr1uPdibHI8rYA3MY0KDObpVyaes0e/aunid/t88ZI2EKA==", + "dev": true }, "hex2dec": { "version": "1.1.2", @@ -3470,6 +3477,7 @@ "version": "5.2.0", "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", "integrity": "sha512-2IUgKDhc3J7Uug+FxMXuqIyYzH7gJjXECKe/w43IGgQHTSj3InJi+yAA7T24L9bQMRKiUEHxEX37G5JpVUGLcQ==", + "dev": true, "requires": { "browser-stdout": "1.3.1", "commander": "2.15.1", @@ -3488,6 +3496,7 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "dev": true, "requires": { "ms": "2.0.0" } @@ -3496,6 +3505,7 @@ "version": "7.1.2", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==", + "dev": true, "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -5277,6 +5287,7 @@ "version": "5.4.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", "integrity": "sha512-zjaXglF5nnWpsq470jSv6P9DwPvgLkuapYmfDm3JWOm0vkNTVF2tI4UrN2r6jH1qM/uc/WtxYY1hYoA2dOKj5w==", + "dev": true, "requires": { "has-flag": "^3.0.0" } diff --git a/services/filestore/package.json b/services/filestore/package.json index c40f2a65f6..509e4f962a 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -29,7 +29,6 @@ "lodash.once": "^4.1.1", "logger-sharelatex": "^1.7.0", "metrics-sharelatex": "^2.2.0", - "mocha": "5.2.0", "node-uuid": "~1.4.1", "pngcrush": "0.0.3", "range-parser": "^1.0.2", @@ -57,6 +56,7 @@ "eslint-plugin-prettier": "^3.1.2", "eslint-plugin-promise": "^4.2.1", "eslint-plugin-standard": 
"^4.0.1", + "mocha": "5.2.0", "prettier-eslint": "^9.0.1", "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "2.0.3", From 4b7c6fafd2d053c689747980d3b6c915bbcc399d Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Sun, 23 Feb 2020 21:51:14 +0000 Subject: [PATCH 444/555] [misc] drop unused dependency pngcrush --- services/filestore/package-lock.json | 13 ------------- services/filestore/package.json | 1 - 2 files changed, 14 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 12b57d5975..c3ed1ad560 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -2556,11 +2556,6 @@ "assert-plus": "^1.0.0" } }, - "gettemporaryfilepath": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/gettemporaryfilepath/-/gettemporaryfilepath-0.0.1.tgz", - "integrity": "sha512-7avwQWP8MP42u7mtc+KjCRuUE3nafRJPuGaZaySD9NN1KEbfVTfSAywP4KOkK8gaxhdOxx11ZTWH28DwjAF70Q==" - }, "glob": { "version": "7.1.6", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", @@ -3909,14 +3904,6 @@ "find-up": "^2.1.0" } }, - "pngcrush": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/pngcrush/-/pngcrush-0.0.3.tgz", - "integrity": "sha512-RVaPWGv0PUUzGeSQJHH78rw2ks8NxKbFn8uENFM+/3bfsUs39MaFDG+eul5902gH97zZLQ0zd0h2yb0YBaMKDw==", - "requires": { - "gettemporaryfilepath": "=0.0.1" - } - }, "prelude-ls": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 509e4f962a..04774dc5e4 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -30,7 +30,6 @@ "logger-sharelatex": "^1.7.0", "metrics-sharelatex": "^2.2.0", "node-uuid": "~1.4.1", - "pngcrush": "0.0.3", "range-parser": "^1.0.2", "request": "^2.88.0", "request-promise-native": "^1.0.8", From 0312b00150dc919fd0ac555dff14a8e97e186cee Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 2 Mar 2020 16:31:35 +0000 Subject: [PATCH 445/555] Fix prettier task --- services/filestore/package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/package.json b/services/filestore/package.json index 4a5d72abb5..920347c50d 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -14,8 +14,8 @@ "start": "node $NODE_APP_OPTIONS app.js", "nodemon": "nodemon --config nodemon.json", "lint": "node_modules/.bin/eslint .", - "format": "node_modules/.bin/prettier-eslint '**/*.js' --list-different", - "format:fix": "node_modules/.bin/prettier-eslint '**/*.js' --write", + "format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different", + "format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write", "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" }, From 1276fb426af6a3cd2b0a873cda6f5fbbb44f22a7 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 2 Mar 2020 17:50:45 +0100 Subject: [PATCH 446/555] [misc] bump buildscript version to 1.3.6 --- services/filestore/.eslintrc | 2 +- services/filestore/.prettierrc | 2 +- services/filestore/Dockerfile | 2 +- services/filestore/Makefile | 2 +- services/filestore/buildscript.txt | 2 +- services/filestore/docker-compose.ci.yml | 2 +- services/filestore/docker-compose.yml | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git 
a/services/filestore/.eslintrc b/services/filestore/.eslintrc index 73103de7f6..50dd4050bf 100644 --- a/services/filestore/.eslintrc +++ b/services/filestore/.eslintrc @@ -1,7 +1,7 @@ // this file was auto-generated, do not edit it directly. // instead run bin/update_build_scripts from // https://github.com/sharelatex/sharelatex-dev-environment -// Version: 1.3.5 +// Version: 1.3.6 { "extends": [ "standard", diff --git a/services/filestore/.prettierrc b/services/filestore/.prettierrc index 5845b82113..b5f22cc658 100644 --- a/services/filestore/.prettierrc +++ b/services/filestore/.prettierrc @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.5 +# Version: 1.3.6 { "semi": false, "singleQuote": true diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index c4a7b37f9a..91ec6303ec 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.5 +# Version: 1.3.6 FROM node:10.19.0 as base diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 86514a2121..ec1324e9a8 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.5 +# Version: 1.3.6 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index 75478ce00e..cad06f2700 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -8,4 +8,4 @@ filestore --docker-repos=gcr.io/overleaf-ops --env-pass-through= --data-dirs=uploads,user_files,template_files ---script-version=1.3.5 +--script-version=1.3.6 diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 38ee4d81f4..824ba815c0 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.5 +# Version: 1.3.6 version: "2.3" diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 40984ea078..c2634432ba 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,7 +1,7 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.5 +# Version: 1.3.6 version: "2.3" From ada6ac0764dd13d732901ed69fea734f5ffa42ac Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 2 Mar 2020 18:12:55 +0100 Subject: [PATCH 447/555] [misc] revert eslint rules --- services/filestore/.eslintrc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/services/filestore/.eslintrc b/services/filestore/.eslintrc index 50dd4050bf..7cc2ffbf8a 100644 --- a/services/filestore/.eslintrc +++ b/services/filestore/.eslintrc @@ -23,8 +23,7 @@ "rules": { // Swap the no-unused-expressions rule with a more chai-friendly one "no-unused-expressions": 0, - "chai-friendly/no-unused-expressions": "error", - "no-console": "error" + "chai-friendly/no-unused-expressions": "error" }, "overrides": [ { From 2b1572965845d1f827d17def7d720ed22ac49e3d Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Sun, 23 Feb 2020 22:34:40 +0000 Subject: [PATCH 448/555] [misc] promisify FileHandler and remove dependency on async Signed-off-by: Jakob Ackermann --- services/filestore/app/js/FileHandler.js | 253 ++++++++---------- services/filestore/package-lock.json | 5 - services/filestore/package.json | 1 - .../test/unit/js/FileHandlerTests.js | 108 +++++--- 4 files changed, 180 insertions(+), 187 deletions(-) diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index 3c5b50e693..02831fa3d0 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -1,176 +1,153 @@ -const { promisify } = require('util') +const { callbackify } = require('util') const fs = require('fs') const PersistorManager = require('./PersistorManager') const LocalFileWriter = require('./LocalFileWriter') const FileConverter = require('./FileConverter') const KeyBuilder = require('./KeyBuilder') -const async = require('async') const ImageOptimiser = require('./ImageOptimiser') const { ConversionError } = require('./Errors') module.exports = { - insertFile, - deleteFile, - getFile, - getFileSize, - getDirectorySize, + insertFile: callbackify(insertFile), + deleteFile: callbackify(deleteFile), + getFile: callbackify(getFile), + getFileSize: callbackify(getFileSize), + getDirectorySize: callbackify(getDirectorySize), promises: { - getFile: promisify(getFile), - insertFile: promisify(insertFile), - deleteFile: promisify(deleteFile), - getFileSize: promisify(getFileSize), - getDirectorySize: promisify(getDirectorySize) + getFile, + insertFile, + deleteFile, + getFileSize, + getDirectorySize } } -function insertFile(bucket, key, stream, callback) { +async function insertFile(bucket, key, stream) { const convertedKey = KeyBuilder.getConvertedFolderKey(key) - PersistorManager.deleteDirectory(bucket, convertedKey, function(error) { - if (error) { - return callback(error) - } - PersistorManager.sendStream(bucket, key, stream, callback) - }) + await PersistorManager.promises.deleteDirectory(bucket, convertedKey) + await PersistorManager.promises.sendStream(bucket, key, stream) } -function deleteFile(bucket, key, callback) { +async function deleteFile(bucket, key) { const convertedKey = KeyBuilder.getConvertedFolderKey(key) - async.parallel( - [ - done => PersistorManager.deleteFile(bucket, key, done), - done => PersistorManager.deleteDirectory(bucket, convertedKey, done) - ], - callback - ) + await Promise.all([ + PersistorManager.promises.deleteFile(bucket, key), + 
PersistorManager.promises.deleteDirectory(bucket, convertedKey) + ]) } -function getFile(bucket, key, opts, callback) { +async function getFile(bucket, key, opts) { opts = opts || {} if (!opts.format && !opts.style) { - PersistorManager.getFileStream(bucket, key, opts, callback) + return PersistorManager.promises.getFileStream(bucket, key, opts) } else { - _getConvertedFile(bucket, key, opts, callback) + return _getConvertedFile(bucket, key, opts) } } -function getFileSize(bucket, key, callback) { - PersistorManager.getFileSize(bucket, key, callback) +async function getFileSize(bucket, key) { + return PersistorManager.promises.getFileSize(bucket, key) } -function getDirectorySize(bucket, projectId, callback) { - PersistorManager.directorySize(bucket, projectId, callback) +async function getDirectorySize(bucket, projectId) { + return PersistorManager.promises.directorySize(bucket, projectId) } -function _getConvertedFile(bucket, key, opts, callback) { +async function _getConvertedFile(bucket, key, opts) { const convertedKey = KeyBuilder.addCachingToKey(key, opts) - PersistorManager.checkIfFileExists(bucket, convertedKey, (err, exists) => { - if (err) { - return callback(err) - } - - if (exists) { - PersistorManager.getFileStream(bucket, convertedKey, opts, callback) - } else { - _getConvertedFileAndCache(bucket, key, convertedKey, opts, callback) - } - }) -} - -function _getConvertedFileAndCache(bucket, key, convertedKey, opts, callback) { - let convertedFsPath - - async.series( - [ - cb => { - _convertFile(bucket, key, opts, function(err, fileSystemPath) { - convertedFsPath = fileSystemPath - cb(err) - }) - }, - cb => ImageOptimiser.compressPng(convertedFsPath, cb), - cb => PersistorManager.sendFile(bucket, convertedKey, convertedFsPath, cb) - ], - function(err) { - if (err) { - LocalFileWriter.deleteFile(convertedFsPath, function() {}) - return callback( - new ConversionError({ - message: 'failed to convert file', - info: { opts, bucket, key, convertedKey } - }).withCause(err) - ) - } - // Send back the converted file from the local copy to avoid problems - // with the file not being present in S3 yet. As described in the - // documentation below, we have already made a 'HEAD' request in - // checkIfFileExists so we only have "eventual consistency" if we try - // to stream it from S3 here. This was a cause of many 403 errors. - // - // "Amazon S3 provides read-after-write consistency for PUTS of new - // objects in your S3 bucket in all regions with one caveat. 
The - // caveat is that if you make a HEAD or GET request to the key name - // (to find if the object exists) before creating the object, Amazon - // S3 provides eventual consistency for read-after-write."" - // https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel - const readStream = fs.createReadStream(convertedFsPath) - readStream.on('end', function() { - LocalFileWriter.deleteFile(convertedFsPath, function() {}) - }) - callback(null, readStream) - } + const exists = await PersistorManager.promises.checkIfFileExists( + bucket, + convertedKey ) + if (exists) { + return PersistorManager.promises.getFileStream(bucket, convertedKey, opts) + } else { + return _getConvertedFileAndCache(bucket, key, convertedKey, opts) + } } -function _convertFile(bucket, originalKey, opts, callback) { - _writeFileToDisk(bucket, originalKey, opts, function(err, originalFsPath) { - if (err) { - return callback( - new ConversionError({ - message: 'unable to write file to disk', - info: { bucket, originalKey, opts } - }).withCause(err) - ) - } +async function _getConvertedFileAndCache(bucket, key, convertedKey, opts) { + let convertedFsPath + try { + convertedFsPath = await _convertFile(bucket, key, opts) + await ImageOptimiser.promises.compressPng(convertedFsPath) + await PersistorManager.promises.sendFile( + bucket, + convertedKey, + convertedFsPath + ) + } catch (err) { + LocalFileWriter.deleteFile(convertedFsPath, () => {}) + throw new ConversionError({ + message: 'failed to convert file', + info: { opts, bucket, key, convertedKey } + }).withCause(err) + } + // Send back the converted file from the local copy to avoid problems + // with the file not being present in S3 yet. As described in the + // documentation below, we have already made a 'HEAD' request in + // checkIfFileExists so we only have "eventual consistency" if we try + // to stream it from S3 here. This was a cause of many 403 errors. + // + // "Amazon S3 provides read-after-write consistency for PUTS of new + // objects in your S3 bucket in all regions with one caveat. 
The + // caveat is that if you make a HEAD or GET request to the key name + // (to find if the object exists) before creating the object, Amazon + // S3 provides eventual consistency for read-after-write."" + // https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel + const readStream = fs.createReadStream(convertedFsPath) + readStream.on('end', function() { + LocalFileWriter.deleteFile(convertedFsPath, function() {}) + }) + return readStream +} - const done = function(err, destPath) { - if (err) { - return callback( - new ConversionError({ - message: 'error converting file', - info: { bucket, originalKey, opts } - }).withCause(err) - ) +async function _convertFile(bucket, originalKey, opts) { + let originalFsPath + try { + originalFsPath = await _writeFileToDisk(bucket, originalKey, opts) + } catch (err) { + throw new ConversionError({ + message: 'unable to write file to disk', + info: { bucket, originalKey, opts } + }).withCause(err) + } + + let promise + if (opts.format) { + promise = FileConverter.promises.convert(originalFsPath, opts.format) + } else if (opts.style === 'thumbnail') { + promise = FileConverter.promises.thumbnail(originalFsPath) + } else if (opts.style === 'preview') { + promise = FileConverter.promises.preview(originalFsPath) + } else { + throw new ConversionError({ + message: 'invalid file conversion options', + info: { + bucket, + originalKey, + opts } - LocalFileWriter.deleteFile(originalFsPath, function() {}) - callback(err, destPath) - } - - if (opts.format) { - FileConverter.convert(originalFsPath, opts.format, done) - } else if (opts.style === 'thumbnail') { - FileConverter.thumbnail(originalFsPath, done) - } else if (opts.style === 'preview') { - FileConverter.preview(originalFsPath, done) - } else { - callback( - new ConversionError({ - message: 'invalid file conversion options', - info: { - bucket, - originalKey, - opts - } - }) - ) - } - }) + }) + } + let destPath + try { + destPath = await promise + } catch (err) { + throw new ConversionError({ + message: 'error converting file', + info: { bucket, originalKey, opts } + }).withCause(err) + } + LocalFileWriter.deleteFile(originalFsPath, function() {}) + return destPath } -function _writeFileToDisk(bucket, key, opts, callback) { - PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream) { - if (err) { - return callback(err) - } - LocalFileWriter.writeStream(fileStream, key, callback) - }) +async function _writeFileToDisk(bucket, key, opts) { + const fileStream = await PersistorManager.promises.getFileStream( + bucket, + key, + opts + ) + return LocalFileWriter.promises.writeStream(fileStream, key) } diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index f8b4a70297..132250950e 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -1256,11 +1256,6 @@ "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==", "dev": true }, - "async": { - "version": "0.2.10", - "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", - "integrity": "sha512-eAkdoKxU6/LkKDBzLpT+t6Ff5EtfSF4wx1WfJiPEEV7WNLnDaRXk0oVysiEPm262roaachGexwUv94WhSgN5TQ==" - }, "async-listener": { "version": "0.6.10", "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 4a5d72abb5..1c77eea173 100644 --- a/services/filestore/package.json +++ 
b/services/filestore/package.json @@ -21,7 +21,6 @@ }, "dependencies": { "@overleaf/o-error": "^2.1.0", - "async": "~0.2.10", "aws-sdk": "^2.628.0", "body-parser": "^1.2.0", "express": "^4.2.0", diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index 771ff998eb..623ed440b0 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -4,6 +4,9 @@ const { expect } = chai const modulePath = '../../../app/js/FileHandler.js' const SandboxedModule = require('sandboxed-module') +chai.use(require('sinon-chai')) +chai.use(require('chai-as-promised')) + describe('FileHandler', function() { let PersistorManager, LocalFileWriter, @@ -32,29 +35,41 @@ describe('FileHandler', function() { beforeEach(function() { PersistorManager = { - getFileStream: sinon.stub().yields(null, sourceStream), - checkIfFileExists: sinon.stub().yields(), - deleteFile: sinon.stub().yields(), - deleteDirectory: sinon.stub().yields(), - sendStream: sinon.stub().yields(), - insertFile: sinon.stub().yields(), - sendFile: sinon.stub().yields(), - directorySize: sinon.stub().yields() + promises: { + getFileStream: sinon.stub().resolves(sourceStream), + checkIfFileExists: sinon.stub().resolves(), + deleteFile: sinon.stub().resolves(), + deleteDirectory: sinon.stub().resolves(), + sendStream: sinon.stub().resolves(), + insertFile: sinon.stub().resolves(), + sendFile: sinon.stub().resolves(), + directorySize: sinon.stub().resolves() + } } LocalFileWriter = { - writeStream: sinon.stub().yields(), - deleteFile: sinon.stub().yields() + // the callback style is used for detached cleanup calls + deleteFile: sinon.stub().yields(), + promises: { + writeStream: sinon.stub().resolves(), + deleteFile: sinon.stub().resolves() + } } FileConverter = { - convert: sinon.stub().yields(), - thumbnail: sinon.stub().yields(), - preview: sinon.stub().yields() + promises: { + convert: sinon.stub().resolves(), + thumbnail: sinon.stub().resolves(), + preview: sinon.stub().resolves() + } } KeyBuilder = { addCachingToKey: sinon.stub().returns(convertedKey), getConvertedFolderKey: sinon.stub().returns(convertedFolderKey) } - ImageOptimiser = { compressPng: sinon.stub().yields() } + ImageOptimiser = { + promises: { + compressPng: sinon.stub().resolves() + } + } fs = { createReadStream: sinon.stub().returns(readStream) } @@ -79,7 +94,7 @@ describe('FileHandler', function() { it('should send file to the filestore', function(done) { FileHandler.insertFile(bucket, key, stream, err => { expect(err).not.to.exist - expect(PersistorManager.sendStream).to.have.been.calledWith( + expect(PersistorManager.promises.sendStream).to.have.been.calledWith( bucket, key, stream @@ -91,10 +106,9 @@ describe('FileHandler', function() { it('should delete the convertedKey folder', function(done) { FileHandler.insertFile(bucket, key, stream, err => { expect(err).not.to.exist - expect(PersistorManager.deleteDirectory).to.have.been.calledWith( - bucket, - convertedFolderKey - ) + expect( + PersistorManager.promises.deleteDirectory + ).to.have.been.calledWith(bucket, convertedFolderKey) done() }) }) @@ -104,7 +118,10 @@ describe('FileHandler', function() { it('should tell the filestore manager to delete the file', function(done) { FileHandler.deleteFile(bucket, key, err => { expect(err).not.to.exist - expect(PersistorManager.deleteFile).to.have.been.calledWith(bucket, key) + expect(PersistorManager.promises.deleteFile).to.have.been.calledWith( + bucket, + 
key + ) done() }) }) @@ -112,10 +129,9 @@ describe('FileHandler', function() { it('should tell the filestore manager to delete the cached folder', function(done) { FileHandler.deleteFile(bucket, key, err => { expect(err).not.to.exist - expect(PersistorManager.deleteDirectory).to.have.been.calledWith( - bucket, - convertedFolderKey - ) + expect( + PersistorManager.promises.deleteDirectory + ).to.have.been.calledWith(bucket, convertedFolderKey) done() }) }) @@ -134,7 +150,7 @@ describe('FileHandler', function() { const options = { start: 0, end: 8 } FileHandler.getFile(bucket, key, options, err => { expect(err).not.to.exist - expect(PersistorManager.getFileStream).to.have.been.calledWith( + expect(PersistorManager.promises.getFileStream).to.have.been.calledWith( bucket, key, options @@ -155,23 +171,27 @@ describe('FileHandler', function() { }) it('should convert the file', function() { - expect(FileConverter.convert).to.have.been.called - expect(ImageOptimiser.compressPng).to.have.been.called + expect(FileConverter.promises.convert).to.have.been.called + }) + + it('should compress the converted file', function() { + expect(ImageOptimiser.promises.compressPng).to.have.been.called }) it('should return the the converted stream', function() { expect(result.err).not.to.exist expect(result.stream).to.equal(readStream) - expect(PersistorManager.getFileStream).to.have.been.calledWith( - bucket, - key - ) + expect( + PersistorManager.promises.getFileStream + ).to.have.been.calledWith(bucket, key) }) }) describe('when the file is cached', function() { beforeEach(function(done) { - PersistorManager.checkIfFileExists = sinon.stub().yields(null, true) + PersistorManager.promises.checkIfFileExists = sinon + .stub() + .resolves(true) FileHandler.getFile(bucket, key, { format: 'png' }, (err, stream) => { result = { err, stream } done() @@ -179,17 +199,19 @@ describe('FileHandler', function() { }) it('should not convert the file', function() { - expect(FileConverter.convert).not.to.have.been.called - expect(ImageOptimiser.compressPng).not.to.have.been.called + expect(FileConverter.promises.convert).not.to.have.been.called + }) + + it('should not compress the converted file again', function() { + expect(ImageOptimiser.promises.compressPng).not.to.have.been.called }) it('should return the cached stream', function() { expect(result.err).not.to.exist expect(result.stream).to.equal(sourceStream) - expect(PersistorManager.getFileStream).to.have.been.calledWith( - bucket, - convertedKey - ) + expect( + PersistorManager.promises.getFileStream + ).to.have.been.calledWith(bucket, convertedKey) }) }) }) @@ -198,8 +220,8 @@ describe('FileHandler', function() { it('generates a thumbnail when requested', function(done) { FileHandler.getFile(bucket, key, { style: 'thumbnail' }, err => { expect(err).not.to.exist - expect(FileConverter.thumbnail).to.have.been.called - expect(FileConverter.preview).not.to.have.been.called + expect(FileConverter.promises.thumbnail).to.have.been.called + expect(FileConverter.promises.preview).not.to.have.been.called done() }) }) @@ -207,8 +229,8 @@ describe('FileHandler', function() { it('generates a preview when requested', function(done) { FileHandler.getFile(bucket, key, { style: 'preview' }, err => { expect(err).not.to.exist - expect(FileConverter.thumbnail).not.to.have.been.called - expect(FileConverter.preview).to.have.been.called + expect(FileConverter.promises.thumbnail).not.to.have.been.called + expect(FileConverter.promises.preview).to.have.been.called done() }) }) @@ -219,7 +241,7 @@ 
describe('FileHandler', function() { it('should call the filestore manager to get directory size', function(done) { FileHandler.getDirectorySize(bucket, key, err => { expect(err).not.to.exist - expect(PersistorManager.directorySize).to.have.been.calledWith( + expect(PersistorManager.promises.directorySize).to.have.been.calledWith( bucket, key ) From a3782422b75557d157e5a638348c0f14668357b0 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 4 Mar 2020 10:06:18 +0000 Subject: [PATCH 449/555] Upgrade to node 12 --- services/filestore/.nvmrc | 2 +- services/filestore/Dockerfile | 2 +- services/filestore/buildscript.txt | 10 +++++----- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/services/filestore/.nvmrc b/services/filestore/.nvmrc index 5b7269c0a9..66df3b7ab2 100644 --- a/services/filestore/.nvmrc +++ b/services/filestore/.nvmrc @@ -1 +1 @@ -10.19.0 +12.16.1 diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index 91ec6303ec..a652abb47c 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -3,7 +3,7 @@ # https://github.com/sharelatex/sharelatex-dev-environment # Version: 1.3.6 -FROM node:10.19.0 as base +FROM node:12.16.1 as base WORKDIR /app COPY install_deps.sh /app diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index cad06f2700..ae58ad6c18 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -1,11 +1,11 @@ filestore ---public-repo=True ---language=es ---env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_BUCKET_NAME=fake_user_files,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake_template_files,AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake_public_files ---node-version=10.19.0 --acceptance-creds= +--data-dirs=uploads,user_files,template_files --dependencies=s3 --docker-repos=gcr.io/overleaf-ops +--env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_BUCKET_NAME=fake_user_files,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake_template_files,AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake_public_files --env-pass-through= ---data-dirs=uploads,user_files,template_files +--language=es +--node-version=12.16.1 +--public-repo=True --script-version=1.3.6 From 9807568de84cdf0af1a062c315a691d81df14244 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 4 Mar 2020 10:08:31 +0000 Subject: [PATCH 450/555] Add missing 'process' global for settings tests --- services/filestore/test/unit/js/SettingsTests.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/test/unit/js/SettingsTests.js b/services/filestore/test/unit/js/SettingsTests.js index 91981c7de8..84c3361eab 100644 --- a/services/filestore/test/unit/js/SettingsTests.js +++ b/services/filestore/test/unit/js/SettingsTests.js @@ -13,7 +13,7 @@ describe('Settings', function() { } process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3Settings) const settings = SandboxedModule.require('settings-sharelatex', { - globals: { console } + globals: { console, process } }) expect(settings.filestore.s3BucketCreds).to.deep.equal(s3Settings) }) From d06b74b0f81e055bbe1c84b4abc24038fb61d1a5 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 4 Mar 2020 10:54:53 +0000 Subject: [PATCH 451/555] Bump metrics to 2.5.0 --- services/filestore/package-lock.json | 20 +++++++++----------- services/filestore/package.json | 2 +- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 
bfeec76d95..de8452d061 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -3169,7 +3169,7 @@ "lodash.once": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", - "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=" + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" }, "lodash.pickby": { "version": "4.6.0", @@ -3397,9 +3397,9 @@ "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" }, "metrics-sharelatex": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.4.0.tgz", - "integrity": "sha512-FbIRRhReVCEM4ETzh+qVMm3lP33zSSAdrHfSTtegkcB7GGi1kYs+Qt1/dXFawUA8pIZRQTtsfxiS1nZamiSwHg==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.5.0.tgz", + "integrity": "sha512-JG4yBe5bEzUW5P//8aAUoexInPosPLOXxLS4AjGxMrP78BS5PSV7uVrY0Op6b6c7ZqKItHTtEjzsUfLRPGQ/sQ==", "requires": { "@google-cloud/debug-agent": "^3.0.0", "@google-cloud/profiler": "^0.2.3", @@ -3409,13 +3409,6 @@ "prom-client": "^11.1.3", "underscore": "~1.6.0", "yn": "^3.1.1" - }, - "dependencies": { - "underscore": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", - "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" - } } }, "mime": { @@ -5468,6 +5461,11 @@ "integrity": "sha512-EgOVgL/4xfVrCMbhYKUQTdF37SQn4Iw73H5BgCrF1Abdun7Kwy/QZsE/ssAy0y4LxBbvua3PIbFsbRczWWnDdQ==", "dev": true }, + "underscore": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", + "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" + }, "unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 8949da3898..ca56581131 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -27,7 +27,7 @@ "glob": "^7.1.6", "lodash.once": "^4.1.1", "logger-sharelatex": "^1.7.0", - "metrics-sharelatex": "^2.2.0", + "metrics-sharelatex": "^2.5.0", "node-uuid": "~1.4.1", "range-parser": "^1.0.2", "request": "^2.88.0", From 7292d93dab43d1793d1dbb18bbb3aa10b78bc62e Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 12 Feb 2020 10:38:32 +0000 Subject: [PATCH 452/555] Add fake GCS server, and fix health checks --- services/filestore/docker-compose.ci.yml | 29 ++++++++++++++----- services/filestore/docker-compose.yml | 29 ++++++++++++++----- .../test/acceptance/deps/Dockerfile.fake-gcs | 5 ++++ .../test/acceptance/deps/Dockerfile.s3mock | 4 +++ .../test/acceptance/deps/healthcheck.sh | 9 ++++++ 5 files changed, 60 insertions(+), 16 deletions(-) create mode 100644 services/filestore/test/acceptance/deps/Dockerfile.fake-gcs create mode 100644 services/filestore/test/acceptance/deps/Dockerfile.s3mock create mode 100755 services/filestore/test/acceptance/deps/healthcheck.sh diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 824ba815c0..fe4eaa35fd 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -22,10 +22,6 @@ services: REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres - AWS_S3_ENDPOINT: 
http://s3:9090 - AWS_S3_PATH_STYLE: 'true' - AWS_ACCESS_KEY_ID: fake - AWS_SECRET_ACCESS_KEY: fake MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test ENABLE_CONVERSIONS: "true" @@ -33,9 +29,21 @@ services: AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files + AWS_S3_ENDPOINT: http://s3:9090 + AWS_ACCESS_KEY_ID: fake + AWS_SECRET_ACCESS_KEY: fake + AWS_S3_PATH_STYLE: 'true' + GCS_API_ENDPOINT: gcs:9090 + GCS_USER_FILES_BUCKET_NAME: fake_userfiles + GCS_TEMPLATE_FILES_BUCKET_NAME: fake_templatefiles + GCS_PUBLIC_FILES_BUCKET_NAME: fake_publicfiles + NODE_TLS_REJECT_UNAUTHORIZED: 0 + STORAGE_EMULATOR_HOST: https://gcs:9090/storage/v1 depends_on: s3: condition: service_healthy + gcs: + condition: service_healthy user: node command: npm run test:acceptance:_run @@ -48,8 +56,13 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . user: root s3: - image: adobe/s3mock + build: + context: test/acceptance/deps + dockerfile: Dockerfile.s3mock environment: - - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:9090"] + - initialBuckets=fake_user_files,fake_template_files,fake_public_files + + gcs: + build: + context: test/acceptance/deps + dockerfile: Dockerfile.fake-gcs diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index c2634432ba..d904574f84 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -31,10 +31,6 @@ services: REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres - AWS_S3_ENDPOINT: http://s3:9090 - AWS_S3_PATH_STYLE: 'true' - AWS_ACCESS_KEY_ID: fake - AWS_SECRET_ACCESS_KEY: fake MOCHA_GREP: ${MOCHA_GREP} LOG_LEVEL: ERROR NODE_ENV: test @@ -43,15 +39,32 @@ services: AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files + AWS_S3_ENDPOINT: http://s3:9090 + AWS_S3_PATH_STYLE: 'true' + AWS_ACCESS_KEY_ID: fake + AWS_SECRET_ACCESS_KEY: fake + GCS_API_ENDPOINT: gcs:9090 + GCS_USER_FILES_BUCKET_NAME: fake_userfiles + GCS_TEMPLATE_FILES_BUCKET_NAME: fake_templatefiles + GCS_PUBLIC_FILES_BUCKET_NAME: fake_publicfiles + NODE_TLS_REJECT_UNAUTHORIZED: 0 + STORAGE_EMULATOR_HOST: https://gcs:9090/storage/v1 user: node depends_on: s3: condition: service_healthy + gcs: + condition: service_healthy command: npm run test:acceptance s3: - image: adobe/s3mock + build: + context: test/acceptance/deps + dockerfile: Dockerfile.s3mock environment: - - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:9090"] + - initialBuckets=fake_user_files,fake_template_files,fake_public_files + + gcs: + build: + context: test/acceptance/deps + dockerfile: Dockerfile.fake-gcs diff --git a/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs new file mode 100644 index 0000000000..694bcdac9e --- /dev/null +++ b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs @@ -0,0 +1,5 @@ +FROM gh2k/fake-gcs-server +RUN apk add --update --no-cache curl +COPY healthcheck.sh /healthcheck.sh +HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090 +CMD ["--port=9090"] diff --git a/services/filestore/test/acceptance/deps/Dockerfile.s3mock 
b/services/filestore/test/acceptance/deps/Dockerfile.s3mock new file mode 100644 index 0000000000..15eda4dd4b --- /dev/null +++ b/services/filestore/test/acceptance/deps/Dockerfile.s3mock @@ -0,0 +1,4 @@ +FROM adobe/s3mock +RUN apk add --update --no-cache curl +COPY healthcheck.sh /healthcheck.sh +HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090 diff --git a/services/filestore/test/acceptance/deps/healthcheck.sh b/services/filestore/test/acceptance/deps/healthcheck.sh new file mode 100755 index 0000000000..cd19cea637 --- /dev/null +++ b/services/filestore/test/acceptance/deps/healthcheck.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +# health check to allow 404 status code as valid +STATUSCODE=$(curl --silent --output /dev/null --write-out "%{http_code}" $1) +# will be 000 on non-http error (e.g. connection failure) +if test $STATUSCODE -ge 500 || test $STATUSCODE -lt 200; then + exit 1 +fi +exit 0 From e6cf0687a9c042e399ebc981458e36d98a33bfeb Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 12 Feb 2020 10:39:00 +0000 Subject: [PATCH 453/555] Add gcs client library --- services/filestore/package-lock.json | 205 +++++++++++++++++++++++++-- services/filestore/package.json | 1 + 2 files changed, 197 insertions(+), 9 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index de8452d061..1d74c5d172 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -603,6 +603,52 @@ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz", "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==" }, + "@google-cloud/storage": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-4.4.0.tgz", + "integrity": "sha512-R64ey4dLIG3IgiKw0CL5MdZ4ZtZdGhN75171vjiL+ioZG+hlLFkjsrCTRuIdE35v42nNe5nXmVhBHQQTuPozHA==", + "requires": { + "@google-cloud/common": "^2.1.1", + "@google-cloud/paginator": "^2.0.0", + "@google-cloud/promisify": "^1.0.0", + "arrify": "^2.0.0", + "compressible": "^2.0.12", + "concat-stream": "^2.0.0", + "date-and-time": "^0.12.0", + "duplexify": "^3.5.0", + "extend": "^3.0.2", + "gaxios": "^2.0.1", + "gcs-resumable-upload": "^2.2.4", + "hash-stream-validation": "^0.2.2", + "mime": "^2.2.0", + "mime-types": "^2.0.8", + "onetime": "^5.1.0", + "p-limit": "^2.2.0", + "pumpify": "^2.0.0", + "readable-stream": "^3.4.0", + "snakeize": "^0.1.0", + "stream-events": "^1.0.1", + "through2": "^3.0.0", + "xdg-basedir": "^4.0.0" + }, + "dependencies": { + "mime": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, "@google-cloud/trace-agent": { "version": "3.6.1", "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.6.1.tgz", @@ -1422,6 +1468,11 @@ "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", "integrity": 
"sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" }, + "buffer-from": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", + "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" + }, "builtin-modules": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", @@ -1608,11 +1659,55 @@ "integrity": "sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==", "dev": true }, + "compressible": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", + "requires": { + "mime-db": ">= 1.43.0 < 2" + } + }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, + "concat-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", + "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", + "requires": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.0.2", + "typedarray": "^0.0.6" + }, + "dependencies": { + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "configstore": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz", + "integrity": "sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==", + "requires": { + "dot-prop": "^5.2.0", + "graceful-fs": "^4.1.2", + "make-dir": "^3.0.0", + "unique-string": "^2.0.0", + "write-file-atomic": "^3.0.0", + "xdg-basedir": "^4.0.0" + } + }, "console-log-level": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", @@ -1688,6 +1783,11 @@ } } }, + "crypto-random-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", + "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==" + }, "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", @@ -1701,6 +1801,11 @@ "assert-plus": "^1.0.0" } }, + "date-and-time": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/date-and-time/-/date-and-time-0.12.0.tgz", + "integrity": "sha512-n2RJIAp93AucgF/U/Rz5WRS2Hjg5Z+QxscaaMCi6pVZT1JpJKRH+C08vyH/lRR1kxNXnPxgo3lWfd+jCb/UcuQ==" + }, "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -2525,6 +2630,19 @@ "json-bigint": "^0.3.0" } }, + "gcs-resumable-upload": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-2.3.2.tgz", + "integrity": "sha512-OPS0iAmPCV+r7PziOIhyxmQOzsazFCy76yYDOS/Z80O/7cuny1KMfqDQa2T0jLaL8EreTU7EMZG5pUuqBKgzHA==", + 
"requires": { + "abort-controller": "^3.0.0", + "configstore": "^5.0.0", + "gaxios": "^2.0.0", + "google-auth-library": "^5.0.0", + "pumpify": "^2.0.0", + "stream-events": "^1.0.4" + } + }, "get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -2628,8 +2746,7 @@ "graceful-fs": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", - "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", - "dev": true + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==" }, "growl": { "version": "1.10.5", @@ -2707,6 +2824,25 @@ "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", "dev": true }, + "hash-stream-validation": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/hash-stream-validation/-/hash-stream-validation-0.2.2.tgz", + "integrity": "sha512-cMlva5CxWZOrlS/cY0C+9qAzesn5srhFA8IT1VPiHc9bWWBLkJfEUIZr7MWoi89oOOGmpg8ymchaOjiArsGu5A==", + "requires": { + "through2": "^2.0.0" + }, + "dependencies": { + "through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "requires": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + } + } + }, "he": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", @@ -2827,8 +2963,7 @@ "imurmurhash": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", - "dev": true + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==" }, "indent-string": { "version": "4.0.0", @@ -3330,6 +3465,14 @@ "statsd-parser": "~0.0.4" } }, + "make-dir": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.0.2.tgz", + "integrity": "sha512-rYKABKutXa6vXTXhoV18cBE7PaewPXHe/Bdq4v+ZLMhxbWApkFFplT0LcbMW+6BbjnQXzZ/sAvSE/JdguApG5w==", + "requires": { + "semver": "^6.0.0" + } + }, "make-plural": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/make-plural/-/make-plural-4.3.0.tgz", @@ -3432,8 +3575,7 @@ "mimic-fn": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==" }, "minimatch": { "version": "3.0.4", @@ -3720,7 +3862,6 @@ "version": "5.1.0", "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", - "dev": true, "requires": { "mimic-fn": "^2.1.0" } @@ -4990,8 +5131,7 @@ "signal-exit": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", - "integrity": "sha512-meQNNykwecVxdu1RlYMKpQx4+wefIYpmxi6gexo/KAbwquJrBUrBmKYJrE8KFkVQAAVWEnwNdu21PgrD77J3xA==", - "dev": true + "integrity": "sha512-meQNNykwecVxdu1RlYMKpQx4+wefIYpmxi6gexo/KAbwquJrBUrBmKYJrE8KFkVQAAVWEnwNdu21PgrD77J3xA==" }, "sinon": { 
"version": "7.1.1", @@ -5055,6 +5195,11 @@ "to-snake-case": "^1.0.0" } }, + "snakeize": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/snakeize/-/snakeize-0.1.0.tgz", + "integrity": "sha1-EMCI2LWOsHazIpu1oE4jLOEmQi0=" + }, "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", @@ -5455,6 +5600,19 @@ "mime-types": "~2.1.24" } }, + "typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=" + }, + "typedarray-to-buffer": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", + "requires": { + "is-typedarray": "^1.0.0" + } + }, "typescript": { "version": "3.8.2", "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.2.tgz", @@ -5466,6 +5624,14 @@ "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" }, + "unique-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", + "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", + "requires": { + "crypto-random-string": "^2.0.0" + } + }, "unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", @@ -5686,6 +5852,22 @@ "mkdirp": "^0.5.1" } }, + "write-file-atomic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", + "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", + "requires": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + }, + "xdg-basedir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", + "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==" + }, "xml2js": { "version": "0.4.19", "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz", @@ -5700,6 +5882,11 @@ "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", "integrity": "sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ==" }, + "xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" + }, "y18n": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index ca56581131..6f7d84c778 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -20,6 +20,7 @@ "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" }, "dependencies": { + "@google-cloud/storage": "^4.3.0", "@overleaf/o-error": "^2.1.0", "aws-sdk": "^2.628.0", "body-parser": "^1.2.0", From 366ce97169addea6cc65b34a7cb24cf8886897b8 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 12 Feb 2020 11:00:45 +0000 Subject: [PATCH 454/555] Add GCS Persistor --- 
services/filestore/app/js/GcsPersistor.js | 295 ++++++++++++++++++ services/filestore/app/js/PersistorManager.js | 2 + 2 files changed, 297 insertions(+) create mode 100644 services/filestore/app/js/GcsPersistor.js diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js new file mode 100644 index 0000000000..350b0d451c --- /dev/null +++ b/services/filestore/app/js/GcsPersistor.js @@ -0,0 +1,295 @@ +const settings = require('settings-sharelatex') +const metrics = require('metrics-sharelatex') +const fs = require('fs') +const { promisify } = require('util') +const Stream = require('stream') +const { Storage } = require('@google-cloud/storage') +const { callbackify } = require('util') +const { WriteError, ReadError, NotFoundError } = require('./Errors') +const PersistorHelper = require('./PersistorHelper') + +const pipeline = promisify(Stream.pipeline) + +function base64ToHex(base64) { + return Buffer.from(base64, 'base64').toString('hex') +} + +// both of these settings will be null by default except for tests +// that's OK - GCS uses the locally-configured service account by default +const storage = new Storage(settings.filestore.gcs) +// workaround for broken uploads with custom endpoints: +// https://github.com/googleapis/nodejs-storage/issues/898 +if (settings.filestore.gcs.apiEndpoint) { + storage.interceptors.push({ + request: function(reqOpts) { + const url = new URL(reqOpts.uri) + url.host = settings.filestore.gcs.apiEndpoint + reqOpts.uri = url.toString() + return reqOpts + } + }) +} + +const GcsPersistor = { + sendFile: callbackify(sendFile), + sendStream: callbackify(sendStream), + getFileStream: callbackify(getFileStream), + getFileMd5Hash: callbackify(getFileMd5Hash), + deleteDirectory: callbackify(deleteDirectory), + getFileSize: callbackify(getFileSize), + deleteFile: callbackify(deleteFile), + copyFile: callbackify(copyFile), + checkIfFileExists: callbackify(checkIfFileExists), + directorySize: callbackify(directorySize), + promises: { + sendFile, + sendStream, + getFileStream, + getFileMd5Hash, + deleteDirectory, + getFileSize, + deleteFile, + copyFile, + checkIfFileExists, + directorySize + } +} + +module.exports = GcsPersistor + +async function sendFile(bucket, key, fsPath) { + let readStream + try { + readStream = fs.createReadStream(fsPath) + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error reading file from disk', + { bucketName: bucket, key, fsPath }, + ReadError + ) + } + return sendStream(bucket, key, readStream) +} + +async function sendStream(bucket, key, readStream, sourceMd5) { + try { + let hashPromise + + // if there is no supplied md5 hash, we calculate the hash as the data passes through + if (!sourceMd5) { + hashPromise = PersistorHelper.calculateStreamMd5(readStream) + } + + const meteredStream = PersistorHelper.getMeteredStream( + readStream, + (_, byteCount) => { + metrics.count('gcs.egress', byteCount) + } + ) + + const writeOptions = { + resumable: false // recommended by Google + } + + if (sourceMd5) { + writeOptions.validation = 'md5' + writeOptions.metadata = { + md5Hash: sourceMd5 + } + } + + const uploadStream = storage + .bucket(bucket) + .file(key) + .createWriteStream(writeOptions) + + await pipeline(meteredStream, uploadStream) + + // if we didn't have an md5 hash, we should compare our computed one with Google's + // as we couldn't tell GCS about it beforehand + if (hashPromise) { + sourceMd5 = await hashPromise + // throws on mismatch + await PersistorHelper.verifyMd5(GcsPersistor, 
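+        // (verifyMd5 is exported by the shared PersistorHelper module rather than this
+        //  file; per the comments above it is expected to compare the hash computed here
+        //  with the one GCS reports and throw on a mismatch)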
bucket, key, sourceMd5) + } + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'upload to GCS failed', + { bucket, key }, + WriteError + ) + } +} + +async function getFileStream(bucket, key, opts) { + if (opts.end) { + // S3 (and http range headers) treat 'end' as inclusive, so increase this by 1 + opts.end++ + } + const stream = storage + .bucket(bucket) + .file(key) + .createReadStream(opts) + + const meteredStream = PersistorHelper.getMeteredStream(stream, (_, bytes) => { + // ignore the error parameter and just log the byte count + metrics.count('gcs.ingress', bytes) + }) + + try { + await PersistorHelper.waitForStreamReady(stream) + return meteredStream + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error reading file from GCS', + { bucket, key, opts }, + ReadError + ) + } +} + +async function getFileSize(bucket, key) { + try { + const metadata = await storage + .bucket(bucket) + .file(key) + .getMetadata() + return metadata[0].size + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error getting size of GCS object', + { bucket, key }, + ReadError + ) + } +} + +async function getFileMd5Hash(bucket, key) { + try { + const metadata = await storage + .bucket(bucket) + .file(key) + .getMetadata() + return base64ToHex(metadata[0].md5Hash) + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error getting hash of GCS object', + { bucket, key }, + ReadError + ) + } +} + +async function deleteFile(bucket, key) { + try { + await storage + .bucket(bucket) + .file(key) + .delete() + } catch (err) { + const error = PersistorHelper.wrapError( + err, + 'error deleting GCS object', + { bucket, key }, + WriteError + ) + if (!(error instanceof NotFoundError)) { + throw error + } + } +} + +async function deleteDirectory(bucket, key) { + let files + + try { + const response = await storage.bucket(bucket).getFiles({ directory: key }) + files = response[0] + } catch (err) { + const error = PersistorHelper.wrapError( + err, + 'failed to list objects in GCS', + { bucket, key }, + ReadError + ) + if (error instanceof NotFoundError) { + return + } + throw error + } + + for (const index in files) { + try { + await files[index].delete() + } catch (err) { + const error = PersistorHelper.wrapError( + err, + 'failed to delete object in GCS', + { bucket, key }, + WriteError + ) + if (!(error instanceof NotFoundError)) { + throw error + } + } + } +} + +async function directorySize(bucket, key) { + let files + + try { + const response = await storage.bucket(bucket).getFiles({ directory: key }) + files = response[0] + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'failed to list objects in GCS', + { bucket, key }, + ReadError + ) + } + + return files.reduce((acc, file) => Number(file.metadata.size) + acc, 0) +} + +async function checkIfFileExists(bucket, key) { + try { + const response = await storage + .bucket(bucket) + .file(key) + .exists() + return response[0] + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error checking if file exists in GCS', + { bucket, key }, + ReadError + ) + } +} + +async function copyFile(bucket, sourceKey, destKey) { + try { + const src = storage.bucket(bucket).file(sourceKey) + const dest = storage.bucket(bucket).file(destKey) + await src.copy(dest) + } catch (err) { + // fake-gcs-server has a bug that returns an invalid response when the file does not exist + if (err.message === 'Cannot parse response as JSON: not found\n') { + err.code = 404 + } + throw PersistorHelper.wrapError( + err, + 'failed to copy file 
in GCS', + { bucket, sourceKey, destKey }, + WriteError + ) + } +} diff --git a/services/filestore/app/js/PersistorManager.js b/services/filestore/app/js/PersistorManager.js index 32f6cd41f8..d26ab77a92 100644 --- a/services/filestore/app/js/PersistorManager.js +++ b/services/filestore/app/js/PersistorManager.js @@ -19,6 +19,8 @@ function getPersistor(backend) { return require('./S3Persistor') case 'fs': return require('./FSPersistor') + case 'gcs': + return require('./GcsPersistor') default: throw new Error(`unknown filestore backend: ${backend}`) } From 2cfab8d3137093afc58c4abc5582106fcf1f2400 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 12 Feb 2020 11:01:51 +0000 Subject: [PATCH 455/555] Add GCS-specific acceptance tests --- .../test/acceptance/js/FilestoreTests.js | 136 ++++++------------ .../test/acceptance/js/TestConfig.js | 107 ++++++++++++++ 2 files changed, 149 insertions(+), 94 deletions(-) create mode 100644 services/filestore/test/acceptance/js/TestConfig.js diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index c6a1e08444..599d60155a 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -11,6 +11,7 @@ const S3 = require('aws-sdk/clients/s3') const Stream = require('stream') const request = require('request') const { promisify } = require('util') +const { Storage } = require('@google-cloud/storage') const streamifier = require('streamifier') chai.use(require('chai-as-promised')) @@ -42,89 +43,7 @@ function streamToString(stream) { // store settings for multiple backends, so that we can test each one. // fs will always be available - add others if they are configured -const BackendSettings = { - FSPersistor: { - backend: 'fs', - stores: { - user_files: Path.resolve(__dirname, '../../../user_files'), - public_files: Path.resolve(__dirname, '../../../public_files'), - template_files: Path.resolve(__dirname, '../../../template_files') - } - }, - S3Persistor: { - backend: 's3', - s3: { - key: process.env.AWS_ACCESS_KEY_ID, - secret: process.env.AWS_SECRET_ACCESS_KEY, - endpoint: process.env.AWS_S3_ENDPOINT, - pathStyle: true, - partSize: 100 * 1024 * 1024 - }, - stores: { - user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, - template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME, - public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME - } - }, - FallbackS3ToFSPersistor: { - backend: 's3', - s3: { - key: process.env.AWS_ACCESS_KEY_ID, - secret: process.env.AWS_SECRET_ACCESS_KEY, - endpoint: process.env.AWS_S3_ENDPOINT, - pathStyle: true, - partSize: 100 * 1024 * 1024 - }, - stores: { - user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, - template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME, - public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME - }, - fallback: { - backend: 'fs', - buckets: { - [process.env.AWS_S3_USER_FILES_BUCKET_NAME]: Path.resolve( - __dirname, - '../../../user_files' - ), - [process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME]: Path.resolve( - __dirname, - '../../../public_files' - ), - [process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME]: Path.resolve( - __dirname, - '../../../template_files' - ) - } - } - }, - FallbackFSToS3Persistor: { - backend: 'fs', - s3: { - key: process.env.AWS_ACCESS_KEY_ID, - secret: process.env.AWS_SECRET_ACCESS_KEY, - endpoint: process.env.AWS_S3_ENDPOINT, - pathStyle: true, - partSize: 100 * 1024 * 1024 - }, - stores: { - user_files: 
Path.resolve(__dirname, '../../../user_files'), - public_files: Path.resolve(__dirname, '../../../public_files'), - template_files: Path.resolve(__dirname, '../../../template_files') - }, - fallback: { - backend: 's3', - buckets: { - [Path.resolve(__dirname, '../../../user_files')]: process.env - .AWS_S3_USER_FILES_BUCKET_NAME, - [Path.resolve(__dirname, '../../../public_files')]: process.env - .AWS_S3_PUBLIC_FILES_BUCKET_NAME, - [Path.resolve(__dirname, '../../../template_files')]: process.env - .AWS_S3_TEMPLATE_FILES_BUCKET_NAME - } - } - } -} +const BackendSettings = require('./TestConfig') describe('Filestore', function() { this.timeout(1000 * 10) @@ -134,7 +53,7 @@ describe('Filestore', function() { // redefine the test suite for every available backend Object.keys(BackendSettings).forEach(backend => { describe(backend, function() { - let app, previousEgress, previousIngress, projectId + let app, previousEgress, previousIngress, metricPrefix, projectId before(async function() { // create the app with the relevant filestore settings @@ -143,13 +62,27 @@ describe('Filestore', function() { await app.runServer() }) + if (BackendSettings[backend].gcs) { + before(async function() { + const storage = new Storage(Settings.filestore.gcs) + await storage.createBucket(process.env.GCS_USER_FILES_BUCKET_NAME) + await storage.createBucket(process.env.GCS_PUBLIC_FILES_BUCKET_NAME) + await storage.createBucket(process.env.GCS_TEMPLATE_FILES_BUCKET_NAME) + }) + } + after(async function() { return app.stop() }) beforeEach(async function() { - if (Settings.filestore.backend === 's3') { - previousEgress = await getMetric(filestoreUrl, 's3_egress') + // retrieve previous metrics from the app + if (['s3', 'gcs'].includes(Settings.filestore.backend)) { + metricPrefix = Settings.filestore.backend + previousEgress = await getMetric( + filestoreUrl, + `${metricPrefix}_egress` + ) } projectId = `acceptance_tests_${Math.random()}` }) @@ -195,8 +128,11 @@ describe('Filestore', function() { // The upload request can bump the ingress metric. // The content hash validation might require a full download // in case the ETag field of the upload response is not a md5 sum. 
- if (Settings.filestore.backend === 's3') { - previousIngress = await getMetric(filestoreUrl, 's3_ingress') + if (['s3', 'gcs'].includes(Settings.filestore.backend)) { + previousIngress = await getMetric( + filestoreUrl, + `${metricPrefix}_ingress` + ) } }) @@ -285,15 +221,21 @@ describe('Filestore', function() { expect(response.body).to.equal(newContent) }) - if (backend === 'S3Persistor') { + if (['S3Persistor', 'GcsPersistor'].includes(backend)) { it('should record an egress metric for the upload', async function() { - const metric = await getMetric(filestoreUrl, 's3_egress') + const metric = await getMetric( + filestoreUrl, + `${metricPrefix}_egress` + ) expect(metric - previousEgress).to.equal(constantFileContent.length) }) it('should record an ingress metric when downloading the file', async function() { await rp.get(fileUrl) - const metric = await getMetric(filestoreUrl, 's3_ingress') + const metric = await getMetric( + filestoreUrl, + `${metricPrefix}_ingress` + ) expect(metric - previousIngress).to.equal( constantFileContent.length ) @@ -307,7 +249,10 @@ describe('Filestore', function() { } } await rp.get(options) - const metric = await getMetric(filestoreUrl, 's3_ingress') + const metric = await getMetric( + filestoreUrl, + `${metricPrefix}_ingress` + ) expect(metric - previousIngress).to.equal(9) }) } @@ -827,9 +772,12 @@ describe('Filestore', function() { expect(response.body.substring(0, 8)).to.equal('%PDF-1.5') }) - if (backend === 'S3Persistor') { + if (['S3Persistor', 'GcsPersistor'].includes(backend)) { it('should record an egress metric for the upload', async function() { - const metric = await getMetric(filestoreUrl, 's3_egress') + const metric = await getMetric( + filestoreUrl, + `${metricPrefix}_egress` + ) expect(metric - previousEgress).to.equal(localFileSize) }) } diff --git a/services/filestore/test/acceptance/js/TestConfig.js b/services/filestore/test/acceptance/js/TestConfig.js new file mode 100644 index 0000000000..e673ace71c --- /dev/null +++ b/services/filestore/test/acceptance/js/TestConfig.js @@ -0,0 +1,107 @@ +const Path = require('path') + +// use functions to get a fresh copy, not a reference, each time +function s3Config() { + return { + key: process.env.AWS_ACCESS_KEY_ID, + secret: process.env.AWS_SECRET_ACCESS_KEY, + endpoint: process.env.AWS_S3_ENDPOINT, + pathStyle: true, + partSize: 100 * 1024 * 1024 + } +} + +function s3Stores() { + return { + user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, + template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME, + public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME + } +} + +function gcsConfig() { + return { + apiEndpoint: process.env.GCS_API_ENDPOINT, + projectId: 'fake' + } +} + +function gcsStores() { + return { + user_files: process.env.GCS_USER_FILES_BUCKET_NAME, + template_files: process.env.GCS_TEMPLATE_FILES_BUCKET_NAME, + public_files: process.env.GCS_PUBLIC_FILES_BUCKET_NAME + } +} + +function fsStores() { + return { + user_files: Path.resolve(__dirname, '../../../user_files'), + public_files: Path.resolve(__dirname, '../../../public_files'), + template_files: Path.resolve(__dirname, '../../../template_files') + } +} + +function fallbackStores(primaryConfig, fallbackConfig) { + return { + [primaryConfig.user_files]: fallbackConfig.user_files, + [primaryConfig.public_files]: fallbackConfig.public_files, + [primaryConfig.template_files]: fallbackConfig.template_files + } +} + +module.exports = { + FSPersistor: { + backend: 'fs', + stores: fsStores() + }, + S3Persistor: { + backend: 
's3', + s3: s3Config(), + stores: s3Stores() + }, + GcsPersistor: { + backend: 'gcs', + gcs: gcsConfig(), + stores: gcsStores() + }, + FallbackS3ToFSPersistor: { + backend: 's3', + s3: s3Config(), + stores: s3Stores(), + fallback: { + backend: 'fs', + buckets: fallbackStores(s3Stores(), fsStores()) + } + }, + FallbackFSToS3Persistor: { + backend: 'fs', + s3: s3Config(), + stores: fsStores(), + fallback: { + backend: 's3', + buckets: fallbackStores(fsStores(), s3Stores()) + } + }, + FallbackGcsToS3Persistor: { + backend: 'gcs', + gcs: gcsConfig(), + stores: gcsStores(), + s3: s3Config(), + fallback: { + backend: 's3', + buckets: fallbackStores(gcsStores(), s3Stores()) + } + }, + FallbackS3ToGcsPersistor: { + backend: 's3', + // can use the same bucket names for gcs and s3 (in tests) + stores: s3Stores(), + s3: s3Config(), + gcs: gcsConfig(), + fallback: { + backend: 'gcs', + buckets: fallbackStores(s3Stores(), gcsStores()) + } + } +} From 9dddf2520992c0704a7279c990d80f92094cdcd9 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 12 Feb 2020 11:02:05 +0000 Subject: [PATCH 456/555] Add note on gcs config to config file --- services/filestore/config/settings.defaults.coffee | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 251fb073b4..6ffe5a8523 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -31,6 +31,7 @@ settings = # Choices are # s3 - Amazon S3 # fs - local filesystem + # gcs - Google Cloud Storage backend: process.env['BACKEND'] s3: @@ -41,6 +42,9 @@ settings = pathStyle: process.env['AWS_S3_PATH_STYLE'] partSize: process.env['AWS_S3_PARTSIZE'] or (100 * 1024 * 1024) + # GCS should be configured by the service account on the kubernetes pod. See GOOGLE_APPLICATION_CREDENTIALS, + # which will be picked up automatically. 
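+  # For local testing against an emulator, a gcs block can be supplied explicitly; the
+  # shape below mirrors test/acceptance/js/TestConfig.js and is illustrative only:
+  # gcs:
+  #   apiEndpoint: process.env['GCS_API_ENDPOINT']
+  #   projectId: 'fake'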
+ stores: user_files: process.env['USER_FILES_BUCKET_NAME'] template_files: process.env['TEMPLATE_FILES_BUCKET_NAME'] From e2f3dd23c93bb8de2da9f8c91935877ac4394c24 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 12 Feb 2020 13:44:38 +0000 Subject: [PATCH 457/555] Switch back to official fake-gcs-server image --- services/filestore/test/acceptance/deps/Dockerfile.fake-gcs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs index 694bcdac9e..f8313cbce0 100644 --- a/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs +++ b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs @@ -1,4 +1,4 @@ -FROM gh2k/fake-gcs-server +FROM fsouza/fake-gcs-server RUN apk add --update --no-cache curl COPY healthcheck.sh /healthcheck.sh HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090 From d9c9d74994d6a660d49c2b644dbdf0067558f65a Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 13 Feb 2020 15:47:42 +0000 Subject: [PATCH 458/555] Remove unnecessary test for S3 file deletion S3 does not throw a not-found error when deleting a file that does not exist --- .../test/unit/js/S3PersistorTests.js | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js index ac104e36f2..aa9444428e 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -675,25 +675,6 @@ describe('S3PersistorTests', function() { }) }) }) - - describe('when the file does not exist', function() { - let error - - beforeEach(async function() { - S3Client.deleteObject = sinon.stub().returns({ - promise: sinon.stub().rejects(S3NotFoundError) - }) - try { - await S3Persistor.promises.deleteFile(bucket, key) - } catch (err) { - error = err - } - }) - - it('should throw a NotFoundError', function() { - expect(error).to.be.an.instanceOf(Errors.NotFoundError) - }) - }) }) describe('deleteDirectory', function() { From e58284aefeec7ded2d00764e7f0a0f434fc4a0ad Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 13 Feb 2020 16:55:01 +0000 Subject: [PATCH 459/555] Move base64/hex methods to PersistorHelper Also add some null-safety checks --- services/filestore/app/js/GcsPersistor.js | 12 ++++-------- services/filestore/app/js/PersistorHelper.js | 12 +++++++++++- services/filestore/app/js/S3Persistor.js | 6 +----- 3 files changed, 16 insertions(+), 14 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index 350b0d451c..690df70252 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -10,16 +10,12 @@ const PersistorHelper = require('./PersistorHelper') const pipeline = promisify(Stream.pipeline) -function base64ToHex(base64) { - return Buffer.from(base64, 'base64').toString('hex') -} - // both of these settings will be null by default except for tests // that's OK - GCS uses the locally-configured service account by default const storage = new Storage(settings.filestore.gcs) // workaround for broken uploads with custom endpoints: // https://github.com/googleapis/nodejs-storage/issues/898 -if (settings.filestore.gcs.apiEndpoint) { +if (settings.filestore.gcs && settings.filestore.gcs.apiEndpoint) { storage.interceptors.push({ request: function(reqOpts) { 
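      // the interceptor body (added with the persistor) swaps the request host for the
      // configured endpoint and rebuilds reqOpts.uri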
const url = new URL(reqOpts.uri) @@ -95,7 +91,7 @@ async function sendStream(bucket, key, readStream, sourceMd5) { if (sourceMd5) { writeOptions.validation = 'md5' writeOptions.metadata = { - md5Hash: sourceMd5 + md5Hash: PersistorHelper.hexToBase64(sourceMd5) } } @@ -123,7 +119,7 @@ async function sendStream(bucket, key, readStream, sourceMd5) { } } -async function getFileStream(bucket, key, opts) { +async function getFileStream(bucket, key, opts = {}) { if (opts.end) { // S3 (and http range headers) treat 'end' as inclusive, so increase this by 1 opts.end++ @@ -174,7 +170,7 @@ async function getFileMd5Hash(bucket, key) { .bucket(bucket) .file(key) .getMetadata() - return base64ToHex(metadata[0].md5Hash) + return PersistorHelper.base64ToHex(metadata[0].md5Hash) } catch (err) { throw PersistorHelper.wrapError( err, diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js index ea8132a9c9..409f3182f1 100644 --- a/services/filestore/app/js/PersistorHelper.js +++ b/services/filestore/app/js/PersistorHelper.js @@ -12,7 +12,9 @@ module.exports = { verifyMd5, getMeteredStream, waitForStreamReady, - wrapError + wrapError, + hexToBase64, + base64ToHex } // returns a promise which resolves with the md5 hash of the stream @@ -103,3 +105,11 @@ function wrapError(error, message, params, ErrorType) { }).withCause(error) } } + +function base64ToHex(base64) { + return Buffer.from(base64, 'base64').toString('hex') +} + +function hexToBase64(hex) { + return Buffer.from(hex, 'hex').toString('base64') +} diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index fc505ccfbb..7b69ad5d26 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -46,10 +46,6 @@ const S3Persistor = { module.exports = S3Persistor -function hexToBase64(hex) { - return Buffer.from(hex, 'hex').toString('base64') -} - async function sendFile(bucketName, key, fsPath) { let readStream try { @@ -72,7 +68,7 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { let b64Hash if (sourceMd5) { - b64Hash = hexToBase64(sourceMd5) + b64Hash = PersistorHelper.hexToBase64(sourceMd5) } else { hashPromise = PersistorHelper.calculateStreamMd5(readStream) } From 12274e1427f31d09f11becd8ac0bc424d7093df3 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 13 Feb 2020 16:55:47 +0000 Subject: [PATCH 460/555] Add unit tests for GCS Persistor --- .../test/unit/js/GcsPersistorTests.js | 756 ++++++++++++++++++ 1 file changed, 756 insertions(+) create mode 100644 services/filestore/test/unit/js/GcsPersistorTests.js diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js new file mode 100644 index 0000000000..6264fad0ff --- /dev/null +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -0,0 +1,756 @@ +const sinon = require('sinon') +const chai = require('chai') +const { expect } = chai +const modulePath = '../../../app/js/GcsPersistor.js' +const SandboxedModule = require('sandboxed-module') + +const Errors = require('../../../app/js/Errors') + +describe('GcsPersistorTests', function() { + const filename = '/wombat/potato.tex' + const bucket = 'womBucket' + const key = 'monKey' + const destKey = 'donKey' + const objectSize = 5555 + const genericError = new Error('guru meditation error') + const filesSize = 33 + const md5 = 'ffffffff00000000ffffffff00000000' + const WriteStream = 'writeStream' + + let Metrics, + Logger, + Storage, + 
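+    // most of the names in this list are reassigned in beforeEach below as sinon stubs
+    // or fakes for the GCS client, fs, stream and hashing collaborators;
+    // GcsPersistor itself is the module under test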
Fs, + NotFoundError, + Meter, + MeteredStream, + ReadStream, + Stream, + GcsBucket, + GcsFile, + GcsPersistor, + FileNotFoundError, + Hash, + settings, + crypto, + files + + beforeEach(function() { + settings = { + filestore: { + backend: 'gcs', + stores: { + user_files: 'user_files' + } + } + } + + files = [ + { + metadata: { size: 11, md5Hash: '/////wAAAAD/////AAAAAA==' }, + delete: sinon.stub() + }, + { + metadata: { size: 22, md5Hash: '/////wAAAAD/////AAAAAA==' }, + delete: sinon.stub() + } + ] + + ReadStream = { + pipe: sinon.stub().returns('readStream'), + on: sinon + .stub() + .withArgs('end') + .yields(), + removeListener: sinon.stub() + } + + Stream = { + pipeline: sinon.stub().yields() + } + + Metrics = { + count: sinon.stub() + } + + GcsFile = { + delete: sinon.stub().resolves(), + createReadStream: sinon.stub().returns(ReadStream), + getMetadata: sinon.stub().resolves([files[0].metadata]), + createWriteStream: sinon.stub().returns(WriteStream), + copy: sinon.stub().resolves(), + exists: sinon.stub().resolves([true]) + } + + GcsBucket = { + file: sinon.stub().returns(GcsFile), + getFiles: sinon.stub().resolves([files]) + } + + Storage = class { + constructor() { + this.interceptors = [] + } + } + Storage.prototype.bucket = sinon.stub().returns(GcsBucket) + + NotFoundError = new Error('File not found') + NotFoundError.code = 404 + + Fs = { + createReadStream: sinon.stub().returns(ReadStream) + } + + FileNotFoundError = new Error('File not found') + FileNotFoundError.code = 'ENOENT' + + MeteredStream = { + type: 'metered', + on: sinon.stub(), + bytes: objectSize + } + MeteredStream.on.withArgs('finish').yields() + MeteredStream.on.withArgs('readable').yields() + Meter = sinon.stub().returns(MeteredStream) + + Hash = { + end: sinon.stub(), + read: sinon.stub().returns(md5), + setEncoding: sinon.stub() + } + crypto = { + createHash: sinon.stub().returns(Hash) + } + + Logger = { + warn: sinon.stub() + } + + GcsPersistor = SandboxedModule.require(modulePath, { + requires: { + '@google-cloud/storage': { Storage }, + 'settings-sharelatex': settings, + 'logger-sharelatex': Logger, + './Errors': Errors, + fs: Fs, + 'stream-meter': Meter, + stream: Stream, + 'metrics-sharelatex': Metrics, + crypto + }, + globals: { console } + }) + }) + + describe('getFileStream', function() { + describe('when called with valid parameters', function() { + let stream + + beforeEach(async function() { + stream = await GcsPersistor.promises.getFileStream(bucket, key) + }) + + it('returns a metered stream', function() { + expect(stream).to.equal(MeteredStream) + }) + + it('fetches the right key from the right bucket', function() { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.createReadStream).to.have.been.called + }) + + it('pipes the stream through the meter', function() { + expect(Stream.pipeline).to.have.been.calledWith( + ReadStream, + MeteredStream + ) + }) + + it('records an ingress metric', function() { + expect(Metrics.count).to.have.been.calledWith('gcs.ingress', objectSize) + }) + }) + + describe('when called with a byte range', function() { + let stream + + beforeEach(async function() { + stream = await GcsPersistor.promises.getFileStream(bucket, key, { + start: 5, + end: 10 + }) + }) + + it('returns a metered stream', function() { + expect(stream).to.equal(MeteredStream) + }) + + it('passes the byte range on to GCS', function() { + expect(GcsFile.createReadStream).to.have.been.calledWith({ + start: 5, + 
end: 11 // we increment the end because Google's 'end' is exclusive + }) + }) + }) + + describe("when the file doesn't exist", function() { + let error, stream + + beforeEach(async function() { + ReadStream.on = sinon.stub() + ReadStream.on.withArgs('error').yields(NotFoundError) + try { + stream = await GcsPersistor.promises.getFileStream(bucket, key) + } catch (err) { + error = err + } + }) + + it('does not return a stream', function() { + expect(stream).not.to.exist + }) + + it('throws a NotFoundError', function() { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + + it('wraps the error', function() { + expect(error.cause).to.exist + }) + + it('stores the bucket and key in the error', function() { + expect(error.info).to.include({ bucket: bucket, key: key }) + }) + }) + + describe('when Gcs encounters an unkown error', function() { + let error, stream + + beforeEach(async function() { + ReadStream.on = sinon.stub() + ReadStream.on.withArgs('error').yields(genericError) + try { + stream = await GcsPersistor.promises.getFileStream(bucket, key) + } catch (err) { + error = err + } + }) + + it('does not return a stream', function() { + expect(stream).not.to.exist + }) + + it('throws a ReadError', function() { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('wraps the error', function() { + expect(error.cause).to.exist + }) + + it('stores the bucket and key in the error', function() { + expect(error.info).to.include({ bucket: bucket, key: key }) + }) + }) + }) + + describe('getFileSize', function() { + describe('when called with valid parameters', function() { + let size + + beforeEach(async function() { + size = await GcsPersistor.promises.getFileSize(bucket, key) + }) + + it('should return the object size', function() { + expect(size).to.equal(files[0].metadata.size) + }) + + it('should pass the bucket and key to GCS', function() { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.getMetadata).to.have.been.called + }) + }) + + describe('when the object is not found', function() { + let error + + beforeEach(async function() { + GcsFile.getMetadata = sinon.stub().rejects(NotFoundError) + try { + await GcsPersistor.promises.getFileSize(bucket, key) + } catch (err) { + error = err + } + }) + + it('should return a NotFoundError', function() { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + + it('should wrap the error', function() { + expect(error.cause).to.equal(NotFoundError) + }) + }) + + describe('when GCS returns an error', function() { + let error + + beforeEach(async function() { + GcsFile.getMetadata = sinon.stub().rejects(genericError) + try { + await GcsPersistor.promises.getFileSize(bucket, key) + } catch (err) { + error = err + } + }) + + it('should return a ReadError', function() { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the error', function() { + expect(error.cause).to.equal(genericError) + }) + }) + }) + + describe('sendStream', function() { + describe('with valid parameters', function() { + beforeEach(async function() { + return GcsPersistor.promises.sendStream(bucket, key, ReadStream) + }) + + it('should upload the stream', function() { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.createWriteStream).to.have.been.called + }) + + it('should not try to create a resumable upload', function() { + 
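+        // GcsPersistor sets resumable: false ('recommended by Google' per the comment
+        // in that module), which is what this assertion checks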
expect(GcsFile.createWriteStream).to.have.been.calledWith({ + resumable: false + }) + }) + + it('should meter the stream', function() { + expect(Stream.pipeline).to.have.been.calledWith( + ReadStream, + MeteredStream + ) + }) + + it('should pipe the metered stream to GCS', function() { + expect(Stream.pipeline).to.have.been.calledWith( + MeteredStream, + WriteStream + ) + }) + + it('should record an egress metric', function() { + expect(Metrics.count).to.have.been.calledWith('gcs.egress', objectSize) + }) + + it('calculates the md5 hash of the file', function() { + expect(Stream.pipeline).to.have.been.calledWith(ReadStream, Hash) + }) + }) + + describe('when a hash is supplied', function() { + beforeEach(async function() { + return GcsPersistor.promises.sendStream( + bucket, + key, + ReadStream, + 'aaaaaaaabbbbbbbbaaaaaaaabbbbbbbb' + ) + }) + + it('should not calculate the md5 hash of the file', function() { + expect(Stream.pipeline).not.to.have.been.calledWith( + sinon.match.any, + Hash + ) + }) + + it('sends the hash in base64', function() { + expect(GcsFile.createWriteStream).to.have.been.calledWith({ + validation: 'md5', + metadata: { + md5Hash: 'qqqqqru7u7uqqqqqu7u7uw==' + }, + resumable: false + }) + }) + + it('does not fetch the md5 hash of the uploaded file', function() { + expect(GcsFile.getMetadata).not.to.have.been.called + }) + }) + + describe('when the upload fails', function() { + let error + beforeEach(async function() { + Stream.pipeline + .withArgs(MeteredStream, WriteStream, sinon.match.any) + .yields(genericError) + try { + await GcsPersistor.promises.sendStream(bucket, key, ReadStream) + } catch (err) { + error = err + } + }) + + it('throws a WriteError', function() { + expect(error).to.be.an.instanceOf(Errors.WriteError) + }) + + it('wraps the error', function() { + expect(error.cause).to.equal(genericError) + }) + }) + }) + + describe('sendFile', function() { + describe('with valid parameters', function() { + beforeEach(async function() { + return GcsPersistor.promises.sendFile(bucket, key, filename) + }) + + it('should create a read stream for the file', function() { + expect(Fs.createReadStream).to.have.been.calledWith(filename) + }) + + it('should create a write stream', function() { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.createWriteStream).to.have.been.called + }) + + it('should upload the stream via the meter', function() { + expect(Stream.pipeline).to.have.been.calledWith( + ReadStream, + MeteredStream + ) + expect(Stream.pipeline).to.have.been.calledWith( + MeteredStream, + WriteStream + ) + }) + }) + + describe('when the file does not exist', function() { + let error + + beforeEach(async function() { + Fs.createReadStream = sinon.stub().throws(FileNotFoundError) + try { + await GcsPersistor.promises.sendFile(bucket, key, filename) + } catch (err) { + error = err + } + }) + + it('returns a NotFoundError', function() { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + + it('wraps the error', function() { + expect(error.cause).to.equal(FileNotFoundError) + }) + }) + + describe('when reading the file throws an error', function() { + let error + + beforeEach(async function() { + Fs.createReadStream = sinon.stub().throws(genericError) + try { + await GcsPersistor.promises.sendFile(bucket, key, filename) + } catch (err) { + error = err + } + }) + + it('returns a ReadError', function() { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + 
it('wraps the error', function() { + expect(error.cause).to.equal(genericError) + }) + }) + }) + + describe('copyFile', function() { + const DestinationFile = 'destFile' + + beforeEach(function() { + GcsBucket.file.withArgs(destKey).returns(DestinationFile) + }) + + describe('with valid parameters', function() { + beforeEach(async function() { + return GcsPersistor.promises.copyFile(bucket, key, destKey) + }) + + it('should copy the object', function() { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.copy).to.have.been.calledWith(DestinationFile) + }) + }) + + describe('when the file does not exist', function() { + let error + + beforeEach(async function() { + GcsFile.copy = sinon.stub().rejects(NotFoundError) + try { + await GcsPersistor.promises.copyFile(bucket, key, destKey) + } catch (err) { + error = err + } + }) + + it('should throw a NotFoundError', function() { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + }) + }) + + describe('deleteFile', function() { + describe('with valid parameters', function() { + beforeEach(async function() { + return GcsPersistor.promises.deleteFile(bucket, key) + }) + + it('should delete the object', function() { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.delete).to.have.been.called + }) + }) + + describe('when the file does not exist', function() { + let error + + beforeEach(async function() { + GcsFile.delete = sinon.stub().rejects(NotFoundError) + try { + await GcsPersistor.promises.deleteFile(bucket, key) + } catch (err) { + error = err + } + }) + + it('should not throw an error', function() { + expect(error).not.to.exist + }) + }) + }) + + describe('deleteDirectory', function() { + describe('with valid parameters', function() { + beforeEach(async function() { + return GcsPersistor.promises.deleteDirectory(bucket, key) + }) + + it('should list the objects in the directory', function() { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.getFiles).to.have.been.calledWith({ + directory: key + }) + }) + + it('should delete the objects', function() { + expect(files[0].delete).to.have.been.called + expect(files[1].delete).to.have.been.called + }) + }) + + describe('when there are no files', function() { + beforeEach(async function() { + GcsBucket.getFiles.resolves([[]]) + return GcsPersistor.promises.deleteDirectory(bucket, key) + }) + + it('should list the objects in the directory', function() { + expect(GcsBucket.getFiles).to.have.been.calledWith({ + directory: key + }) + }) + + it('should not try to delete any objects', function() { + expect(files[0].delete).not.to.have.been.called + expect(files[1].delete).not.to.have.been.called + }) + }) + + describe('when there is an error listing the objects', function() { + let error + + beforeEach(async function() { + GcsBucket.getFiles = sinon.stub().rejects(genericError) + try { + await GcsPersistor.promises.deleteDirectory(bucket, key) + } catch (err) { + error = err + } + }) + + it('should generate a ReadError', function() { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the error', function() { + expect(error.cause).to.equal(genericError) + }) + + it('should not try to delete any objects', function() { + expect(files[0].delete).not.to.have.been.called + }) + }) + + describe('when there is an error deleting the objects', function() { + let 
error + + beforeEach(async function() { + files[0].delete = sinon.stub().rejects(genericError) + try { + await GcsPersistor.promises.deleteDirectory(bucket, key) + } catch (err) { + error = err + } + }) + + it('should generate a WriteError', function() { + expect(error).to.be.an.instanceOf(Errors.WriteError) + }) + + it('should wrap the error', function() { + expect(error.cause).to.equal(genericError) + }) + }) + }) + + describe('directorySize', function() { + describe('with valid parameters', function() { + let size + + beforeEach(async function() { + size = await GcsPersistor.promises.directorySize(bucket, key) + }) + + it('should list the objects in the directory', function() { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.getFiles).to.have.been.calledWith({ directory: key }) + }) + + it('should return the directory size', function() { + expect(size).to.equal(filesSize) + }) + }) + + describe('when there are no files', function() { + let size + + beforeEach(async function() { + GcsBucket.getFiles.resolves([[]]) + size = await GcsPersistor.promises.directorySize(bucket, key) + }) + + it('should list the objects in the directory', function() { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.getFiles).to.have.been.calledWith({ directory: key }) + }) + + it('should return zero', function() { + expect(size).to.equal(0) + }) + }) + + describe('when there is an error listing the objects', function() { + let error + + beforeEach(async function() { + GcsBucket.getFiles.rejects(genericError) + try { + await GcsPersistor.promises.directorySize(bucket, key) + } catch (err) { + error = err + } + }) + + it('should generate a ReadError', function() { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the error', function() { + expect(error.cause).to.equal(genericError) + }) + }) + }) + + describe('checkIfFileExists', function() { + describe('when the file exists', function() { + let exists + + beforeEach(async function() { + exists = await GcsPersistor.promises.checkIfFileExists(bucket, key) + }) + + it('should ask the file if it exists', function() { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.exists).to.have.been.called + }) + + it('should return that the file exists', function() { + expect(exists).to.equal(true) + }) + }) + + describe('when the file does not exist', function() { + let exists + + beforeEach(async function() { + GcsFile.exists = sinon.stub().resolves([false]) + exists = await GcsPersistor.promises.checkIfFileExists(bucket, key) + }) + + it('should get the object header', function() { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.exists).to.have.been.called + }) + + it('should return that the file does not exist', function() { + expect(exists).to.equal(false) + }) + }) + + describe('when there is an error', function() { + let error + + beforeEach(async function() { + GcsFile.exists = sinon.stub().rejects(genericError) + try { + await GcsPersistor.promises.checkIfFileExists(bucket, key) + } catch (err) { + error = err + } + }) + + it('should generate a ReadError', function() { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the error', function() { + expect(error.cause).to.equal(genericError) + }) + }) + }) +}) From 6979b8638aa899f055d34bb0d0c00a602fe4a386 Mon Sep 17 
00:00:00 2001 From: Simon Detheridge Date: Wed, 4 Mar 2020 15:42:16 +0000 Subject: [PATCH 461/555] Add 'Buffer' global for GCS unit tests --- services/filestore/test/unit/js/GcsPersistorTests.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index 6264fad0ff..1a22971ddd 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -138,7 +138,7 @@ describe('GcsPersistorTests', function() { 'metrics-sharelatex': Metrics, crypto }, - globals: { console } + globals: { console, Buffer } }) }) From 76243fd75aff9e7409939299fc067b2ae33db134 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 4 Mar 2020 16:04:26 +0000 Subject: [PATCH 462/555] 'bucket' -> 'bucketName' in GCS Persistor --- services/filestore/app/js/GcsPersistor.js | 70 ++++++++++--------- .../test/unit/js/GcsPersistorTests.js | 4 +- 2 files changed, 39 insertions(+), 35 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index 690df70252..dd2c44d5ba 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -53,7 +53,7 @@ const GcsPersistor = { module.exports = GcsPersistor -async function sendFile(bucket, key, fsPath) { +async function sendFile(bucketName, key, fsPath) { let readStream try { readStream = fs.createReadStream(fsPath) @@ -61,14 +61,14 @@ async function sendFile(bucket, key, fsPath) { throw PersistorHelper.wrapError( err, 'error reading file from disk', - { bucketName: bucket, key, fsPath }, + { bucketName, key, fsPath }, ReadError ) } - return sendStream(bucket, key, readStream) + return sendStream(bucketName, key, readStream) } -async function sendStream(bucket, key, readStream, sourceMd5) { +async function sendStream(bucketName, key, readStream, sourceMd5) { try { let hashPromise @@ -96,7 +96,7 @@ async function sendStream(bucket, key, readStream, sourceMd5) { } const uploadStream = storage - .bucket(bucket) + .bucket(bucketName) .file(key) .createWriteStream(writeOptions) @@ -107,25 +107,25 @@ async function sendStream(bucket, key, readStream, sourceMd5) { if (hashPromise) { sourceMd5 = await hashPromise // throws on mismatch - await PersistorHelper.verifyMd5(GcsPersistor, bucket, key, sourceMd5) + await PersistorHelper.verifyMd5(GcsPersistor, bucketName, key, sourceMd5) } } catch (err) { throw PersistorHelper.wrapError( err, 'upload to GCS failed', - { bucket, key }, + { bucketName, key }, WriteError ) } } -async function getFileStream(bucket, key, opts = {}) { +async function getFileStream(bucketName, key, opts = {}) { if (opts.end) { // S3 (and http range headers) treat 'end' as inclusive, so increase this by 1 opts.end++ } const stream = storage - .bucket(bucket) + .bucket(bucketName) .file(key) .createReadStream(opts) @@ -141,16 +141,16 @@ async function getFileStream(bucket, key, opts = {}) { throw PersistorHelper.wrapError( err, 'error reading file from GCS', - { bucket, key, opts }, + { bucketName, key, opts }, ReadError ) } } -async function getFileSize(bucket, key) { +async function getFileSize(bucketName, key) { try { const metadata = await storage - .bucket(bucket) + .bucket(bucketName) .file(key) .getMetadata() return metadata[0].size @@ -158,16 +158,16 @@ async function getFileSize(bucket, key) { throw PersistorHelper.wrapError( err, 'error getting size of GCS object', - { bucket, key }, + { 
bucketName, key }, ReadError ) } } -async function getFileMd5Hash(bucket, key) { +async function getFileMd5Hash(bucketName, key) { try { const metadata = await storage - .bucket(bucket) + .bucket(bucketName) .file(key) .getMetadata() return PersistorHelper.base64ToHex(metadata[0].md5Hash) @@ -175,23 +175,23 @@ async function getFileMd5Hash(bucket, key) { throw PersistorHelper.wrapError( err, 'error getting hash of GCS object', - { bucket, key }, + { bucketName, key }, ReadError ) } } -async function deleteFile(bucket, key) { +async function deleteFile(bucketName, key) { try { await storage - .bucket(bucket) + .bucket(bucketName) .file(key) .delete() } catch (err) { const error = PersistorHelper.wrapError( err, 'error deleting GCS object', - { bucket, key }, + { bucketName, key }, WriteError ) if (!(error instanceof NotFoundError)) { @@ -200,17 +200,19 @@ async function deleteFile(bucket, key) { } } -async function deleteDirectory(bucket, key) { +async function deleteDirectory(bucketName, key) { let files try { - const response = await storage.bucket(bucket).getFiles({ directory: key }) + const response = await storage + .bucket(bucketName) + .getFiles({ directory: key }) files = response[0] } catch (err) { const error = PersistorHelper.wrapError( err, 'failed to list objects in GCS', - { bucket, key }, + { bucketName, key }, ReadError ) if (error instanceof NotFoundError) { @@ -226,7 +228,7 @@ async function deleteDirectory(bucket, key) { const error = PersistorHelper.wrapError( err, 'failed to delete object in GCS', - { bucket, key }, + { bucketName, key }, WriteError ) if (!(error instanceof NotFoundError)) { @@ -236,17 +238,19 @@ async function deleteDirectory(bucket, key) { } } -async function directorySize(bucket, key) { +async function directorySize(bucketName, key) { let files try { - const response = await storage.bucket(bucket).getFiles({ directory: key }) + const response = await storage + .bucket(bucketName) + .getFiles({ directory: key }) files = response[0] } catch (err) { throw PersistorHelper.wrapError( err, 'failed to list objects in GCS', - { bucket, key }, + { bucketName, key }, ReadError ) } @@ -254,10 +258,10 @@ async function directorySize(bucket, key) { return files.reduce((acc, file) => Number(file.metadata.size) + acc, 0) } -async function checkIfFileExists(bucket, key) { +async function checkIfFileExists(bucketName, key) { try { const response = await storage - .bucket(bucket) + .bucket(bucketName) .file(key) .exists() return response[0] @@ -265,16 +269,16 @@ async function checkIfFileExists(bucket, key) { throw PersistorHelper.wrapError( err, 'error checking if file exists in GCS', - { bucket, key }, + { bucketName, key }, ReadError ) } } -async function copyFile(bucket, sourceKey, destKey) { +async function copyFile(bucketName, sourceKey, destKey) { try { - const src = storage.bucket(bucket).file(sourceKey) - const dest = storage.bucket(bucket).file(destKey) + const src = storage.bucket(bucketName).file(sourceKey) + const dest = storage.bucket(bucketName).file(destKey) await src.copy(dest) } catch (err) { // fake-gcs-server has a bug that returns an invalid response when the file does not exist @@ -284,7 +288,7 @@ async function copyFile(bucket, sourceKey, destKey) { throw PersistorHelper.wrapError( err, 'failed to copy file in GCS', - { bucket, sourceKey, destKey }, + { bucketName, sourceKey, destKey }, WriteError ) } diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index 
1a22971ddd..97b049e833 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -220,7 +220,7 @@ describe('GcsPersistorTests', function() { }) it('stores the bucket and key in the error', function() { - expect(error.info).to.include({ bucket: bucket, key: key }) + expect(error.info).to.include({ bucketName: bucket, key: key }) }) }) @@ -250,7 +250,7 @@ describe('GcsPersistorTests', function() { }) it('stores the bucket and key in the error', function() { - expect(error.info).to.include({ bucket: bucket, key: key }) + expect(error.info).to.include({ bucketName: bucket, key: key }) }) }) }) From def383574ea03f8e2899d0825071c25d63b4a7ec Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 4 Mar 2020 16:17:36 +0000 Subject: [PATCH 463/555] Simplify getMeteredStream to record metric directly --- services/filestore/app/js/GcsPersistor.js | 10 ++-------- services/filestore/app/js/PersistorHelper.js | 9 +++++---- services/filestore/app/js/S3Persistor.js | 14 ++------------ .../filestore/test/unit/js/FSPersistorTests.js | 3 ++- 4 files changed, 11 insertions(+), 25 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index dd2c44d5ba..9ddf608e90 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -1,5 +1,4 @@ const settings = require('settings-sharelatex') -const metrics = require('metrics-sharelatex') const fs = require('fs') const { promisify } = require('util') const Stream = require('stream') @@ -79,9 +78,7 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { const meteredStream = PersistorHelper.getMeteredStream( readStream, - (_, byteCount) => { - metrics.count('gcs.egress', byteCount) - } + 'gcs.egress' ) const writeOptions = { @@ -129,10 +126,7 @@ async function getFileStream(bucketName, key, opts = {}) { .file(key) .createReadStream(opts) - const meteredStream = PersistorHelper.getMeteredStream(stream, (_, bytes) => { - // ignore the error parameter and just log the byte count - metrics.count('gcs.ingress', bytes) - }) + const meteredStream = PersistorHelper.getMeteredStream(stream, 'gcs.ingress') try { await PersistorHelper.waitForStreamReady(stream) diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js index 409f3182f1..a19311e889 100644 --- a/services/filestore/app/js/PersistorHelper.js +++ b/services/filestore/app/js/PersistorHelper.js @@ -1,4 +1,5 @@ const crypto = require('crypto') +const metrics = require('metrics-sharelatex') const meter = require('stream-meter') const Stream = require('stream') const logger = require('logger-sharelatex') @@ -54,16 +55,16 @@ async function verifyMd5(persistor, bucket, key, sourceMd5, destMd5 = null) { // returns the next stream in the pipeline, and calls the callback with the byte count // when the stream finishes or receives an error -function getMeteredStream(stream, callback) { +function getMeteredStream(stream, metricName) { const meteredStream = meter() pipeline(stream, meteredStream) .then(() => { - callback(null, meteredStream.bytes) + metrics.count(metricName, meteredStream.bytes) }) - .catch(err => { + .catch(() => { // on error, just send how many bytes we received before the stream stopped - callback(err, meteredStream.bytes) + metrics.count(metricName, meteredStream.bytes) }) return meteredStream diff --git a/services/filestore/app/js/S3Persistor.js 
b/services/filestore/app/js/S3Persistor.js index 7b69ad5d26..e50a9e3030 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -4,7 +4,6 @@ http.globalAgent.maxSockets = 300 https.globalAgent.maxSockets = 300 const settings = require('settings-sharelatex') -const metrics = require('metrics-sharelatex') const PersistorHelper = require('./PersistorHelper') @@ -75,10 +74,7 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { const meteredStream = PersistorHelper.getMeteredStream( readStream, - (_, byteCount) => { - // ignore the error parameter and just log the byte count - metrics.count('s3.egress', byteCount) - } + 's3.egress' ) // if we have an md5 hash, pass this to S3 to verify the upload @@ -143,13 +139,7 @@ async function getFileStream(bucketName, key, opts) { .getObject(params) .createReadStream() - const meteredStream = PersistorHelper.getMeteredStream( - stream, - (_, byteCount) => { - // ignore the error parameter and just log the byte count - metrics.count('s3.ingress', byteCount) - } - ) + const meteredStream = PersistorHelper.getMeteredStream(stream, 's3.ingress') try { await PersistorHelper.waitForStreamReady(stream) diff --git a/services/filestore/test/unit/js/FSPersistorTests.js b/services/filestore/test/unit/js/FSPersistorTests.js index 0a09869bc0..4dd5a2fa11 100644 --- a/services/filestore/test/unit/js/FSPersistorTests.js +++ b/services/filestore/test/unit/js/FSPersistorTests.js @@ -73,7 +73,8 @@ describe('FSPersistorTests', function() { crypto, // imported by PersistorHelper but otherwise unused here 'stream-meter': {}, - 'logger-sharelatex': {} + 'logger-sharelatex': {}, + 'metrics-sharelatex': {} }, globals: { console } }) From a7198764cbeb630513a67673b3f15cfc92f9a38b Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 4 Mar 2020 16:25:11 +0000 Subject: [PATCH 464/555] Improve/add some comments for clarity --- services/filestore/app/js/GcsPersistor.js | 10 +++++++--- services/filestore/app/js/S3Persistor.js | 7 +++++-- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index 9ddf608e90..61b3e814b0 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -78,11 +78,12 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { const meteredStream = PersistorHelper.getMeteredStream( readStream, - 'gcs.egress' + 'gcs.egress' // egress from us to gcs ) const writeOptions = { - resumable: false // recommended by Google + // disabling of resumable uploads is recommended by Google: + resumable: false } if (sourceMd5) { @@ -126,7 +127,10 @@ async function getFileStream(bucketName, key, opts = {}) { .file(key) .createReadStream(opts) - const meteredStream = PersistorHelper.getMeteredStream(stream, 'gcs.ingress') + const meteredStream = PersistorHelper.getMeteredStream( + stream, + 'gcs.ingress' // ingress to us from gcs + ) try { await PersistorHelper.waitForStreamReady(stream) diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index e50a9e3030..4403386716 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -74,7 +74,7 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { const meteredStream = PersistorHelper.getMeteredStream( readStream, - 's3.egress' + 's3.egress' // egress from us to s3 ) // if we have an md5 hash, pass this to S3 to 
verify the upload @@ -139,7 +139,10 @@ async function getFileStream(bucketName, key, opts) { .getObject(params) .createReadStream() - const meteredStream = PersistorHelper.getMeteredStream(stream, 's3.ingress') + const meteredStream = PersistorHelper.getMeteredStream( + stream, + 's3.ingress' // ingress to us from s3 + ) try { await PersistorHelper.waitForStreamReady(stream) From 30114cd79b237ddfdb2d6a986a7a60c57dc8798c Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 4 Mar 2020 16:38:05 +0000 Subject: [PATCH 465/555] Remove unnecessary try/catch around 'createReadStream' --- services/filestore/app/js/GcsPersistor.js | 13 +----- services/filestore/app/js/S3Persistor.js | 13 +----- .../test/unit/js/GcsPersistorTests.js | 42 ------------------- .../test/unit/js/S3PersistorTests.js | 42 ------------------- 4 files changed, 2 insertions(+), 108 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index 61b3e814b0..50cac9bda8 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -53,18 +53,7 @@ const GcsPersistor = { module.exports = GcsPersistor async function sendFile(bucketName, key, fsPath) { - let readStream - try { - readStream = fs.createReadStream(fsPath) - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'error reading file from disk', - { bucketName, key, fsPath }, - ReadError - ) - } - return sendStream(bucketName, key, readStream) + return sendStream(bucketName, key, fs.createReadStream(fsPath)) } async function sendStream(bucketName, key, readStream, sourceMd5) { diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index 4403386716..1b92a61ae6 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -46,18 +46,7 @@ const S3Persistor = { module.exports = S3Persistor async function sendFile(bucketName, key, fsPath) { - let readStream - try { - readStream = fs.createReadStream(fsPath) - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'error reading file from disk', - { bucketName, key, fsPath }, - ReadError - ) - } - return sendStream(bucketName, key, readStream) + return sendStream(bucketName, key, fs.createReadStream(fsPath)) } async function sendStream(bucketName, key, readStream, sourceMd5) { diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index 97b049e833..bdd7e1d562 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -440,48 +440,6 @@ describe('GcsPersistorTests', function() { ) }) }) - - describe('when the file does not exist', function() { - let error - - beforeEach(async function() { - Fs.createReadStream = sinon.stub().throws(FileNotFoundError) - try { - await GcsPersistor.promises.sendFile(bucket, key, filename) - } catch (err) { - error = err - } - }) - - it('returns a NotFoundError', function() { - expect(error).to.be.an.instanceOf(Errors.NotFoundError) - }) - - it('wraps the error', function() { - expect(error.cause).to.equal(FileNotFoundError) - }) - }) - - describe('when reading the file throws an error', function() { - let error - - beforeEach(async function() { - Fs.createReadStream = sinon.stub().throws(genericError) - try { - await GcsPersistor.promises.sendFile(bucket, key, filename) - } catch (err) { - error = err - } - }) - - it('returns a ReadError', function() { - 
expect(error).to.be.an.instanceOf(Errors.ReadError) - }) - - it('wraps the error', function() { - expect(error.cause).to.equal(genericError) - }) - }) }) describe('copyFile', function() { diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js index aa9444428e..484a0209a8 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -583,48 +583,6 @@ describe('S3PersistorTests', function() { }) }) }) - - describe('when the file does not exist', function() { - let error - - beforeEach(async function() { - Fs.createReadStream = sinon.stub().throws(FileNotFoundError) - try { - await S3Persistor.promises.sendFile(bucket, key, filename) - } catch (err) { - error = err - } - }) - - it('returns a NotFoundError', function() { - expect(error).to.be.an.instanceOf(Errors.NotFoundError) - }) - - it('wraps the error', function() { - expect(error.cause).to.equal(FileNotFoundError) - }) - }) - - describe('when reading the file throws an error', function() { - let error - - beforeEach(async function() { - Fs.createReadStream = sinon.stub().throws(genericError) - try { - await S3Persistor.promises.sendFile(bucket, key, filename) - } catch (err) { - error = err - } - }) - - it('returns a ReadError', function() { - expect(error).to.be.an.instanceOf(Errors.ReadError) - }) - - it('wraps the error', function() { - expect(error.cause).to.equal(genericError) - }) - }) }) describe('copyFile', function() { From 3bb956b38e74bfd39846905839aacb0061e9c4af Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 4 Mar 2020 17:01:20 +0000 Subject: [PATCH 466/555] Use http for the fake GCS server --- services/filestore/app/js/GcsPersistor.js | 3 +++ services/filestore/docker-compose.ci.yml | 4 ++-- services/filestore/docker-compose.yml | 4 ++-- services/filestore/test/acceptance/deps/Dockerfile.fake-gcs | 2 +- services/filestore/test/acceptance/js/TestConfig.js | 1 + 5 files changed, 9 insertions(+), 5 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index 50cac9bda8..d81fc40b03 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -19,6 +19,9 @@ if (settings.filestore.gcs && settings.filestore.gcs.apiEndpoint) { request: function(reqOpts) { const url = new URL(reqOpts.uri) url.host = settings.filestore.gcs.apiEndpoint + if (settings.filestore.gcs.apiScheme) { + url.protocol = settings.filestore.gcs.apiScheme + } reqOpts.uri = url.toString() return reqOpts } diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index fe4eaa35fd..d3accee799 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -34,11 +34,11 @@ services: AWS_SECRET_ACCESS_KEY: fake AWS_S3_PATH_STYLE: 'true' GCS_API_ENDPOINT: gcs:9090 + GCS_API_SCHEME: http GCS_USER_FILES_BUCKET_NAME: fake_userfiles GCS_TEMPLATE_FILES_BUCKET_NAME: fake_templatefiles GCS_PUBLIC_FILES_BUCKET_NAME: fake_publicfiles - NODE_TLS_REJECT_UNAUTHORIZED: 0 - STORAGE_EMULATOR_HOST: https://gcs:9090/storage/v1 + STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1 depends_on: s3: condition: service_healthy diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index d904574f84..54ef9c00c9 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -44,11 +44,11 @@ services: 
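      # Editor's note (not part of the original patch): GCS_API_ENDPOINT and
      # GCS_API_SCHEME below are read by TestConfig.js into
      # settings.filestore.gcs.apiEndpoint / apiScheme, which the request
      # interceptor added above uses to rewrite outgoing URIs, e.g.
      # https://storage.googleapis.com/storage/v1/... becomes
      # http://gcs:9090/storage/v1/... when talking to the fake GCS server.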
AWS_ACCESS_KEY_ID: fake AWS_SECRET_ACCESS_KEY: fake GCS_API_ENDPOINT: gcs:9090 + GCS_API_SCHEME: http GCS_USER_FILES_BUCKET_NAME: fake_userfiles GCS_TEMPLATE_FILES_BUCKET_NAME: fake_templatefiles GCS_PUBLIC_FILES_BUCKET_NAME: fake_publicfiles - NODE_TLS_REJECT_UNAUTHORIZED: 0 - STORAGE_EMULATOR_HOST: https://gcs:9090/storage/v1 + STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1 user: node depends_on: s3: diff --git a/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs index f8313cbce0..6acb2d63b4 100644 --- a/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs +++ b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs @@ -2,4 +2,4 @@ FROM fsouza/fake-gcs-server RUN apk add --update --no-cache curl COPY healthcheck.sh /healthcheck.sh HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090 -CMD ["--port=9090"] +CMD ["--port=9090", "--scheme=http"] diff --git a/services/filestore/test/acceptance/js/TestConfig.js b/services/filestore/test/acceptance/js/TestConfig.js index e673ace71c..fd7d0f034c 100644 --- a/services/filestore/test/acceptance/js/TestConfig.js +++ b/services/filestore/test/acceptance/js/TestConfig.js @@ -22,6 +22,7 @@ function s3Stores() { function gcsConfig() { return { apiEndpoint: process.env.GCS_API_ENDPOINT, + apiScheme: process.env.GCS_API_SCHEME, projectId: 'fake' } } From 460dd96b179c9d6e8ca955f10d877dd9830275e8 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 5 Mar 2020 13:45:46 +0000 Subject: [PATCH 467/555] Cosmetic clean-up of GCS Persistor & tests --- services/filestore/app/js/GcsPersistor.js | 20 ++++++++--------- .../test/unit/js/GcsPersistorTests.js | 22 +++++++++---------- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index d81fc40b03..f4702e06a4 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -139,11 +139,11 @@ async function getFileStream(bucketName, key, opts = {}) { async function getFileSize(bucketName, key) { try { - const metadata = await storage + const [metadata] = await storage .bucket(bucketName) .file(key) .getMetadata() - return metadata[0].size + return metadata.size } catch (err) { throw PersistorHelper.wrapError( err, @@ -156,11 +156,11 @@ async function getFileSize(bucketName, key) { async function getFileMd5Hash(bucketName, key) { try { - const metadata = await storage + const [metadata] = await storage .bucket(bucketName) .file(key) .getMetadata() - return PersistorHelper.base64ToHex(metadata[0].md5Hash) + return PersistorHelper.base64ToHex(metadata.md5Hash) } catch (err) { throw PersistorHelper.wrapError( err, @@ -194,10 +194,10 @@ async function deleteDirectory(bucketName, key) { let files try { - const response = await storage + const [response] = await storage .bucket(bucketName) .getFiles({ directory: key }) - files = response[0] + files = response } catch (err) { const error = PersistorHelper.wrapError( err, @@ -232,10 +232,10 @@ async function directorySize(bucketName, key) { let files try { - const response = await storage + const [response] = await storage .bucket(bucketName) .getFiles({ directory: key }) - files = response[0] + files = response } catch (err) { throw PersistorHelper.wrapError( err, @@ -250,11 +250,11 @@ async function directorySize(bucketName, key) { async function checkIfFileExists(bucketName, key) { try { - const response = 
await storage + const [response] = await storage .bucket(bucketName) .file(key) .exists() - return response[0] + return response } catch (err) { throw PersistorHelper.wrapError( err, diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index bdd7e1d562..ec071192e2 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -21,7 +21,7 @@ describe('GcsPersistorTests', function() { Logger, Storage, Fs, - NotFoundError, + GcsNotFoundError, Meter, MeteredStream, ReadStream, @@ -94,8 +94,8 @@ describe('GcsPersistorTests', function() { } Storage.prototype.bucket = sinon.stub().returns(GcsBucket) - NotFoundError = new Error('File not found') - NotFoundError.code = 404 + GcsNotFoundError = new Error('File not found') + GcsNotFoundError.code = 404 Fs = { createReadStream: sinon.stub().returns(ReadStream) @@ -199,7 +199,7 @@ describe('GcsPersistorTests', function() { beforeEach(async function() { ReadStream.on = sinon.stub() - ReadStream.on.withArgs('error').yields(NotFoundError) + ReadStream.on.withArgs('error').yields(GcsNotFoundError) try { stream = await GcsPersistor.promises.getFileStream(bucket, key) } catch (err) { @@ -278,7 +278,7 @@ describe('GcsPersistorTests', function() { let error beforeEach(async function() { - GcsFile.getMetadata = sinon.stub().rejects(NotFoundError) + GcsFile.getMetadata = sinon.stub().rejects(GcsNotFoundError) try { await GcsPersistor.promises.getFileSize(bucket, key) } catch (err) { @@ -291,7 +291,7 @@ describe('GcsPersistorTests', function() { }) it('should wrap the error', function() { - expect(error.cause).to.equal(NotFoundError) + expect(error.cause).to.equal(GcsNotFoundError) }) }) @@ -443,10 +443,10 @@ describe('GcsPersistorTests', function() { }) describe('copyFile', function() { - const DestinationFile = 'destFile' + const destinationFile = 'destFile' beforeEach(function() { - GcsBucket.file.withArgs(destKey).returns(DestinationFile) + GcsBucket.file.withArgs(destKey).returns(destinationFile) }) describe('with valid parameters', function() { @@ -457,7 +457,7 @@ describe('GcsPersistorTests', function() { it('should copy the object', function() { expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) expect(GcsBucket.file).to.have.been.calledWith(key) - expect(GcsFile.copy).to.have.been.calledWith(DestinationFile) + expect(GcsFile.copy).to.have.been.calledWith(destinationFile) }) }) @@ -465,7 +465,7 @@ describe('GcsPersistorTests', function() { let error beforeEach(async function() { - GcsFile.copy = sinon.stub().rejects(NotFoundError) + GcsFile.copy = sinon.stub().rejects(GcsNotFoundError) try { await GcsPersistor.promises.copyFile(bucket, key, destKey) } catch (err) { @@ -496,7 +496,7 @@ describe('GcsPersistorTests', function() { let error beforeEach(async function() { - GcsFile.delete = sinon.stub().rejects(NotFoundError) + GcsFile.delete = sinon.stub().rejects(GcsNotFoundError) try { await GcsPersistor.promises.deleteFile(bucket, key) } catch (err) { From eb93ae4b10fb89b0d06eec196f1f132a67209aeb Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 5 Mar 2020 14:12:15 +0000 Subject: [PATCH 468/555] Use Bucket.deleteFiles to delete directory contents, instead of iterating --- services/filestore/app/js/GcsPersistor.js | 32 ++++------ .../test/unit/js/GcsPersistorTests.js | 60 +++---------------- 2 files changed, 17 insertions(+), 75 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js 
b/services/filestore/app/js/GcsPersistor.js index f4702e06a4..a2525361c2 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -191,41 +191,29 @@ async function deleteFile(bucketName, key) { } async function deleteDirectory(bucketName, key) { - let files + if (!key.match(/^[a-z0-9_-]+/i)) { + throw new NotFoundError({ + message: 'deleteDirectoryKey is invalid or missing', + info: { bucketName, key } + }) + } try { - const [response] = await storage + await storage .bucket(bucketName) - .getFiles({ directory: key }) - files = response + .deleteFiles({ directory: key, force: true }) } catch (err) { const error = PersistorHelper.wrapError( err, - 'failed to list objects in GCS', + 'failed to delete directory in GCS', { bucketName, key }, - ReadError + WriteError ) if (error instanceof NotFoundError) { return } throw error } - - for (const index in files) { - try { - await files[index].delete() - } catch (err) { - const error = PersistorHelper.wrapError( - err, - 'failed to delete object in GCS', - { bucketName, key }, - WriteError - ) - if (!(error instanceof NotFoundError)) { - throw error - } - } - } } async function directorySize(bucketName, key) { diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index ec071192e2..a63296a18f 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -84,7 +84,8 @@ describe('GcsPersistorTests', function() { GcsBucket = { file: sinon.stub().returns(GcsFile), - getFiles: sinon.stub().resolves([files]) + getFiles: sinon.stub().resolves([files]), + deleteFiles: sinon.stub().resolves() } Storage = class { @@ -516,67 +517,20 @@ describe('GcsPersistorTests', function() { return GcsPersistor.promises.deleteDirectory(bucket, key) }) - it('should list the objects in the directory', function() { + it('should delete the objects in the directory', function() { expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) - expect(GcsBucket.getFiles).to.have.been.calledWith({ - directory: key + expect(GcsBucket.deleteFiles).to.have.been.calledWith({ + directory: key, + force: true }) }) - - it('should delete the objects', function() { - expect(files[0].delete).to.have.been.called - expect(files[1].delete).to.have.been.called - }) - }) - - describe('when there are no files', function() { - beforeEach(async function() { - GcsBucket.getFiles.resolves([[]]) - return GcsPersistor.promises.deleteDirectory(bucket, key) - }) - - it('should list the objects in the directory', function() { - expect(GcsBucket.getFiles).to.have.been.calledWith({ - directory: key - }) - }) - - it('should not try to delete any objects', function() { - expect(files[0].delete).not.to.have.been.called - expect(files[1].delete).not.to.have.been.called - }) - }) - - describe('when there is an error listing the objects', function() { - let error - - beforeEach(async function() { - GcsBucket.getFiles = sinon.stub().rejects(genericError) - try { - await GcsPersistor.promises.deleteDirectory(bucket, key) - } catch (err) { - error = err - } - }) - - it('should generate a ReadError', function() { - expect(error).to.be.an.instanceOf(Errors.ReadError) - }) - - it('should wrap the error', function() { - expect(error.cause).to.equal(genericError) - }) - - it('should not try to delete any objects', function() { - expect(files[0].delete).not.to.have.been.called - }) }) describe('when there is an error deleting the objects', 
function() { let error beforeEach(async function() { - files[0].delete = sinon.stub().rejects(genericError) + GcsBucket.deleteFiles = sinon.stub().rejects(genericError) try { await GcsPersistor.promises.deleteDirectory(bucket, key) } catch (err) { From 2509b51883228b57f9875bc271048e366880d6fc Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 5 Mar 2020 17:23:47 +0000 Subject: [PATCH 469/555] Add optional gcs config to override gcs settings (for fake gcs server) --- services/filestore/config/settings.defaults.coffee | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 6ffe5a8523..0fe98effb1 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -34,6 +34,12 @@ settings = # gcs - Google Cloud Storage backend: process.env['BACKEND'] + gcs: + if process.env['GCS_API_ENDPOINT'] + apiEndpoint: process.env['GCS_API_ENDPOINT'] + apiScheme: process.env['GCS_API_SCHEME'] + projectId: process.env['GCS_PROJECT_ID'] + s3: if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? key: process.env['AWS_ACCESS_KEY_ID'] @@ -71,7 +77,7 @@ settings = sentry: dsn: process.env.SENTRY_DSN - + # Filestore health check # ---------------------- # Project and file details to check in persistor when calling /health_check From 28c3fe4a56e5d8203996d12ec9c61acdc867339e Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 10 Mar 2020 17:54:09 +0000 Subject: [PATCH 470/555] Validate key names when deleting directory with a configurable regex --- services/filestore/app/js/GcsPersistor.js | 2 +- .../filestore/config/settings.defaults.coffee | 2 + services/filestore/package-lock.json | 87 +++++++++++++++++++ services/filestore/package.json | 1 + .../test/acceptance/js/FilestoreTests.js | 53 ++++++----- .../test/acceptance/js/TestConfig.js | 3 +- .../test/unit/js/GcsPersistorTests.js | 28 +++++- 7 files changed, 144 insertions(+), 32 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index a2525361c2..3a314b50c1 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -191,7 +191,7 @@ async function deleteFile(bucketName, key) { } async function deleteDirectory(bucketName, key) { - if (!key.match(/^[a-z0-9_-]+/i)) { + if (!key.match(settings.filestore.gcs.directoryKeyRegex)) { throw new NotFoundError({ message: 'deleteDirectoryKey is invalid or missing', info: { bucketName, key } diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 0fe98effb1..7bb37db9de 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -39,6 +39,8 @@ settings = apiEndpoint: process.env['GCS_API_ENDPOINT'] apiScheme: process.env['GCS_API_SCHEME'] projectId: process.env['GCS_PROJECT_ID'] + # only keys that match this regex can be deleted + directoryKeyRegex: new RegExp(process.env['GCS_DIRECTORY_KEY_REGEX'] || "^[0-9a-fA-F]{24}/[0-9a-fA-F]{24}") s3: if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? 
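[Editor's note, not part of the patch series: a minimal sketch of how the default
directoryKeyRegex added above is expected to behave. The 24-character hex values
are made-up ObjectId-style strings; the 'carbonara' case mirrors the unit test
added later in this same commit, where deleteDirectory refuses a key that does
not look like "<projectId>/<fileId>" and throws a NotFoundError.]

    // default pattern, assuming GCS_DIRECTORY_KEY_REGEX is unset
    const directoryKeyRegex = new RegExp('^[0-9a-fA-F]{24}/[0-9a-fA-F]{24}')
    directoryKeyRegex.test('aaaabbbbccccddddeeeeffff/000011112222333344445555') // true
    directoryKeyRegex.test('carbonara') // false, so deleteDirectory rejects it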
diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 1d74c5d172..e858a9e0be 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -1415,6 +1415,16 @@ "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" }, + "bl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.0.tgz", + "integrity": "sha512-wbgvOpqopSr7uq6fJrLH8EsvYMJf9gzfo2jCsL2eTy75qXPukA4pCgHamOQkZtY5vmfVtjB+P3LNlMHW5CEZXA==", + "dev": true, + "requires": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, "body-parser": { "version": "1.19.0", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz", @@ -1453,6 +1463,12 @@ "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", "dev": true }, + "bson": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.3.tgz", + "integrity": "sha512-TdiJxMVnodVS7r0BdL42y/pqC9cL2iKynVwA0Ho3qbsQYr428veL3l7BQyuqiw+Q5SqqoT0m4srSY/BlZ9AxXg==", + "dev": true + }, "buffer": { "version": "4.9.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", @@ -1854,6 +1870,12 @@ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, + "denque": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz", + "integrity": "sha512-OfzPuSZKGcgr96rf1oODnfjqBFmr1DVoc/TrItj3Ohe0Ah1C5WX5Baquw/9U9KovnQ88EqmJbD66rKYUQYN1tQ==", + "dev": true + }, "depd": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", @@ -3501,6 +3523,13 @@ "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==" }, + "memory-pager": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", + "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==", + "dev": true, + "optional": true + }, "merge-descriptors": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", @@ -3653,6 +3682,20 @@ "integrity": "sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg==", "optional": true }, + "mongodb": { + "version": "3.5.4", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.5.4.tgz", + "integrity": "sha512-xGH41Ig4dkSH5ROGezkgDbsgt/v5zbNUwE3TcFsSbDc6Qn3Qil17dhLsESSDDPTiyFDCPJRpfd4887dtsPgKtA==", + "dev": true, + "requires": { + "bl": "^2.2.0", + "bson": "^1.1.1", + "denque": "^1.4.1", + "require_optional": "^1.0.1", + "safe-buffer": "^5.1.2", + "saslprep": "^1.0.0" + } + }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -4939,6 +4982,30 @@ "integrity": "sha512-AKGr4qvHiryxRb19m3PsLRGuKVAbJLUD7E6eOaHkfKhwc+vSgVOCY5xNvm9EkolBKTOf0GrQAZKLimOCz81Khg==", "dev": true }, + "require_optional": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz", + "integrity": "sha512-qhM/y57enGWHAe3v/NcwML6a3/vfESLe/sGM2dII+gEO0BpKRUkWZow/tyloNqJyN6kXSl3RyyM8Ll5D/sJP8g==", + 
"dev": true, + "requires": { + "resolve-from": "^2.0.0", + "semver": "^5.1.0" + }, + "dependencies": { + "resolve-from": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", + "integrity": "sha1-lICrIOlP+h2egKgEx+oUdhGWa1c=", + "dev": true + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, "resolve": { "version": "1.15.1", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", @@ -5041,6 +5108,16 @@ "stack-trace": "0.0.9" } }, + "saslprep": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz", + "integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==", + "dev": true, + "optional": true, + "requires": { + "sparse-bitfield": "^3.0.3" + } + }, "sax": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", @@ -5205,6 +5282,16 @@ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, + "sparse-bitfield": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", + "integrity": "sha1-/0rm5oZWBWuks+eSqzM004JzyhE=", + "dev": true, + "optional": true, + "requires": { + "memory-pager": "^1.0.2" + } + }, "spdx-correct": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 6f7d84c778..bbdd586f58 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -56,6 +56,7 @@ "eslint-plugin-promise": "^4.2.1", "eslint-plugin-standard": "^4.0.1", "mocha": "5.2.0", + "mongodb": "^3.5.4", "prettier-eslint": "^9.0.1", "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "2.0.3", diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 599d60155a..9da46db092 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -14,6 +14,7 @@ const { promisify } = require('util') const { Storage } = require('@google-cloud/storage') const streamifier = require('streamifier') chai.use(require('chai-as-promised')) +const { ObjectId } = require('mongodb') const fsWriteFile = promisify(fs.writeFile) const fsStat = promisify(fs.stat) @@ -48,7 +49,6 @@ const BackendSettings = require('./TestConfig') describe('Filestore', function() { this.timeout(1000 * 10) const filestoreUrl = `http://localhost:${Settings.internal.filestore.port}` - const directoryName = 'directory' // redefine the test suite for every available backend Object.keys(BackendSettings).forEach(backend => { @@ -84,7 +84,7 @@ describe('Filestore', function() { `${metricPrefix}_egress` ) } - projectId = `acceptance_tests_${Math.random()}` + projectId = ObjectId().toString() }) it('should send a 200 for the status endpoint', async function() { @@ -107,8 +107,8 @@ describe('Filestore', function() { '/tmp/filestore_acceptance_tests_file_read.txt' beforeEach(async function() { - fileId = Math.random() - fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` + fileId = 
ObjectId().toString() + fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` constantFileContent = [ 'hello world', `line 2 goes here ${Math.random()}`, @@ -188,16 +188,16 @@ describe('Filestore', function() { }) it('should be able to copy files', async function() { - const newProjectID = `acceptance_tests_copied_project_${Math.random()}` - const newFileId = Math.random() - const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}` + const newProjectID = ObjectId().toString() + const newFileId = ObjectId().toString() + const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${newFileId}` const opts = { method: 'put', uri: newFileUrl, json: { source: { project_id: projectId, - file_id: `${directoryName}/${fileId}` + file_id: fileId } } } @@ -260,7 +260,6 @@ describe('Filestore', function() { describe('with multiple files', function() { let fileIds, fileUrls - const directoryName = 'directory' const localFileReadPaths = [ '/tmp/filestore_acceptance_tests_file_read_1.txt', '/tmp/filestore_acceptance_tests_file_read_2.txt' @@ -286,10 +285,10 @@ describe('Filestore', function() { }) beforeEach(async function() { - fileIds = [Math.random(), Math.random()] + fileIds = [ObjectId().toString(), ObjectId().toString()] fileUrls = [ - `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileIds[0]}`, - `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileIds[1]}` + `${filestoreUrl}/project/${projectId}/file/${fileIds[0]}`, + `${filestoreUrl}/project/${projectId}/file/${fileIds[1]}` ] const writeStreams = [ @@ -325,8 +324,8 @@ describe('Filestore', function() { let fileId, fileUrl, largeFileContent, error beforeEach(async function() { - fileId = Math.random() - fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` + fileId = ObjectId().toString() + fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` largeFileContent = '_wombat_'.repeat(1024 * 1024) // 8 megabytes largeFileContent += Math.random() @@ -359,8 +358,8 @@ describe('Filestore', function() { beforeEach(async function() { constantFileContent = `This is a file in a different S3 bucket ${Math.random()}` - fileId = Math.random().toString() - bucketName = Math.random().toString() + fileId = ObjectId().toString() + bucketName = ObjectId().toString() fileUrl = `${filestoreUrl}/bucket/${bucketName}/key/${fileId}` const s3ClientSettings = { @@ -448,9 +447,9 @@ describe('Filestore', function() { beforeEach(function() { constantFileContent = `This is yet more file content ${Math.random()}` - fileId = Math.random().toString() - fileKey = `${projectId}/${directoryName}/${fileId}` - fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` + fileId = ObjectId().toString() + fileKey = `${projectId}/${fileId}` + fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` bucket = Settings.filestore.stores.user_files fallbackBucket = Settings.filestore.fallback.buckets[bucket] @@ -532,10 +531,10 @@ describe('Filestore', function() { let newFileId, newFileUrl, newFileKey, opts beforeEach(function() { - const newProjectID = `acceptance_tests_copied_project_${Math.random()}` - newFileId = Math.random() - newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${directoryName}%2F${newFileId}` - newFileKey = `${newProjectID}/${directoryName}/${newFileId}` + const newProjectID = ObjectId().toString() + newFileId = ObjectId().toString() + newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${newFileId}` + 
newFileKey = `${newProjectID}/${newFileId}` opts = { method: 'put', @@ -543,7 +542,7 @@ describe('Filestore', function() { json: { source: { project_id: projectId, - file_id: `${directoryName}/${fileId}` + file_id: fileId } } } @@ -668,7 +667,7 @@ describe('Filestore', function() { await expectPersistorNotToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, - `${projectId}/${directoryName}/${fileId}` + `${projectId}/${fileId}` ) }) }) @@ -757,8 +756,8 @@ describe('Filestore', function() { ) beforeEach(async function() { - fileId = Math.random() - fileUrl = `${filestoreUrl}/project/${projectId}/file/${directoryName}%2F${fileId}` + fileId = ObjectId().toString() + fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` const stat = await fsStat(localFileReadPath) localFileSize = stat.size const writeStream = request.post(fileUrl) diff --git a/services/filestore/test/acceptance/js/TestConfig.js b/services/filestore/test/acceptance/js/TestConfig.js index fd7d0f034c..833a3b09be 100644 --- a/services/filestore/test/acceptance/js/TestConfig.js +++ b/services/filestore/test/acceptance/js/TestConfig.js @@ -23,7 +23,8 @@ function gcsConfig() { return { apiEndpoint: process.env.GCS_API_ENDPOINT, apiScheme: process.env.GCS_API_SCHEME, - projectId: 'fake' + projectId: 'fake', + directoryKeyRegex: new RegExp('^[0-9a-fA-F]{24}/[0-9a-fA-F]{24}') } } diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index a63296a18f..0e5f77bdf1 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -3,6 +3,7 @@ const chai = require('chai') const { expect } = chai const modulePath = '../../../app/js/GcsPersistor.js' const SandboxedModule = require('sandboxed-module') +const { ObjectId } = require('mongodb') const Errors = require('../../../app/js/Errors') @@ -41,6 +42,9 @@ describe('GcsPersistorTests', function() { backend: 'gcs', stores: { user_files: 'user_files' + }, + gcs: { + directoryKeyRegex: /^[0-9a-fA-F]{24}\/[0-9a-fA-F]{24}/ } } } @@ -512,15 +516,17 @@ describe('GcsPersistorTests', function() { }) describe('deleteDirectory', function() { + const directoryName = `${ObjectId()}/${ObjectId()}` describe('with valid parameters', function() { beforeEach(async function() { - return GcsPersistor.promises.deleteDirectory(bucket, key) + console.log(key) + return GcsPersistor.promises.deleteDirectory(bucket, directoryName) }) it('should delete the objects in the directory', function() { expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) expect(GcsBucket.deleteFiles).to.have.been.calledWith({ - directory: key, + directory: directoryName, force: true }) }) @@ -532,7 +538,7 @@ describe('GcsPersistorTests', function() { beforeEach(async function() { GcsBucket.deleteFiles = sinon.stub().rejects(genericError) try { - await GcsPersistor.promises.deleteDirectory(bucket, key) + await GcsPersistor.promises.deleteDirectory(bucket, directoryName) } catch (err) { error = err } @@ -546,6 +552,22 @@ describe('GcsPersistorTests', function() { expect(error.cause).to.equal(genericError) }) }) + + describe('when the directory name is in the wrong format', function() { + let error + + beforeEach(async function() { + try { + await GcsPersistor.promises.deleteDirectory(bucket, 'carbonara') + } catch (err) { + error = err + } + }) + + it('should throw a NotFoundError', function() { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + }) }) describe('directorySize', 
function() { From 183cb0179a18514e6c6af7baa316c1438c79f23e Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Fri, 13 Mar 2020 16:14:06 +0000 Subject: [PATCH 471/555] Add support to GCS persistor for unlocking files and copying on delete --- services/filestore/app/js/GcsPersistor.js | 40 +++-- services/filestore/app/js/PersistorHelper.js | 3 +- .../filestore/config/settings.defaults.coffee | 15 +- .../test/acceptance/js/FilestoreTests.js | 168 +++++++++--------- .../test/acceptance/js/TestConfig.js | 12 +- .../test/acceptance/js/TestHelper.js | 54 ++++++ .../test/unit/js/GcsPersistorTests.js | 18 +- 7 files changed, 193 insertions(+), 117 deletions(-) create mode 100644 services/filestore/test/acceptance/js/TestHelper.js diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index 3a314b50c1..399ae68064 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -9,18 +9,21 @@ const PersistorHelper = require('./PersistorHelper') const pipeline = promisify(Stream.pipeline) -// both of these settings will be null by default except for tests +// endpoint settings will be null by default except for tests // that's OK - GCS uses the locally-configured service account by default -const storage = new Storage(settings.filestore.gcs) +const storage = new Storage(settings.filestore.gcs.endpoint) // workaround for broken uploads with custom endpoints: // https://github.com/googleapis/nodejs-storage/issues/898 -if (settings.filestore.gcs && settings.filestore.gcs.apiEndpoint) { +if ( + settings.filestore.gcs.endpoint && + settings.filestore.gcs.endpoint.apiEndpoint +) { storage.interceptors.push({ request: function(reqOpts) { const url = new URL(reqOpts.uri) - url.host = settings.filestore.gcs.apiEndpoint - if (settings.filestore.gcs.apiScheme) { - url.protocol = settings.filestore.gcs.apiScheme + url.host = settings.filestore.gcs.endpoint.apiEndpoint + if (settings.filestore.gcs.endpoint.apiScheme) { + url.protocol = settings.filestore.gcs.endpoint.apiScheme } reqOpts.uri = url.toString() return reqOpts @@ -173,10 +176,19 @@ async function getFileMd5Hash(bucketName, key) { async function deleteFile(bucketName, key) { try { - await storage - .bucket(bucketName) - .file(key) - .delete() + const file = storage.bucket(bucketName).file(key) + + if (settings.filestore.gcs.unlockBeforeDelete) { + await file.setMetadata({ eventBasedHold: false }) + } + if (settings.filestore.gcs.deletedBucketSuffix) { + await file.copy( + storage.bucket( + `${bucketName}${settings.filestore.gcs.deletedBucketSuffix}` + ) + ) + } + await file.delete() } catch (err) { const error = PersistorHelper.wrapError( err, @@ -199,9 +211,13 @@ async function deleteDirectory(bucketName, key) { } try { - await storage + const [files] = await storage .bucket(bucketName) - .deleteFiles({ directory: key, force: true }) + .getFiles({ directory: key }) + + for (const file of files) { + await deleteFile(bucketName, file.name) + } } catch (err) { const error = PersistorHelper.wrapError( err, diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js index a19311e889..a58f024bb4 100644 --- a/services/filestore/app/js/PersistorHelper.js +++ b/services/filestore/app/js/PersistorHelper.js @@ -93,7 +93,8 @@ function wrapError(error, message, params, ErrorType) { error instanceof NotFoundError || ['NoSuchKey', 'NotFound', 404, 'AccessDenied', 'ENOENT'].includes( error.code - ) + ) || + (error.response && 
error.response.statusCode === 404) ) { return new NotFoundError({ message: 'no such file', diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 7bb37db9de..24bce087ff 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -35,12 +35,15 @@ settings = backend: process.env['BACKEND'] gcs: - if process.env['GCS_API_ENDPOINT'] - apiEndpoint: process.env['GCS_API_ENDPOINT'] - apiScheme: process.env['GCS_API_SCHEME'] - projectId: process.env['GCS_PROJECT_ID'] - # only keys that match this regex can be deleted - directoryKeyRegex: new RegExp(process.env['GCS_DIRECTORY_KEY_REGEX'] || "^[0-9a-fA-F]{24}/[0-9a-fA-F]{24}") + endpoint: + if process.env['GCS_API_ENDPOINT'] + apiEndpoint: process.env['GCS_API_ENDPOINT'] + apiScheme: process.env['GCS_API_SCHEME'] + projectId: process.env['GCS_PROJECT_ID'] + # only keys that match this regex can be deleted + directoryKeyRegex: new RegExp(process.env['GCS_DIRECTORY_KEY_REGEX'] || "^[0-9a-fA-F]{24}/[0-9a-fA-F]{24}") + unlockBeforeDelete: process.env['GCS_UNLOCK_BEFORE_DELETE'] == "true" # unlock an event-based hold before deleting. default false + deletedBucketSuffix: process.env['GCS_DELETED_BUCKET_SUFFIX'] # if present, copy file to another bucket on delete. default null s3: if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 9da46db092..0aa2c8e294 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -4,6 +4,7 @@ const fs = require('fs') const Settings = require('settings-sharelatex') const Path = require('path') const FilestoreApp = require('./FilestoreApp') +const TestHelper = require('./TestHelper') const rp = require('request-promise-native').defaults({ resolveWithFullResponse: true }) @@ -20,28 +21,10 @@ const fsWriteFile = promisify(fs.writeFile) const fsStat = promisify(fs.stat) const pipeline = promisify(Stream.pipeline) -async function getMetric(filestoreUrl, metric) { - const res = await rp.get(`${filestoreUrl}/metrics`) - expect(res.statusCode).to.equal(200) - const metricRegex = new RegExp(`^${metric}{[^}]+} ([0-9]+)$`, 'm') - const found = metricRegex.exec(res.body) - return parseInt(found ? found[1] : 0) || 0 -} - if (!process.env.AWS_ACCESS_KEY_ID) { throw new Error('please provide credentials for the AWS S3 test server') } -function streamToString(stream) { - const chunks = [] - return new Promise((resolve, reject) => { - stream.on('data', chunk => chunks.push(chunk)) - stream.on('error', reject) - stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8'))) - stream.resume() - }) -} - // store settings for multiple backends, so that we can test each one. 
// fs will always be available - add others if they are configured const BackendSettings = require('./TestConfig') @@ -64,10 +47,19 @@ describe('Filestore', function() { if (BackendSettings[backend].gcs) { before(async function() { - const storage = new Storage(Settings.filestore.gcs) + const storage = new Storage(Settings.filestore.gcs.endpoint) await storage.createBucket(process.env.GCS_USER_FILES_BUCKET_NAME) await storage.createBucket(process.env.GCS_PUBLIC_FILES_BUCKET_NAME) await storage.createBucket(process.env.GCS_TEMPLATE_FILES_BUCKET_NAME) + await storage.createBucket( + `${process.env.GCS_USER_FILES_BUCKET_NAME}-deleted` + ) + await storage.createBucket( + `${process.env.GCS_PUBLIC_FILES_BUCKET_NAME}-deleted` + ) + await storage.createBucket( + `${process.env.GCS_TEMPLATE_FILES_BUCKET_NAME}-deleted` + ) }) } @@ -79,7 +71,7 @@ describe('Filestore', function() { // retrieve previous metrics from the app if (['s3', 'gcs'].includes(Settings.filestore.backend)) { metricPrefix = Settings.filestore.backend - previousEgress = await getMetric( + previousEgress = await TestHelper.getMetric( filestoreUrl, `${metricPrefix}_egress` ) @@ -129,7 +121,7 @@ describe('Filestore', function() { // The content hash validation might require a full download // in case the ETag field of the upload response is not a md5 sum. if (['s3', 'gcs'].includes(Settings.filestore.backend)) { - previousIngress = await getMetric( + previousIngress = await TestHelper.getMetric( filestoreUrl, `${metricPrefix}_ingress` ) @@ -223,7 +215,7 @@ describe('Filestore', function() { if (['S3Persistor', 'GcsPersistor'].includes(backend)) { it('should record an egress metric for the upload', async function() { - const metric = await getMetric( + const metric = await TestHelper.getMetric( filestoreUrl, `${metricPrefix}_egress` ) @@ -232,7 +224,7 @@ describe('Filestore', function() { it('should record an ingress metric when downloading the file', async function() { await rp.get(fileUrl) - const metric = await getMetric( + const metric = await TestHelper.getMetric( filestoreUrl, `${metricPrefix}_ingress` ) @@ -249,7 +241,7 @@ describe('Filestore', function() { } } await rp.get(options) - const metric = await getMetric( + const metric = await TestHelper.getMetric( filestoreUrl, `${metricPrefix}_ingress` ) @@ -394,50 +386,54 @@ describe('Filestore', function() { }) } + if (backend === 'GcsPersistor') { + describe('when deleting a file in GCS', function() { + let fileId, fileUrl, content, error + + beforeEach(async function() { + fileId = ObjectId() + fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` + + content = '_wombat_' + Math.random() + + const writeStream = request.post(fileUrl) + const readStream = streamifier.createReadStream(content) + // hack to consume the result to ensure the http request has been fully processed + const resultStream = fs.createWriteStream('/dev/null') + + try { + await pipeline(readStream, writeStream, resultStream) + await rp.delete(fileUrl) + } catch (err) { + error = err + } + }) + + it('should not throw an error', function() { + expect(error).not.to.exist + }) + + it('should copy the file to the deleted-files bucket', async function() { + await TestHelper.expectPersistorToHaveFile( + app.persistor, + `${Settings.filestore.stores.user_files}-deleted`, + `${projectId}/${fileId}`, + content + ) + }) + + it('should remove the file from the original bucket', async function() { + await TestHelper.expectPersistorNotToHaveFile( + app.persistor, + Settings.filestore.stores.user_files, + 
`${projectId}/${fileId}` + ) + }) + }) + } + if (BackendSettings[backend].fallback) { describe('with a fallback', function() { - async function uploadStringToPersistor( - persistor, - bucket, - key, - content - ) { - const fileStream = streamifier.createReadStream(content) - await persistor.promises.sendStream(bucket, key, fileStream) - } - - async function getStringFromPersistor(persistor, bucket, key) { - const stream = await persistor.promises.getFileStream( - bucket, - key, - {} - ) - return streamToString(stream) - } - - async function expectPersistorToHaveFile( - persistor, - bucket, - key, - content - ) { - const foundContent = await getStringFromPersistor( - persistor, - bucket, - key - ) - expect(foundContent).to.equal(content) - } - - async function expectPersistorNotToHaveFile(persistor, bucket, key) { - await expect( - getStringFromPersistor(persistor, bucket, key) - ).to.eventually.have.been.rejected.with.property( - 'name', - 'NotFoundError' - ) - } - let constantFileContent, fileId, fileKey, @@ -457,7 +453,7 @@ describe('Filestore', function() { describe('with a file in the fallback bucket', function() { beforeEach(async function() { - await uploadStringToPersistor( + await TestHelper.uploadStringToPersistor( app.persistor.fallbackPersistor, fallbackBucket, fileKey, @@ -466,7 +462,7 @@ describe('Filestore', function() { }) it('should not find file in the primary', async function() { - await expectPersistorNotToHaveFile( + await TestHelper.expectPersistorNotToHaveFile( app.persistor.primaryPersistor, bucket, fileKey @@ -474,7 +470,7 @@ describe('Filestore', function() { }) it('should find the file in the fallback', async function() { - await expectPersistorToHaveFile( + await TestHelper.expectPersistorToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, fileKey, @@ -495,7 +491,7 @@ describe('Filestore', function() { it('should not copy the file to the primary', async function() { await rp.get(fileUrl) - await expectPersistorNotToHaveFile( + await TestHelper.expectPersistorNotToHaveFile( app.persistor.primaryPersistor, bucket, fileKey @@ -518,7 +514,7 @@ describe('Filestore', function() { // wait for the file to copy in the background await promisify(setTimeout)(1000) - await expectPersistorToHaveFile( + await TestHelper.expectPersistorToHaveFile( app.persistor.primaryPersistor, bucket, fileKey, @@ -557,7 +553,7 @@ describe('Filestore', function() { }) it('should leave the old file in the old bucket', async function() { - await expectPersistorToHaveFile( + await TestHelper.expectPersistorToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, fileKey, @@ -566,7 +562,7 @@ describe('Filestore', function() { }) it('should not create a new file in the old bucket', async function() { - await expectPersistorNotToHaveFile( + await TestHelper.expectPersistorNotToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, newFileKey @@ -574,7 +570,7 @@ describe('Filestore', function() { }) it('should create a new file in the new bucket', async function() { - await expectPersistorToHaveFile( + await TestHelper.expectPersistorToHaveFile( app.persistor.primaryPersistor, bucket, newFileKey, @@ -586,7 +582,7 @@ describe('Filestore', function() { // wait for the file to copy in the background await promisify(setTimeout)(1000) - await expectPersistorNotToHaveFile( + await TestHelper.expectPersistorNotToHaveFile( app.persistor.primaryPersistor, bucket, fileKey @@ -603,7 +599,7 @@ describe('Filestore', function() { }) it('should leave the old file in the old bucket', async 
function() { - await expectPersistorToHaveFile( + await TestHelper.expectPersistorToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, fileKey, @@ -612,7 +608,7 @@ describe('Filestore', function() { }) it('should not create a new file in the old bucket', async function() { - await expectPersistorNotToHaveFile( + await TestHelper.expectPersistorNotToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, newFileKey @@ -620,7 +616,7 @@ describe('Filestore', function() { }) it('should create a new file in the new bucket', async function() { - await expectPersistorToHaveFile( + await TestHelper.expectPersistorToHaveFile( app.persistor.primaryPersistor, bucket, newFileKey, @@ -632,7 +628,7 @@ describe('Filestore', function() { // wait for the file to copy in the background await promisify(setTimeout)(1000) - await expectPersistorToHaveFile( + await TestHelper.expectPersistorToHaveFile( app.persistor.primaryPersistor, bucket, fileKey, @@ -655,7 +651,7 @@ describe('Filestore', function() { }) it('should store the file on the primary', async function() { - await expectPersistorToHaveFile( + await TestHelper.expectPersistorToHaveFile( app.persistor.primaryPersistor, bucket, fileKey, @@ -664,7 +660,7 @@ describe('Filestore', function() { }) it('should not store the file on the fallback', async function() { - await expectPersistorNotToHaveFile( + await TestHelper.expectPersistorNotToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, `${projectId}/${fileId}` @@ -675,7 +671,7 @@ describe('Filestore', function() { describe('when deleting a file', function() { describe('when the file exists on the primary', function() { beforeEach(async function() { - await uploadStringToPersistor( + await TestHelper.uploadStringToPersistor( app.persistor.primaryPersistor, bucket, fileKey, @@ -694,7 +690,7 @@ describe('Filestore', function() { describe('when the file exists on the fallback', function() { beforeEach(async function() { - await uploadStringToPersistor( + await TestHelper.uploadStringToPersistor( app.persistor.fallbackPersistor, fallbackBucket, fileKey, @@ -713,13 +709,13 @@ describe('Filestore', function() { describe('when the file exists on both the primary and the fallback', function() { beforeEach(async function() { - await uploadStringToPersistor( + await TestHelper.uploadStringToPersistor( app.persistor.primaryPersistor, bucket, fileKey, constantFileContent ) - await uploadStringToPersistor( + await TestHelper.uploadStringToPersistor( app.persistor.fallbackPersistor, fallbackBucket, fileKey, @@ -773,7 +769,7 @@ describe('Filestore', function() { if (['S3Persistor', 'GcsPersistor'].includes(backend)) { it('should record an egress metric for the upload', async function() { - const metric = await getMetric( + const metric = await TestHelper.getMetric( filestoreUrl, `${metricPrefix}_egress` ) diff --git a/services/filestore/test/acceptance/js/TestConfig.js b/services/filestore/test/acceptance/js/TestConfig.js index 833a3b09be..ec80e45c1f 100644 --- a/services/filestore/test/acceptance/js/TestConfig.js +++ b/services/filestore/test/acceptance/js/TestConfig.js @@ -21,10 +21,14 @@ function s3Stores() { function gcsConfig() { return { - apiEndpoint: process.env.GCS_API_ENDPOINT, - apiScheme: process.env.GCS_API_SCHEME, - projectId: 'fake', - directoryKeyRegex: new RegExp('^[0-9a-fA-F]{24}/[0-9a-fA-F]{24}') + endpoint: { + apiEndpoint: process.env.GCS_API_ENDPOINT, + apiScheme: process.env.GCS_API_SCHEME, + projectId: 'fake' + }, + directoryKeyRegex: new 
RegExp('^[0-9a-fA-F]{24}/[0-9a-fA-F]{24}'), + unlockBeforeDelete: false, // fake-gcs does not support this + deletedBucketSuffix: '-deleted' } } diff --git a/services/filestore/test/acceptance/js/TestHelper.js b/services/filestore/test/acceptance/js/TestHelper.js new file mode 100644 index 0000000000..df57303de1 --- /dev/null +++ b/services/filestore/test/acceptance/js/TestHelper.js @@ -0,0 +1,54 @@ +const streamifier = require('streamifier') +const rp = require('request-promise-native').defaults({ + resolveWithFullResponse: true +}) + +const { expect } = require('chai') + +module.exports = { + uploadStringToPersistor, + getStringFromPersistor, + expectPersistorToHaveFile, + expectPersistorNotToHaveFile, + streamToString, + getMetric +} + +async function getMetric(filestoreUrl, metric) { + const res = await rp.get(`${filestoreUrl}/metrics`) + expect(res.statusCode).to.equal(200) + const metricRegex = new RegExp(`^${metric}{[^}]+} ([0-9]+)$`, 'm') + const found = metricRegex.exec(res.body) + return parseInt(found ? found[1] : 0) || 0 +} + +function streamToString(stream) { + const chunks = [] + return new Promise((resolve, reject) => { + stream.on('data', chunk => chunks.push(chunk)) + stream.on('error', reject) + stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8'))) + stream.resume() + }) +} + +async function uploadStringToPersistor(persistor, bucket, key, content) { + const fileStream = streamifier.createReadStream(content) + await persistor.promises.sendStream(bucket, key, fileStream) +} + +async function getStringFromPersistor(persistor, bucket, key) { + const stream = await persistor.promises.getFileStream(bucket, key, {}) + return streamToString(stream) +} + +async function expectPersistorToHaveFile(persistor, bucket, key, content) { + const foundContent = await getStringFromPersistor(persistor, bucket, key) + expect(foundContent).to.equal(content) +} + +async function expectPersistorNotToHaveFile(persistor, bucket, key) { + await expect( + getStringFromPersistor(persistor, bucket, key) + ).to.eventually.have.been.rejected.with.property('name', 'NotFoundError') +} diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index 0e5f77bdf1..8a9df40485 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -88,8 +88,7 @@ describe('GcsPersistorTests', function() { GcsBucket = { file: sinon.stub().returns(GcsFile), - getFiles: sinon.stub().resolves([files]), - deleteFiles: sinon.stub().resolves() + getFiles: sinon.stub().resolves([files]) } Storage = class { @@ -523,20 +522,23 @@ describe('GcsPersistorTests', function() { return GcsPersistor.promises.deleteDirectory(bucket, directoryName) }) - it('should delete the objects in the directory', function() { + it('should list the objects in the directory', function() { expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) - expect(GcsBucket.deleteFiles).to.have.been.calledWith({ - directory: directoryName, - force: true + expect(GcsBucket.getFiles).to.have.been.calledWith({ + directory: directoryName }) }) + + it('should delete the files', function() { + expect(GcsFile.delete).to.have.been.calledTwice + }) }) - describe('when there is an error deleting the objects', function() { + describe('when there is an error listing the objects', function() { let error beforeEach(async function() { - GcsBucket.deleteFiles = sinon.stub().rejects(genericError) + GcsBucket.getFiles = 
sinon.stub().rejects(genericError) try { await GcsPersistor.promises.deleteDirectory(bucket, directoryName) } catch (err) { From edf1ce1f7e91b6d497f1c340847ea55c447d84c3 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Sat, 14 Mar 2020 14:02:58 +0000 Subject: [PATCH 472/555] Delete files from a directory in parallel --- services/filestore/app/js/GcsPersistor.js | 5 +++-- services/filestore/package-lock.json | 21 +++++++++++++++++++ services/filestore/package.json | 3 ++- .../test/unit/js/GcsPersistorTests.js | 2 ++ 4 files changed, 28 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index 399ae68064..d8639e70d4 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -5,6 +5,7 @@ const Stream = require('stream') const { Storage } = require('@google-cloud/storage') const { callbackify } = require('util') const { WriteError, ReadError, NotFoundError } = require('./Errors') +const asyncPool = require('tiny-async-pool') const PersistorHelper = require('./PersistorHelper') const pipeline = promisify(Stream.pipeline) @@ -215,9 +216,9 @@ async function deleteDirectory(bucketName, key) { .bucket(bucketName) .getFiles({ directory: key }) - for (const file of files) { + await asyncPool(50, files, async file => { await deleteFile(bucketName, file.name) - } + }) } catch (err) { const error = PersistorHelper.wrapError( err, diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index e858a9e0be..f9d1b1696b 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -5582,6 +5582,22 @@ "readable-stream": "2 || 3" } }, + "tiny-async-pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/tiny-async-pool/-/tiny-async-pool-1.1.0.tgz", + "integrity": "sha512-jIglyHF/9QdCC3662m/UMVADE6SlocBDpXdFLMZyiAfrw8MSG1pml7lwRtBMT6L/z4dddAxfzw2lpW2Vm42fyQ==", + "requires": { + "semver": "^5.5.0", + "yaassertion": "^1.0.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, "tmp": { "version": "0.0.33", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", @@ -5980,6 +5996,11 @@ "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", "dev": true }, + "yaassertion": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/yaassertion/-/yaassertion-1.0.2.tgz", + "integrity": "sha512-sBoJBg5vTr3lOpRX0yFD+tz7wv/l2UPMFthag4HGTMPrypBRKerjjS8jiEnNMjcAEtPXjbHiKE0UwRR1W1GXBg==" + }, "yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index bbdd586f58..73459bac71 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -36,7 +36,8 @@ "rimraf": "2.2.8", "settings-sharelatex": "^1.1.0", "stream-buffers": "~0.2.5", - "stream-meter": "^1.0.4" + "stream-meter": "^1.0.4", + "tiny-async-pool": "^1.1.0" }, "devDependencies": { "babel-eslint": "^10.0.3", diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index 8a9df40485..f02589c389 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ 
b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -4,6 +4,7 @@ const { expect } = chai const modulePath = '../../../app/js/GcsPersistor.js' const SandboxedModule = require('sandboxed-module') const { ObjectId } = require('mongodb') +const asyncPool = require('tiny-async-pool') const Errors = require('../../../app/js/Errors') @@ -135,6 +136,7 @@ describe('GcsPersistorTests', function() { '@google-cloud/storage': { Storage }, 'settings-sharelatex': settings, 'logger-sharelatex': Logger, + 'tiny-async-pool': asyncPool, './Errors': Errors, fs: Fs, 'stream-meter': Meter, From 58db14456a02745fb13fc6a6f8182d9644da7d13 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Sat, 14 Mar 2020 14:11:17 +0000 Subject: [PATCH 473/555] Add timestamp to files in deleted bucket --- services/filestore/app/js/GcsPersistor.js | 6 +++--- services/filestore/package-lock.json | 6 ++++++ services/filestore/package.json | 3 ++- .../filestore/test/acceptance/js/FilestoreTests.js | 11 +++++++++-- 4 files changed, 20 insertions(+), 6 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index d8639e70d4..29286ef505 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -184,9 +184,9 @@ async function deleteFile(bucketName, key) { } if (settings.filestore.gcs.deletedBucketSuffix) { await file.copy( - storage.bucket( - `${bucketName}${settings.filestore.gcs.deletedBucketSuffix}` - ) + storage + .bucket(`${bucketName}${settings.filestore.gcs.deletedBucketSuffix}`) + .file(`${key}-${new Date()}`) ) } await file.delete() diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index f9d1b1696b..7c21d9e128 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -5582,6 +5582,12 @@ "readable-stream": "2 || 3" } }, + "timekeeper": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.2.0.tgz", + "integrity": "sha512-W3AmPTJWZkRwu+iSNxPIsLZ2ByADsOLbbLxe46UJyWj3mlYLlwucKiq+/dPm0l9wTzqoF3/2PH0AGFCebjq23A==", + "dev": true + }, "tiny-async-pool": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/tiny-async-pool/-/tiny-async-pool-1.1.0.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 73459bac71..51530e86b4 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -63,6 +63,7 @@ "sandboxed-module": "2.0.3", "sinon": "7.1.1", "sinon-chai": "^3.3.0", - "streamifier": "^0.1.1" + "streamifier": "^0.1.1", + "timekeeper": "^2.2.0" } } diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 0aa2c8e294..8369698891 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -16,6 +16,7 @@ const { Storage } = require('@google-cloud/storage') const streamifier = require('streamifier') chai.use(require('chai-as-promised')) const { ObjectId } = require('mongodb') +const tk = require('timekeeper') const fsWriteFile = promisify(fs.writeFile) const fsStat = promisify(fs.stat) @@ -388,9 +389,11 @@ describe('Filestore', function() { if (backend === 'GcsPersistor') { describe('when deleting a file in GCS', function() { - let fileId, fileUrl, content, error + let fileId, fileUrl, content, error, date beforeEach(async function() { + date = new Date() + tk.freeze(date) fileId = ObjectId() fileUrl = 
`${filestoreUrl}/project/${projectId}/file/${fileId}` @@ -409,6 +412,10 @@ describe('Filestore', function() { } }) + afterEach(function() { + tk.reset() + }) + it('should not throw an error', function() { expect(error).not.to.exist }) @@ -417,7 +424,7 @@ describe('Filestore', function() { await TestHelper.expectPersistorToHaveFile( app.persistor, `${Settings.filestore.stores.user_files}-deleted`, - `${projectId}/${fileId}`, + `${projectId}/${fileId}-${date}`, content ) }) From fc80aa3954e6823c7970adf63e4bced6594cf97f Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Sat, 14 Mar 2020 14:31:30 +0000 Subject: [PATCH 474/555] Move directory key validation into FileHandler --- services/filestore/app/js/FileHandler.js | 14 ++++++++++++- services/filestore/app/js/GcsPersistor.js | 7 ------- .../filestore/config/settings.defaults.coffee | 2 -- .../test/unit/js/FileHandlerTests.js | 21 +++++++++++++++++-- .../test/unit/js/GcsPersistorTests.js | 16 -------------- 5 files changed, 32 insertions(+), 28 deletions(-) diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index 02831fa3d0..9b592df34e 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -5,7 +5,7 @@ const LocalFileWriter = require('./LocalFileWriter') const FileConverter = require('./FileConverter') const KeyBuilder = require('./KeyBuilder') const ImageOptimiser = require('./ImageOptimiser') -const { ConversionError } = require('./Errors') +const { ConversionError, WriteError } = require('./Errors') module.exports = { insertFile: callbackify(insertFile), @@ -24,12 +24,24 @@ module.exports = { async function insertFile(bucket, key, stream) { const convertedKey = KeyBuilder.getConvertedFolderKey(key) + if (!convertedKey.match(/^[0-9a-f]{24}\/[0-9a-f]{24}/i)) { + throw new WriteError({ + message: 'key does not match validation regex', + info: { bucket, key, convertedKey } + }) + } await PersistorManager.promises.deleteDirectory(bucket, convertedKey) await PersistorManager.promises.sendStream(bucket, key, stream) } async function deleteFile(bucket, key) { const convertedKey = KeyBuilder.getConvertedFolderKey(key) + if (!convertedKey.match(/^[0-9a-f]{24}\/[0-9a-f]{24}/i)) { + throw new WriteError({ + message: 'key does not match validation regex', + info: { bucket, key, convertedKey } + }) + } await Promise.all([ PersistorManager.promises.deleteFile(bucket, key), PersistorManager.promises.deleteDirectory(bucket, convertedKey) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index 29286ef505..bc46153983 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -204,13 +204,6 @@ async function deleteFile(bucketName, key) { } async function deleteDirectory(bucketName, key) { - if (!key.match(settings.filestore.gcs.directoryKeyRegex)) { - throw new NotFoundError({ - message: 'deleteDirectoryKey is invalid or missing', - info: { bucketName, key } - }) - } - try { const [files] = await storage .bucket(bucketName) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 24bce087ff..6b5238e552 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -40,8 +40,6 @@ settings = apiEndpoint: process.env['GCS_API_ENDPOINT'] apiScheme: process.env['GCS_API_SCHEME'] projectId: process.env['GCS_PROJECT_ID'] - # only keys that match this 
regex can be deleted - directoryKeyRegex: new RegExp(process.env['GCS_DIRECTORY_KEY_REGEX'] || "^[0-9a-fA-F]{24}/[0-9a-fA-F]{24}") unlockBeforeDelete: process.env['GCS_UNLOCK_BEFORE_DELETE'] == "true" # unlock an event-based hold before deleting. default false deletedBucketSuffix: process.env['GCS_DELETED_BUCKET_SUFFIX'] # if present, copy file to another bucket on delete. default null diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index 623ed440b0..9692521531 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -3,6 +3,7 @@ const chai = require('chai') const { expect } = chai const modulePath = '../../../app/js/FileHandler.js' const SandboxedModule = require('sandboxed-module') +const { ObjectId } = require('mongodb') chai.use(require('sinon-chai')) chai.use(require('chai-as-promised')) @@ -24,8 +25,8 @@ describe('FileHandler', function() { } const bucket = 'my_bucket' - const key = 'key/here' - const convertedFolderKey = 'convertedFolder' + const key = `${ObjectId()}/${ObjectId()}` + const convertedFolderKey = `${ObjectId()}/${ObjectId()}` const sourceStream = 'sourceStream' const convertedKey = 'convertedKey' const readStream = { @@ -112,6 +113,14 @@ describe('FileHandler', function() { done() }) }) + + it('should throw an error when the key is in the wrong format', function(done) { + KeyBuilder.getConvertedFolderKey.returns('wombat') + FileHandler.insertFile(bucket, key, stream, err => { + expect(err).to.exist + done() + }) + }) }) describe('deleteFile', function() { @@ -135,6 +144,14 @@ describe('FileHandler', function() { done() }) }) + + it('should throw an error when the key is in the wrong format', function(done) { + KeyBuilder.getConvertedFolderKey.returns('wombat') + FileHandler.deleteFile(bucket, key, err => { + expect(err).to.exist + done() + }) + }) }) describe('getFile', function() { diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index f02589c389..cd95bf1e20 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -556,22 +556,6 @@ describe('GcsPersistorTests', function() { expect(error.cause).to.equal(genericError) }) }) - - describe('when the directory name is in the wrong format', function() { - let error - - beforeEach(async function() { - try { - await GcsPersistor.promises.deleteDirectory(bucket, 'carbonara') - } catch (err) { - error = err - } - }) - - it('should throw a NotFoundError', function() { - expect(error).to.be.an.instanceOf(Errors.NotFoundError) - }) - }) }) describe('directorySize', function() { From 47e96a4d94a3e28e30fb40d035f9978576bc4c5c Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Sat, 14 Mar 2020 14:56:29 +0000 Subject: [PATCH 475/555] Add endpoint to delete a project Needs acceptance tests --- services/filestore/app.js | 5 +++++ services/filestore/app/js/FileController.js | 17 +++++++++++++++ services/filestore/app/js/FileHandler.js | 12 +++++++++++ services/filestore/app/js/KeyBuilder.js | 8 +++++++ .../test/unit/js/FileControllerTests.js | 19 +++++++++++++++++ .../test/unit/js/FileHandlerTests.js | 21 +++++++++++++++++++ 6 files changed, 82 insertions(+) diff --git a/services/filestore/app.js b/services/filestore/app.js index ea2c2ca1d8..2d5b27ee6b 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -61,6 +61,11 @@ app.delete( 
keyBuilder.userFileKeyMiddleware, fileController.deleteFile ) +app.delete( + '/project/:project_id', + keyBuilder.userProjectKeyMiddleware, + fileController.deleteProject +) app.head( '/template/:template_id/v/:version/:format', diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index 930434dc9d..9ddafb9f69 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -13,6 +13,7 @@ module.exports = { insertFile, copyFile, deleteFile, + deleteProject, directorySize } @@ -158,6 +159,22 @@ function deleteFile(req, res, next) { }) } +function deleteProject(req, res, next) { + metrics.inc('deleteProject') + const { project_id: projectId, bucket } = req + + req.requestLogger.setMessage('getting project size') + req.requestLogger.addFields({ projectId, bucket }) + + FileHandler.deleteProject(bucket, projectId, function(err) { + if (err) { + next(err) + } else { + res.sendStatus(204) + } + }) +} + function directorySize(req, res, next) { metrics.inc('projectSize') const { project_id: projectId, bucket } = req diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index 9b592df34e..40bdc95e34 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -10,6 +10,7 @@ const { ConversionError, WriteError } = require('./Errors') module.exports = { insertFile: callbackify(insertFile), deleteFile: callbackify(deleteFile), + deleteProject: callbackify(deleteProject), getFile: callbackify(getFile), getFileSize: callbackify(getFileSize), getDirectorySize: callbackify(getDirectorySize), @@ -17,6 +18,7 @@ module.exports = { getFile, insertFile, deleteFile, + deleteProject, getFileSize, getDirectorySize } @@ -48,6 +50,16 @@ async function deleteFile(bucket, key) { ]) } +async function deleteProject(bucket, key) { + if (!key.match(/^[0-9a-f]{24}\//i)) { + throw new WriteError({ + message: 'key does not match validation regex', + info: { bucket, key } + }) + } + await PersistorManager.promises.deleteDirectory(bucket, key) +} + async function getFile(bucket, key, opts) { opts = opts || {} if (!opts.format && !opts.style) { diff --git a/services/filestore/app/js/KeyBuilder.js b/services/filestore/app/js/KeyBuilder.js index 66cf563014..9968753349 100644 --- a/services/filestore/app/js/KeyBuilder.js +++ b/services/filestore/app/js/KeyBuilder.js @@ -4,6 +4,7 @@ module.exports = { getConvertedFolderKey, addCachingToKey, userFileKeyMiddleware, + userProjectKeyMiddleware, publicFileKeyMiddleware, publicProjectKeyMiddleware, bucketFileKeyMiddleware, @@ -37,6 +38,13 @@ function userFileKeyMiddleware(req, res, next) { next() } +function userProjectKeyMiddleware(req, res, next) { + const { project_id: projectId } = req.params + req.key = `${projectId}/` + req.bucket = settings.filestore.stores.user_files + next() +} + function publicFileKeyMiddleware(req, res, next) { if (settings.filestore.stores.public_files == null) { return res.status(501).send('public files not available') diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js index 2d1411ea27..4a99a875a9 100644 --- a/services/filestore/test/unit/js/FileControllerTests.js +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -40,6 +40,7 @@ describe('FileController', function() { getFile: sinon.stub().yields(null, fileStream), getFileSize: sinon.stub().yields(null, fileSize), deleteFile: 
sinon.stub().yields(), + deleteProject: sinon.stub().yields(), insertFile: sinon.stub().yields(), getDirectorySize: sinon.stub().yields(null, fileSize) } @@ -67,6 +68,7 @@ describe('FileController', function() { req = { key: key, bucket: bucket, + project_id: projectId, query: {}, params: { project_id: projectId, @@ -257,6 +259,23 @@ describe('FileController', function() { }) }) + describe('delete project', function() { + it('should tell the file handler', function(done) { + res.sendStatus = code => { + code.should.equal(204) + expect(FileHandler.deleteProject).to.have.been.calledWith(bucket, projectId) + done() + } + FileController.deleteProject(req, res, next) + }) + + it('should send a 500 if there was an error', function() { + FileHandler.deleteProject.yields(error) + FileController.deleteProject(req, res, next) + expect(next).to.have.been.calledWith(error) + }) + }) + describe('directorySize', function() { it('should return total directory size bytes', function(done) { FileController.directorySize(req, { diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index 9692521531..acd3b8fc86 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -27,6 +27,7 @@ describe('FileHandler', function() { const bucket = 'my_bucket' const key = `${ObjectId()}/${ObjectId()}` const convertedFolderKey = `${ObjectId()}/${ObjectId()}` + const projectKey = `${ObjectId()}/` const sourceStream = 'sourceStream' const convertedKey = 'convertedKey' const readStream = { @@ -154,6 +155,26 @@ describe('FileHandler', function() { }) }) + describe('deleteProject', function() { + it('should tell the filestore manager to delete the folder', function(done) { + FileHandler.deleteProject(bucket, projectKey, err => { + expect(err).not.to.exist + expect(PersistorManager.promises.deleteDirectory).to.have.been.calledWith( + bucket, + projectKey + ) + done() + }) + }) + + it('should throw an error when the key is in the wrong format', function(done) { + FileHandler.deleteProject(bucket, 'wombat', err => { + expect(err).to.exist + done() + }) + }) + }) + describe('getFile', function() { it('should return the source stream no format or style are defined', function(done) { FileHandler.getFile(bucket, key, null, (err, stream) => { From ce52f8aa602f08b643a2cc8ab730c44254c533b4 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Mar 2020 11:33:51 +0000 Subject: [PATCH 476/555] Update FSPersistor deleteDirectory behaviour to match S3 and GCS --- services/filestore/app/js/FSPersistor.js | 6 ++--- services/filestore/package-lock.json | 5 ---- services/filestore/package.json | 1 - .../test/unit/js/FSPersistorTests.js | 27 +++++++++---------- 4 files changed, 15 insertions(+), 24 deletions(-) diff --git a/services/filestore/app/js/FSPersistor.js b/services/filestore/app/js/FSPersistor.js index 973c670efd..4e514e3350 100644 --- a/services/filestore/app/js/FSPersistor.js +++ b/services/filestore/app/js/FSPersistor.js @@ -1,7 +1,6 @@ const fs = require('fs') const glob = require('glob') const path = require('path') -const rimraf = require('rimraf') const Stream = require('stream') const { promisify, callbackify } = require('util') @@ -14,7 +13,6 @@ const fsUnlink = promisify(fs.unlink) const fsOpen = promisify(fs.open) const fsStat = promisify(fs.stat) const fsGlob = promisify(glob) -const rmrf = promisify(rimraf) const filterName = key => key.replace(/\//g, '_') @@ -146,7 +144,9 @@ async 
function deleteDirectory(location, name) { const filteredName = filterName(name.replace(/\/$/, '')) try { - await rmrf(`${location}/${filteredName}`) + await Promise.all( + (await fsGlob(`${location}/${filteredName}*`)).map(file => fsUnlink(file)) + ) } catch (err) { throw PersistorHelper.wrapError( err, diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 7c21d9e128..90f5698668 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -5059,11 +5059,6 @@ } } }, - "rimraf": { - "version": "2.2.8", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz", - "integrity": "sha512-R5KMKHnPAQaZMqLOsyuyUmcIjSeDm+73eoqQpaXA7AZ22BL+6C+1mcUscgOsNd8WVlJuvlgAPsegcx7pjlV0Dg==" - }, "run-async": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.0.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 51530e86b4..c4b8f16b15 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -33,7 +33,6 @@ "range-parser": "^1.0.2", "request": "^2.88.0", "request-promise-native": "^1.0.8", - "rimraf": "2.2.8", "settings-sharelatex": "^1.1.0", "stream-buffers": "~0.2.5", "stream-meter": "^1.0.4", diff --git a/services/filestore/test/unit/js/FSPersistorTests.js b/services/filestore/test/unit/js/FSPersistorTests.js index 4dd5a2fa11..4777de502a 100644 --- a/services/filestore/test/unit/js/FSPersistorTests.js +++ b/services/filestore/test/unit/js/FSPersistorTests.js @@ -22,15 +22,7 @@ describe('FSPersistorTests', function() { const files = ['animals/wombat.tex', 'vegetables/potato.tex'] const globs = [`${location}/${files[0]}`, `${location}/${files[1]}`] const filteredFilenames = ['animals_wombat.tex', 'vegetables_potato.tex'] - let fs, - rimraf, - stream, - LocalFileWriter, - FSPersistor, - glob, - readStream, - crypto, - Hash + let fs, stream, LocalFileWriter, FSPersistor, glob, readStream, crypto, Hash beforeEach(function() { readStream = { @@ -46,7 +38,6 @@ describe('FSPersistorTests', function() { stat: sinon.stub().yields(null, stat) } glob = sinon.stub().yields(null, globs) - rimraf = sinon.stub().yields() stream = { pipeline: sinon.stub().yields() } LocalFileWriter = { promises: { @@ -68,7 +59,6 @@ describe('FSPersistorTests', function() { './Errors': Errors, fs, glob, - rimraf, stream, crypto, // imported by PersistorHelper but otherwise unused here @@ -271,15 +261,22 @@ describe('FSPersistorTests', function() { }) describe('deleteDirectory', function() { - it('Should call rmdir(rimraf) with correct options', async function() { + it('Should call glob with correct options', async function() { await FSPersistor.promises.deleteDirectory(location, files[0]) - expect(rimraf).to.have.been.calledWith( - `${location}/${filteredFilenames[0]}` + expect(glob).to.have.been.calledWith( + `${location}/${filteredFilenames[0]}*` ) }) + it('Should call unlink on the returned files', async function() { + await FSPersistor.promises.deleteDirectory(location, files[0]) + for (const filename of globs) { + expect(fs.unlink).to.have.been.calledWith(filename) + } + }) + it('Should propagate the error', async function() { - rimraf.yields(error) + glob.yields(error) await expect( FSPersistor.promises.deleteDirectory(location, files[0]) ).to.eventually.be.rejected.and.have.property('cause', error) From 9f74aac1a08fcb099ef3ebe98115d8f0710e0488 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Mar 2020 11:34:45 +0000 Subject: [PATCH 
477/555] Add acceptance tests for directory deletion --- .../test/acceptance/js/FilestoreTests.js | 35 +++++++++++++++++-- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 8369698891..dc31d1a83c 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -252,7 +252,7 @@ describe('Filestore', function() { }) describe('with multiple files', function() { - let fileIds, fileUrls + let fileIds, fileUrls, projectUrl const localFileReadPaths = [ '/tmp/filestore_acceptance_tests_file_read_1.txt', '/tmp/filestore_acceptance_tests_file_read_2.txt' @@ -278,10 +278,11 @@ describe('Filestore', function() { }) beforeEach(async function() { + projectUrl = `${filestoreUrl}/project/${projectId}` fileIds = [ObjectId().toString(), ObjectId().toString()] fileUrls = [ - `${filestoreUrl}/project/${projectId}/file/${fileIds[0]}`, - `${filestoreUrl}/project/${projectId}/file/${fileIds[1]}` + `${projectUrl}/file/${fileIds[0]}`, + `${projectUrl}/file/${fileIds[1]}` ] const writeStreams = [ @@ -311,6 +312,34 @@ describe('Filestore', function() { constantFileContents[0].length + constantFileContents[1].length ) }) + + it('should store the files', async function() { + for (const index in fileUrls) { + await expect(rp.get(fileUrls[index])).to.eventually.have.property( + 'body', + constantFileContents[index] + ) + } + }) + + it('should be able to delete the project', async function() { + await expect(rp.delete(projectUrl)).to.eventually.have.property( + 'statusCode', + 204 + ) + + for (const index in fileUrls) { + await expect( + rp.get(fileUrls[index]) + ).to.eventually.be.rejected.and.have.property('statusCode', 404) + } + }) + + it('should not delete a partial project id', async function() { + await expect( + rp.delete(`${filestoreUrl}/project/5`) + ).to.eventually.be.rejected.and.have.property('statusCode', 400) + }) }) describe('with a large file', function() { From 06c4c0f74f5abd2df18b0d156da71ee29dfae575 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Mar 2020 11:35:01 +0000 Subject: [PATCH 478/555] Fix incorrect key when deleting projects --- services/filestore/app/js/Errors.js | 4 +++- services/filestore/app/js/FileController.js | 11 +++++++---- services/filestore/app/js/FileHandler.js | 8 ++++---- .../filestore/test/unit/js/FileControllerTests.js | 2 +- services/filestore/test/unit/js/FileHandlerTests.js | 7 +++---- 5 files changed, 18 insertions(+), 14 deletions(-) diff --git a/services/filestore/app/js/Errors.js b/services/filestore/app/js/Errors.js index 445b666e17..1beefb79c8 100644 --- a/services/filestore/app/js/Errors.js +++ b/services/filestore/app/js/Errors.js @@ -25,6 +25,7 @@ class ConversionsDisabledError extends BackwardCompatibleError {} class ConversionError extends BackwardCompatibleError {} class SettingsError extends BackwardCompatibleError {} class TimeoutError extends BackwardCompatibleError {} +class InvalidParametersError extends BackwardCompatibleError {} class FailedCommandError extends OError { constructor(command, code, stdout, stderr) { @@ -50,5 +51,6 @@ module.exports = { ConversionError, HealthCheckError, SettingsError, - TimeoutError + TimeoutError, + InvalidParametersError } diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index 9ddafb9f69..0e663f9421 100644 --- a/services/filestore/app/js/FileController.js 
+++ b/services/filestore/app/js/FileController.js @@ -161,13 +161,16 @@ function deleteFile(req, res, next) { function deleteProject(req, res, next) { metrics.inc('deleteProject') - const { project_id: projectId, bucket } = req + const { key, bucket } = req - req.requestLogger.setMessage('getting project size') - req.requestLogger.addFields({ projectId, bucket }) + req.requestLogger.setMessage('deleting project') + req.requestLogger.addFields({ key, bucket }) - FileHandler.deleteProject(bucket, projectId, function(err) { + FileHandler.deleteProject(bucket, key, function(err) { if (err) { + if (err instanceof Errors.InvalidParametersError) { + return res.sendStatus(400) + } next(err) } else { res.sendStatus(204) diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index 40bdc95e34..a6032350b1 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -5,7 +5,7 @@ const LocalFileWriter = require('./LocalFileWriter') const FileConverter = require('./FileConverter') const KeyBuilder = require('./KeyBuilder') const ImageOptimiser = require('./ImageOptimiser') -const { ConversionError, WriteError } = require('./Errors') +const { ConversionError, InvalidParametersError } = require('./Errors') module.exports = { insertFile: callbackify(insertFile), @@ -27,7 +27,7 @@ module.exports = { async function insertFile(bucket, key, stream) { const convertedKey = KeyBuilder.getConvertedFolderKey(key) if (!convertedKey.match(/^[0-9a-f]{24}\/[0-9a-f]{24}/i)) { - throw new WriteError({ + throw new InvalidParametersError({ message: 'key does not match validation regex', info: { bucket, key, convertedKey } }) @@ -39,7 +39,7 @@ async function insertFile(bucket, key, stream) { async function deleteFile(bucket, key) { const convertedKey = KeyBuilder.getConvertedFolderKey(key) if (!convertedKey.match(/^[0-9a-f]{24}\/[0-9a-f]{24}/i)) { - throw new WriteError({ + throw new InvalidParametersError({ message: 'key does not match validation regex', info: { bucket, key, convertedKey } }) @@ -52,7 +52,7 @@ async function deleteFile(bucket, key) { async function deleteProject(bucket, key) { if (!key.match(/^[0-9a-f]{24}\//i)) { - throw new WriteError({ + throw new InvalidParametersError({ message: 'key does not match validation regex', info: { bucket, key } }) diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js index 4a99a875a9..16fbb3641c 100644 --- a/services/filestore/test/unit/js/FileControllerTests.js +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -263,7 +263,7 @@ describe('FileController', function() { it('should tell the file handler', function(done) { res.sendStatus = code => { code.should.equal(204) - expect(FileHandler.deleteProject).to.have.been.calledWith(bucket, projectId) + expect(FileHandler.deleteProject).to.have.been.calledWith(bucket, key) done() } FileController.deleteProject(req, res, next) diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index acd3b8fc86..7823c9454f 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -159,10 +159,9 @@ describe('FileHandler', function() { it('should tell the filestore manager to delete the folder', function(done) { FileHandler.deleteProject(bucket, projectKey, err => { expect(err).not.to.exist - 
expect(PersistorManager.promises.deleteDirectory).to.have.been.calledWith( - bucket, - projectKey - ) + expect( + PersistorManager.promises.deleteDirectory + ).to.have.been.calledWith(bucket, projectKey) done() }) }) From 9b658dda18cb2431e3b77f3f1011d0310294bf20 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Mar 2020 15:53:45 +0000 Subject: [PATCH 479/555] Copy-on-delete before unlocking --- services/filestore/app/js/GcsPersistor.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index bc46153983..ddaf37d42d 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -179,9 +179,6 @@ async function deleteFile(bucketName, key) { try { const file = storage.bucket(bucketName).file(key) - if (settings.filestore.gcs.unlockBeforeDelete) { - await file.setMetadata({ eventBasedHold: false }) - } if (settings.filestore.gcs.deletedBucketSuffix) { await file.copy( storage @@ -189,6 +186,9 @@ async function deleteFile(bucketName, key) { .file(`${key}-${new Date()}`) ) } + if (settings.filestore.gcs.unlockBeforeDelete) { + await file.setMetadata({ eventBasedHold: false }) + } await file.delete() } catch (err) { const error = PersistorHelper.wrapError( From b37c52fc3ab5369638654046a7f91028b94105ab Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Mar 2020 15:54:05 +0000 Subject: [PATCH 480/555] Make GCS delete concurrency configurable --- services/filestore/app/js/GcsPersistor.js | 10 +++++++--- services/filestore/config/settings.defaults.coffee | 1 + 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index ddaf37d42d..a78bfce2cd 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -209,9 +209,13 @@ async function deleteDirectory(bucketName, key) { .bucket(bucketName) .getFiles({ directory: key }) - await asyncPool(50, files, async file => { - await deleteFile(bucketName, file.name) - }) + await asyncPool( + settings.filestore.gcs.deleteConcurrency, + files, + async file => { + await deleteFile(bucketName, file.name) + } + ) } catch (err) { const error = PersistorHelper.wrapError( err, diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 6b5238e552..6867945d10 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -42,6 +42,7 @@ settings = projectId: process.env['GCS_PROJECT_ID'] unlockBeforeDelete: process.env['GCS_UNLOCK_BEFORE_DELETE'] == "true" # unlock an event-based hold before deleting. default false deletedBucketSuffix: process.env['GCS_DELETED_BUCKET_SUFFIX'] # if present, copy file to another bucket on delete. default null + deleteConcurrency: parseInt(process.env['GCS_DELETE_CONCURRENCY']) || 50 s3: if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? 
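
The GCS delete path configured above now works in three steps: deleteFile first copies the object into a companion bucket named with deletedBucketSuffix, then clears the event-based hold if unlockBeforeDelete is set, and only then deletes the original; deleteDirectory lists the keys under a prefix and runs that per-file routine through a worker pool bounded by deleteConcurrency. The sketch below is a condensed, standalone illustration of that flow, not the persistor itself: the bucket name, key, and gcsSettings object are hypothetical stand-ins for the values read from the environment in settings.defaults.coffee, and the timestamped key uses the ISO format the series adopts in patch 482.

    // Illustrative sketch of the copy-on-delete flow driven by the GCS settings
    // above. Names here (gcsSettings, deleteFile, deleteDirectory) are local to
    // this example and are not the persistor's exports.
    const { Storage } = require('@google-cloud/storage')
    const asyncPool = require('tiny-async-pool')

    const storage = new Storage()
    const gcsSettings = {
      unlockBeforeDelete: false, // GCS_UNLOCK_BEFORE_DELETE
      deletedBucketSuffix: '-deleted', // GCS_DELETED_BUCKET_SUFFIX
      deleteConcurrency: 50 // GCS_DELETE_CONCURRENCY
    }

    async function deleteFile(bucketName, key) {
      const file = storage.bucket(bucketName).file(key)
      if (gcsSettings.deletedBucketSuffix) {
        // keep a timestamped copy in the soft-delete bucket before removing the original
        await file.copy(
          storage
            .bucket(`${bucketName}${gcsSettings.deletedBucketSuffix}`)
            .file(`${key}-${new Date().toISOString()}`)
        )
      }
      if (gcsSettings.unlockBeforeDelete) {
        // an event-based hold blocks deletion until it is released
        await file.setMetadata({ eventBasedHold: false })
      }
      await file.delete()
    }

    async function deleteDirectory(bucketName, prefix) {
      // list everything under the prefix, then delete with bounded parallelism
      const [files] = await storage
        .bucket(bucketName)
        .getFiles({ directory: prefix })
      await asyncPool(gcsSettings.deleteConcurrency, files, file =>
        deleteFile(bucketName, file.name)
      )
    }

In the acceptance tests, unlockBeforeDelete stays false because fake-gcs does not support event-based holds, which is why TestConfig.js above sets it explicitly while still exercising the '-deleted' bucket copy.
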
From 9d32d4ec16b97007743126c432d00c70c62646db Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Mar 2020 15:57:37 +0000 Subject: [PATCH 481/555] Don't modify 'opts' parameter --- services/filestore/app/js/GcsPersistor.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index a78bfce2cd..5a44132882 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -113,7 +113,8 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { } } -async function getFileStream(bucketName, key, opts = {}) { +async function getFileStream(bucketName, key, _opts = {}) { + const opts = Object.assign({}, _opts) if (opts.end) { // S3 (and http range headers) treat 'end' as inclusive, so increase this by 1 opts.end++ From cb4bdd99f4b2094940a32e634e0a521810156f03 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 16 Mar 2020 16:09:56 +0000 Subject: [PATCH 482/555] Use an ISODate for deleted file names --- services/filestore/app/js/GcsPersistor.js | 2 +- services/filestore/test/acceptance/js/FilestoreTests.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index 5a44132882..799ee65905 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -184,7 +184,7 @@ async function deleteFile(bucketName, key) { await file.copy( storage .bucket(`${bucketName}${settings.filestore.gcs.deletedBucketSuffix}`) - .file(`${key}-${new Date()}`) + .file(`${key}-${new Date().toISOString()}`) ) } if (settings.filestore.gcs.unlockBeforeDelete) { diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index dc31d1a83c..272ffb52bd 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -453,7 +453,7 @@ describe('Filestore', function() { await TestHelper.expectPersistorToHaveFile( app.persistor, `${Settings.filestore.stores.user_files}-deleted`, - `${projectId}/${fileId}-${date}`, + `${projectId}/${fileId}-${date.toISOString()}`, content ) }) From 65f19346f0d72aa3e1026e43b89d16229f81ae98 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 19 Mar 2020 11:10:31 +0000 Subject: [PATCH 483/555] Fix fallback to custom s3 buckets --- services/filestore/app/js/MigrationPersistor.js | 2 +- services/filestore/test/acceptance/js/FilestoreTests.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/js/MigrationPersistor.js b/services/filestore/app/js/MigrationPersistor.js index 3ddc762922..d25ea84ce4 100644 --- a/services/filestore/app/js/MigrationPersistor.js +++ b/services/filestore/app/js/MigrationPersistor.js @@ -115,7 +115,7 @@ module.exports = function(primary, fallback) { } function _getFallbackBucket(bucket) { - return Settings.filestore.fallback.buckets[bucket] + return Settings.filestore.fallback.buckets[bucket] || bucket } function _wrapFallbackMethod(method) { diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 272ffb52bd..668570e7cb 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -374,7 +374,7 @@ describe('Filestore', function() { }) }) - if (backend 
=== 'S3Persistor') { + if (backend === 'S3Persistor' || backend === 'FallbackGcsToS3Persistor') { describe('with a file in a specific bucket', function() { let constantFileContent, fileId, fileUrl, bucketName From 2ce3c8f97e0b46b1c24454aec58e893f4a865ae1 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 23 Mar 2020 16:18:07 +0100 Subject: [PATCH 484/555] [misc] bump logger-sharelatex to 1.9.1 --- services/filestore/package-lock.json | 6 +++--- services/filestore/package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 90f5698668..e2ff0d61d8 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -3345,9 +3345,9 @@ "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==" }, "logger-sharelatex": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.0.tgz", - "integrity": "sha512-yVTuha82047IiMOQLgQHCZGKkJo6I2+2KtiFKpgkIooR2yZaoTEvAeoMwBesSDSpGUpvUJ/+9UI+PmRyc+PQKQ==", + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.1.tgz", + "integrity": "sha512-9s6JQnH/PN+Js2CmI8+J3MQCTNlRzP2Dh4pcekXrV6Jm5J4HzyPi+6d3zfBskZ4NBmaUVw9hC4p5dmdaRmh4mQ==", "requires": { "@google-cloud/logging-bunyan": "^2.0.0", "@overleaf/o-error": "^2.0.0", diff --git a/services/filestore/package.json b/services/filestore/package.json index c4b8f16b15..2425804672 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -27,7 +27,7 @@ "express": "^4.2.0", "glob": "^7.1.6", "lodash.once": "^4.1.1", - "logger-sharelatex": "^1.7.0", + "logger-sharelatex": "^1.9.1", "metrics-sharelatex": "^2.5.0", "node-uuid": "~1.4.1", "range-parser": "^1.0.2", From d073fe75caf585cc0bf5ab4716c1b0f011fb92c5 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 25 Mar 2020 16:59:51 +0000 Subject: [PATCH 485/555] Use single pipeline when calculating md5 --- services/filestore/app/js/GcsPersistor.js | 29 +++++---- services/filestore/app/js/PersistorHelper.js | 61 +++++++++++++------ services/filestore/app/js/S3Persistor.js | 32 +++++----- services/filestore/package-lock.json | 8 --- services/filestore/package.json | 1 - .../test/unit/js/FSPersistorTests.js | 8 ++- .../test/unit/js/GcsPersistorTests.js | 61 ++++++------------- .../test/unit/js/S3PersistorTests.js | 51 +++++----------- 8 files changed, 111 insertions(+), 140 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index 799ee65905..ae1c2dd53a 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -65,17 +65,15 @@ async function sendFile(bucketName, key, fsPath) { async function sendStream(bucketName, key, readStream, sourceMd5) { try { - let hashPromise + // egress from us to gcs + const observeOptions = { metric: 'gcs.egress' } - // if there is no supplied md5 hash, we calculate the hash as the data passes through if (!sourceMd5) { - hashPromise = PersistorHelper.calculateStreamMd5(readStream) + // if there is no supplied md5 hash, we calculate the hash as the data passes through + observeOptions.hash = 'md5' } - const meteredStream = PersistorHelper.getMeteredStream( - readStream, - 'gcs.egress' // egress from us to gcs - ) + const observer = new PersistorHelper.ObserverStream(observeOptions) const writeOptions = { // disabling of 
resumable uploads is recommended by Google: @@ -94,12 +92,12 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { .file(key) .createWriteStream(writeOptions) - await pipeline(meteredStream, uploadStream) + await pipeline(readStream, observer, uploadStream) // if we didn't have an md5 hash, we should compare our computed one with Google's // as we couldn't tell GCS about it beforehand - if (hashPromise) { - sourceMd5 = await hashPromise + if (!sourceMd5) { + sourceMd5 = observer.getHash() // throws on mismatch await PersistorHelper.verifyMd5(GcsPersistor, bucketName, key, sourceMd5) } @@ -124,14 +122,15 @@ async function getFileStream(bucketName, key, _opts = {}) { .file(key) .createReadStream(opts) - const meteredStream = PersistorHelper.getMeteredStream( - stream, - 'gcs.ingress' // ingress to us from gcs - ) + // ingress to us from gcs + const observer = new PersistorHelper.ObserverStream({ + metric: 'gcs.ingress' + }) + pipeline(stream, observer) try { await PersistorHelper.waitForStreamReady(stream) - return meteredStream + return observer } catch (err) { throw PersistorHelper.wrapError( err, diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js index a58f024bb4..ad5152374f 100644 --- a/services/filestore/app/js/PersistorHelper.js +++ b/services/filestore/app/js/PersistorHelper.js @@ -1,17 +1,54 @@ const crypto = require('crypto') -const metrics = require('metrics-sharelatex') -const meter = require('stream-meter') const Stream = require('stream') const logger = require('logger-sharelatex') +const metrics = require('metrics-sharelatex') const { WriteError, ReadError, NotFoundError } = require('./Errors') const { promisify } = require('util') const pipeline = promisify(Stream.pipeline) +// Observes data that passes through and computes some metadata for it +// - specifically, it computes the number of bytes transferred, and optionally +// computes a cryptographic hash based on the 'hash' option. 
e.g., pass +// { hash: 'md5' } to compute the md5 hash of the stream +// - if 'metric' is supplied as an option, this metric will be incremented by +// the number of bytes transferred +class ObserverStream extends Stream.Transform { + constructor(options) { + super(options) + + this.bytes = 0 + + if (options.hash) { + this.hash = crypto.createHash(options.hash) + } + if (options.metric) { + const onEnd = () => { + metrics.count(options.metric, this.bytes) + } + this.once('error', onEnd) + this.once('end', onEnd) + } + } + + _transform(chunk, encoding, done) { + if (this.hash) { + this.hash.update(chunk) + } + this.bytes += chunk.length + this.push(chunk) + done() + } + + getHash() { + return this.hash && this.hash.digest('hex') + } +} + module.exports = { + ObserverStream, calculateStreamMd5, verifyMd5, - getMeteredStream, waitForStreamReady, wrapError, hexToBase64, @@ -19,6 +56,7 @@ module.exports = { } // returns a promise which resolves with the md5 hash of the stream +// - consumes the stream function calculateStreamMd5(stream) { const hash = crypto.createHash('md5') hash.setEncoding('hex') @@ -53,23 +91,6 @@ async function verifyMd5(persistor, bucket, key, sourceMd5, destMd5 = null) { } } -// returns the next stream in the pipeline, and calls the callback with the byte count -// when the stream finishes or receives an error -function getMeteredStream(stream, metricName) { - const meteredStream = meter() - - pipeline(stream, meteredStream) - .then(() => { - metrics.count(metricName, meteredStream.bytes) - }) - .catch(() => { - // on error, just send how many bytes we received before the stream stopped - metrics.count(metricName, meteredStream.bytes) - }) - - return meteredStream -} - // resolves when a stream is 'readable', or rejects if the stream throws an error // before that happens - this lets us handle protocol-level errors before trying // to read them diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index 1b92a61ae6..ba82db31e2 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -10,13 +10,15 @@ const PersistorHelper = require('./PersistorHelper') const fs = require('fs') const S3 = require('aws-sdk/clients/s3') const { URL } = require('url') -const { callbackify } = require('util') +const Stream = require('stream') +const { promisify, callbackify } = require('util') const { WriteError, ReadError, NotFoundError, SettingsError } = require('./Errors') +const pipeline = promisify(Stream.pipeline) const S3Persistor = { sendFile: callbackify(sendFile), @@ -51,26 +53,25 @@ async function sendFile(bucketName, key, fsPath) { async function sendStream(bucketName, key, readStream, sourceMd5) { try { - // if there is no supplied md5 hash, we calculate the hash as the data passes through - let hashPromise + // egress from us to S3 + const observeOptions = { metric: 's3.egress' } let b64Hash if (sourceMd5) { b64Hash = PersistorHelper.hexToBase64(sourceMd5) } else { - hashPromise = PersistorHelper.calculateStreamMd5(readStream) + // if there is no supplied md5 hash, we calculate the hash as the data passes through + observeOptions.hash = 'md5' } - const meteredStream = PersistorHelper.getMeteredStream( - readStream, - 's3.egress' // egress from us to s3 - ) + const observer = new PersistorHelper.ObserverStream(observeOptions) + pipeline(readStream, observer) // if we have an md5 hash, pass this to S3 to verify the upload const uploadOptions = { Bucket: bucketName, Key: key, - Body: 
meteredStream + Body: observer } if (b64Hash) { uploadOptions.ContentMD5 = b64Hash @@ -92,8 +93,8 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { // if we didn't have an md5 hash, we should compare our computed one with S3's // as we couldn't tell S3 about it beforehand - if (hashPromise) { - sourceMd5 = await hashPromise + if (!sourceMd5) { + sourceMd5 = observer.getHash() // throws on mismatch await PersistorHelper.verifyMd5( S3Persistor, @@ -128,14 +129,13 @@ async function getFileStream(bucketName, key, opts) { .getObject(params) .createReadStream() - const meteredStream = PersistorHelper.getMeteredStream( - stream, - 's3.ingress' // ingress to us from s3 - ) + // ingress from S3 to us + const observer = new PersistorHelper.ObserverStream({ metric: 's3.ingress' }) + pipeline(stream, observer) try { await PersistorHelper.waitForStreamReady(stream) - return meteredStream + return observer } catch (err) { throw PersistorHelper.wrapError( err, diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 90f5698668..f50ef5f552 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -5382,14 +5382,6 @@ "stubs": "^3.0.0" } }, - "stream-meter": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/stream-meter/-/stream-meter-1.0.4.tgz", - "integrity": "sha512-4sOEtrbgFotXwnEuzzsQBYEV1elAeFSO8rSGeTwabuX1RRn/kEq9JVH7I0MRBhKVRR0sJkr0M0QCH7yOLf9fhQ==", - "requires": { - "readable-stream": "^2.1.4" - } - }, "stream-shift": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index c4b8f16b15..53d7e24c6c 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -35,7 +35,6 @@ "request-promise-native": "^1.0.8", "settings-sharelatex": "^1.1.0", "stream-buffers": "~0.2.5", - "stream-meter": "^1.0.4", "tiny-async-pool": "^1.1.0" }, "devDependencies": { diff --git a/services/filestore/test/unit/js/FSPersistorTests.js b/services/filestore/test/unit/js/FSPersistorTests.js index 4777de502a..c8255987ba 100644 --- a/services/filestore/test/unit/js/FSPersistorTests.js +++ b/services/filestore/test/unit/js/FSPersistorTests.js @@ -3,6 +3,7 @@ const chai = require('chai') const { expect } = chai const SandboxedModule = require('sandboxed-module') const Errors = require('../../../app/js/Errors') +const StreamModule = require('stream') chai.use(require('sinon-chai')) chai.use(require('chai-as-promised')) @@ -38,7 +39,10 @@ describe('FSPersistorTests', function() { stat: sinon.stub().yields(null, stat) } glob = sinon.stub().yields(null, globs) - stream = { pipeline: sinon.stub().yields() } + stream = { + pipeline: sinon.stub().yields(), + Transform: StreamModule.Transform + } LocalFileWriter = { promises: { writeStream: sinon.stub().resolves(tempFile), @@ -48,6 +52,7 @@ describe('FSPersistorTests', function() { Hash = { end: sinon.stub(), read: sinon.stub().returns(md5), + digest: sinon.stub().returns(md5), setEncoding: sinon.stub() } crypto = { @@ -62,7 +67,6 @@ describe('FSPersistorTests', function() { stream, crypto, // imported by PersistorHelper but otherwise unused here - 'stream-meter': {}, 'logger-sharelatex': {}, 'metrics-sharelatex': {} }, diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index cd95bf1e20..0ca0f39d0f 100644 --- 
a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -5,6 +5,7 @@ const modulePath = '../../../app/js/GcsPersistor.js' const SandboxedModule = require('sandboxed-module') const { ObjectId } = require('mongodb') const asyncPool = require('tiny-async-pool') +const StreamModule = require('stream') const Errors = require('../../../app/js/Errors') @@ -13,7 +14,6 @@ describe('GcsPersistorTests', function() { const bucket = 'womBucket' const key = 'monKey' const destKey = 'donKey' - const objectSize = 5555 const genericError = new Error('guru meditation error') const filesSize = 33 const md5 = 'ffffffff00000000ffffffff00000000' @@ -24,8 +24,6 @@ describe('GcsPersistorTests', function() { Storage, Fs, GcsNotFoundError, - Meter, - MeteredStream, ReadStream, Stream, GcsBucket, @@ -71,7 +69,8 @@ describe('GcsPersistorTests', function() { } Stream = { - pipeline: sinon.stub().yields() + pipeline: sinon.stub().yields(), + Transform: StreamModule.Transform } Metrics = { @@ -109,18 +108,10 @@ describe('GcsPersistorTests', function() { FileNotFoundError = new Error('File not found') FileNotFoundError.code = 'ENOENT' - MeteredStream = { - type: 'metered', - on: sinon.stub(), - bytes: objectSize - } - MeteredStream.on.withArgs('finish').yields() - MeteredStream.on.withArgs('readable').yields() - Meter = sinon.stub().returns(MeteredStream) - Hash = { end: sinon.stub(), read: sinon.stub().returns(md5), + digest: sinon.stub().returns(md5), setEncoding: sinon.stub() } crypto = { @@ -139,7 +130,6 @@ describe('GcsPersistorTests', function() { 'tiny-async-pool': asyncPool, './Errors': Errors, fs: Fs, - 'stream-meter': Meter, stream: Stream, 'metrics-sharelatex': Metrics, crypto @@ -157,7 +147,7 @@ describe('GcsPersistorTests', function() { }) it('returns a metered stream', function() { - expect(stream).to.equal(MeteredStream) + expect(stream).to.be.instanceOf(StreamModule.Transform) }) it('fetches the right key from the right bucket', function() { @@ -169,13 +159,9 @@ describe('GcsPersistorTests', function() { it('pipes the stream through the meter', function() { expect(Stream.pipeline).to.have.been.calledWith( ReadStream, - MeteredStream + sinon.match.instanceOf(StreamModule.Transform) ) }) - - it('records an ingress metric', function() { - expect(Metrics.count).to.have.been.calledWith('gcs.ingress', objectSize) - }) }) describe('when called with a byte range', function() { @@ -189,7 +175,7 @@ describe('GcsPersistorTests', function() { }) it('returns a metered stream', function() { - expect(stream).to.equal(MeteredStream) + expect(stream).to.be.instanceOf(StreamModule.Transform) }) it('passes the byte range on to GCS', function() { @@ -341,26 +327,16 @@ describe('GcsPersistorTests', function() { }) }) - it('should meter the stream', function() { + it('should meter the stream and pass it to GCS', function() { expect(Stream.pipeline).to.have.been.calledWith( ReadStream, - MeteredStream - ) - }) - - it('should pipe the metered stream to GCS', function() { - expect(Stream.pipeline).to.have.been.calledWith( - MeteredStream, + sinon.match.instanceOf(StreamModule.Transform), WriteStream ) }) - it('should record an egress metric', function() { - expect(Metrics.count).to.have.been.calledWith('gcs.egress', objectSize) - }) - it('calculates the md5 hash of the file', function() { - expect(Stream.pipeline).to.have.been.calledWith(ReadStream, Hash) + expect(Hash.digest).to.have.been.called }) }) @@ -375,10 +351,7 @@ describe('GcsPersistorTests', function() { }) 
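// ---------------------------------------------------------------------------
// Illustrative sketch, not part of this patch series: the ObserverStream that
// this commit adds to PersistorHelper is a plain stream.Transform which counts
// bytes and can optionally compute an md5 hash as data flows through it. The
// stand-alone example below uses only Node built-ins; the class name, file
// names and option handling are assumptions for illustration, not the patch's
// actual API.
const { Transform, pipeline } = require('stream')
const { createHash } = require('crypto')
const { createReadStream, createWriteStream } = require('fs')

class ByteCountingMd5Stream extends Transform {
  constructor(options = {}) {
    super(options)
    this.bytes = 0
    this.hash = createHash('md5')
  }

  _transform(chunk, encoding, callback) {
    this.bytes += chunk.length // running byte count, as in the patch
    this.hash.update(chunk) // incremental md5, as in the patch
    callback(null, chunk) // pass the data through unchanged
  }
}

const observer = new ByteCountingMd5Stream()
pipeline(
  createReadStream('input.bin'), // hypothetical input file
  observer,
  createWriteStream('/dev/null'),
  err => {
    if (err) throw err
    console.log(observer.bytes, observer.hash.digest('hex'))
  }
)
// ---------------------------------------------------------------------------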
it('should not calculate the md5 hash of the file', function() { - expect(Stream.pipeline).not.to.have.been.calledWith( - sinon.match.any, - Hash - ) + expect(Hash.digest).not.to.have.been.called }) it('sends the hash in base64', function() { @@ -400,7 +373,12 @@ describe('GcsPersistorTests', function() { let error beforeEach(async function() { Stream.pipeline - .withArgs(MeteredStream, WriteStream, sinon.match.any) + .withArgs( + ReadStream, + sinon.match.instanceOf(StreamModule.Transform), + WriteStream, + sinon.match.any + ) .yields(genericError) try { await GcsPersistor.promises.sendStream(bucket, key, ReadStream) @@ -438,10 +416,7 @@ describe('GcsPersistorTests', function() { it('should upload the stream via the meter', function() { expect(Stream.pipeline).to.have.been.calledWith( ReadStream, - MeteredStream - ) - expect(Stream.pipeline).to.have.been.calledWith( - MeteredStream, + sinon.match.instanceOf(StreamModule.Transform), WriteStream ) }) diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js index 484a0209a8..2117164d74 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -3,6 +3,7 @@ const chai = require('chai') const { expect } = chai const modulePath = '../../../app/js/S3Persistor.js' const SandboxedModule = require('sandboxed-module') +const StreamModule = require('stream') const Errors = require('../../../app/js/Errors') @@ -32,8 +33,6 @@ describe('S3PersistorTests', function() { Logger, S3, Fs, - Meter, - MeteredStream, ReadStream, Stream, S3Persistor, @@ -63,7 +62,8 @@ describe('S3PersistorTests', function() { } Stream = { - pipeline: sinon.stub().yields() + pipeline: sinon.stub().yields(), + Transform: StreamModule.Transform } EmptyPromise = { @@ -89,14 +89,6 @@ describe('S3PersistorTests', function() { createReadStream: sinon.stub().returns(ReadStream) } - MeteredStream = { - type: 'metered', - on: sinon.stub(), - bytes: objectSize - } - MeteredStream.on.withArgs('finish').yields() - Meter = sinon.stub().returns(MeteredStream) - S3NotFoundError = new Error('not found') S3NotFoundError.code = 'NoSuchKey' @@ -136,6 +128,7 @@ describe('S3PersistorTests', function() { Hash = { end: sinon.stub(), read: sinon.stub().returns(md5), + digest: sinon.stub().returns(md5), setEncoding: sinon.stub() } crypto = { @@ -153,7 +146,6 @@ describe('S3PersistorTests', function() { 'logger-sharelatex': Logger, './Errors': Errors, fs: Fs, - 'stream-meter': Meter, stream: Stream, 'metrics-sharelatex': Metrics, crypto @@ -171,7 +163,7 @@ describe('S3PersistorTests', function() { }) it('returns a metered stream', function() { - expect(stream).to.equal(MeteredStream) + expect(stream).to.be.instanceOf(StreamModule.Transform) }) it('sets the AWS client up with credentials from settings', function() { @@ -188,13 +180,9 @@ describe('S3PersistorTests', function() { it('pipes the stream through the meter', function() { expect(Stream.pipeline).to.have.been.calledWith( S3ReadStream, - MeteredStream + sinon.match.instanceOf(StreamModule.Transform) ) }) - - it('records an ingress metric', function() { - expect(Metrics.count).to.have.been.calledWith('s3.ingress', objectSize) - }) }) describe('when called with a byte range', function() { @@ -208,7 +196,7 @@ describe('S3PersistorTests', function() { }) it('returns a metered stream', function() { - expect(stream).to.equal(MeteredStream) + expect(stream).to.be.instanceOf(Stream.Transform) }) it('passes the byte range on to 
S3', function() { @@ -242,7 +230,7 @@ describe('S3PersistorTests', function() { }) it('returns a metered stream', function() { - expect(stream).to.equal(MeteredStream) + expect(stream).to.be.instanceOf(Stream.Transform) }) it('sets the AWS client up with the alternative credentials', function() { @@ -457,7 +445,7 @@ describe('S3PersistorTests', function() { expect(S3Client.upload).to.have.been.calledWith({ Bucket: bucket, Key: key, - Body: MeteredStream + Body: sinon.match.instanceOf(Stream.Transform) }) }) @@ -470,16 +458,12 @@ describe('S3PersistorTests', function() { it('should meter the stream', function() { expect(Stream.pipeline).to.have.been.calledWith( ReadStream, - MeteredStream + sinon.match.instanceOf(Stream.Transform) ) }) - it('should record an egress metric', function() { - expect(Metrics.count).to.have.been.calledWith('s3.egress', objectSize) - }) - it('calculates the md5 hash of the file', function() { - expect(Stream.pipeline).to.have.been.calledWith(ReadStream, Hash) + expect(Hash.digest).to.have.been.called }) }) @@ -494,17 +478,14 @@ describe('S3PersistorTests', function() { }) it('should not calculate the md5 hash of the file', function() { - expect(Stream.pipeline).not.to.have.been.calledWith( - sinon.match.any, - Hash - ) + expect(Hash.digest).not.to.have.been.called }) it('sends the hash in base64', function() { expect(S3Client.upload).to.have.been.calledWith({ Bucket: bucket, Key: key, - Body: MeteredStream, + Body: sinon.match.instanceOf(StreamModule.Transform), ContentMD5: 'qqqqqru7u7uqqqqqu7u7uw==' }) }) @@ -555,12 +536,12 @@ describe('S3PersistorTests', function() { it('should meter the download', function() { expect(Stream.pipeline).to.have.been.calledWith( S3ReadStream, - MeteredStream + sinon.match.instanceOf(Stream.Transform) ) }) it('should calculate the md5 hash from the file', function() { - expect(Stream.pipeline).to.have.been.calledWith(MeteredStream, Hash) + expect(Hash.digest).to.have.been.called }) }) }) @@ -579,7 +560,7 @@ describe('S3PersistorTests', function() { expect(S3Client.upload).to.have.been.calledWith({ Bucket: bucket, Key: key, - Body: MeteredStream + Body: sinon.match.instanceOf(StreamModule.Transform) }) }) }) From 7e45a82c356bba713205bede583af4e843819269 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 26 Mar 2020 15:09:56 +0000 Subject: [PATCH 486/555] Use autodestroy on Transform stream --- services/filestore/app/js/PersistorHelper.js | 1 + 1 file changed, 1 insertion(+) diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js index ad5152374f..9df7d42369 100644 --- a/services/filestore/app/js/PersistorHelper.js +++ b/services/filestore/app/js/PersistorHelper.js @@ -15,6 +15,7 @@ const pipeline = promisify(Stream.pipeline) // the number of bytes transferred class ObserverStream extends Stream.Transform { constructor(options) { + options.autoDestroy = true super(options) this.bytes = 0 From 238d96ed448787ff3e91a9f926f0bdb08a9b4c99 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 26 Mar 2020 15:11:22 +0000 Subject: [PATCH 487/555] Fail acceptance tests on unhandled promise rejection --- services/filestore/test/acceptance/js/FilestoreTests.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 668570e7cb..a2c710a382 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ 
-26,6 +26,12 @@ if (!process.env.AWS_ACCESS_KEY_ID) { throw new Error('please provide credentials for the AWS S3 test server') } +process.on('unhandledRejection', e => { + // eslint-disable no-console + console.log('** Unhandled Promise Rejection **\n', e) + throw e +}) + // store settings for multiple backends, so that we can test each one. // fs will always be available - add others if they are configured const BackendSettings = require('./TestConfig') From 1f037ef653b6bdf9f97f01e3c1711bdbb58c7d6f Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 26 Mar 2020 16:24:08 +0000 Subject: [PATCH 488/555] Catch errors from pipeline and stream when waiting for streams to be readable --- services/filestore/app/js/GcsPersistor.js | 4 +-- services/filestore/app/js/PersistorHelper.js | 32 ++++++++++++----- services/filestore/app/js/S3Persistor.js | 4 +-- .../test/unit/js/GcsPersistorTests.js | 35 +++++++++++------- .../test/unit/js/S3PersistorTests.js | 36 ++++++++++++------- 5 files changed, 73 insertions(+), 38 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index ae1c2dd53a..99a8c1a513 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -126,10 +126,10 @@ async function getFileStream(bucketName, key, _opts = {}) { const observer = new PersistorHelper.ObserverStream({ metric: 'gcs.ingress' }) - pipeline(stream, observer) try { - await PersistorHelper.waitForStreamReady(stream) + // wait for the pipeline to be ready, to catch non-200s + await PersistorHelper.getReadyPipeline(stream, observer) return observer } catch (err) { throw PersistorHelper.wrapError( diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js index 9df7d42369..f99dd78e45 100644 --- a/services/filestore/app/js/PersistorHelper.js +++ b/services/filestore/app/js/PersistorHelper.js @@ -50,7 +50,7 @@ module.exports = { ObserverStream, calculateStreamMd5, verifyMd5, - waitForStreamReady, + getReadyPipeline, wrapError, hexToBase64, base64ToHex @@ -94,19 +94,33 @@ async function verifyMd5(persistor, bucket, key, sourceMd5, destMd5 = null) { // resolves when a stream is 'readable', or rejects if the stream throws an error // before that happens - this lets us handle protocol-level errors before trying -// to read them -function waitForStreamReady(stream) { +// to read them - these can come from the call to pipeline or the stream itself +function getReadyPipeline(...streams) { return new Promise((resolve, reject) => { + const lastStream = streams.slice(-1)[0] + let resolvedOrErrored = false + const onError = function(err) { - reject(wrapError(err, 'error before stream became ready', {}, ReadError)) + if (!resolvedOrErrored) { + resolvedOrErrored = true + reject( + wrapError(err, 'error before stream became ready', {}, ReadError) + ) + } } const onStreamReady = function() { - stream.removeListener('readable', onStreamReady) - stream.removeListener('error', onError) - resolve(stream) + if (!resolvedOrErrored) { + resolvedOrErrored = true + lastStream.removeListener('readable', onStreamReady) + lastStream.removeListener('error', onError) + resolve(lastStream) + } } - stream.on('readable', onStreamReady) - stream.on('error', onError) + + pipeline(...streams).catch(onError) + + lastStream.on('readable', onStreamReady) + lastStream.on('error', onError) }) } diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index 
ba82db31e2..8216c5f7cb 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -131,10 +131,10 @@ async function getFileStream(bucketName, key, opts) { // ingress from S3 to us const observer = new PersistorHelper.ObserverStream({ metric: 's3.ingress' }) - pipeline(stream, observer) try { - await PersistorHelper.waitForStreamReady(stream) + // wait for the pipeline to be ready, to catch non-200s + await PersistorHelper.getReadyPipeline(stream, observer) return observer } catch (err) { throw PersistorHelper.wrapError( diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index 0ca0f39d0f..2df42729ec 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -5,7 +5,6 @@ const modulePath = '../../../app/js/GcsPersistor.js' const SandboxedModule = require('sandboxed-module') const { ObjectId } = require('mongodb') const asyncPool = require('tiny-async-pool') -const StreamModule = require('stream') const Errors = require('../../../app/js/Errors') @@ -21,6 +20,7 @@ describe('GcsPersistorTests', function() { let Metrics, Logger, + Transform, Storage, Fs, GcsNotFoundError, @@ -68,9 +68,20 @@ describe('GcsPersistorTests', function() { removeListener: sinon.stub() } + Transform = class { + on(event, callback) { + if (event === 'readable') { + callback() + } + } + + once() {} + removeListener() {} + } + Stream = { pipeline: sinon.stub().yields(), - Transform: StreamModule.Transform + Transform: Transform } Metrics = { @@ -147,7 +158,7 @@ describe('GcsPersistorTests', function() { }) it('returns a metered stream', function() { - expect(stream).to.be.instanceOf(StreamModule.Transform) + expect(stream).to.be.instanceOf(Transform) }) it('fetches the right key from the right bucket', function() { @@ -159,7 +170,7 @@ describe('GcsPersistorTests', function() { it('pipes the stream through the meter', function() { expect(Stream.pipeline).to.have.been.calledWith( ReadStream, - sinon.match.instanceOf(StreamModule.Transform) + sinon.match.instanceOf(Transform) ) }) }) @@ -175,7 +186,7 @@ describe('GcsPersistorTests', function() { }) it('returns a metered stream', function() { - expect(stream).to.be.instanceOf(StreamModule.Transform) + expect(stream).to.be.instanceOf(Transform) }) it('passes the byte range on to GCS', function() { @@ -190,8 +201,8 @@ describe('GcsPersistorTests', function() { let error, stream beforeEach(async function() { - ReadStream.on = sinon.stub() - ReadStream.on.withArgs('error').yields(GcsNotFoundError) + Transform.prototype.on = sinon.stub() + Transform.prototype.on.withArgs('error').yields(GcsNotFoundError) try { stream = await GcsPersistor.promises.getFileStream(bucket, key) } catch (err) { @@ -220,8 +231,8 @@ describe('GcsPersistorTests', function() { let error, stream beforeEach(async function() { - ReadStream.on = sinon.stub() - ReadStream.on.withArgs('error').yields(genericError) + Transform.prototype.on = sinon.stub() + Transform.prototype.on.withArgs('error').yields(genericError) try { stream = await GcsPersistor.promises.getFileStream(bucket, key) } catch (err) { @@ -330,7 +341,7 @@ describe('GcsPersistorTests', function() { it('should meter the stream and pass it to GCS', function() { expect(Stream.pipeline).to.have.been.calledWith( ReadStream, - sinon.match.instanceOf(StreamModule.Transform), + sinon.match.instanceOf(Transform), WriteStream ) }) @@ -375,7 +386,7 @@ describe('GcsPersistorTests', 
function() { Stream.pipeline .withArgs( ReadStream, - sinon.match.instanceOf(StreamModule.Transform), + sinon.match.instanceOf(Transform), WriteStream, sinon.match.any ) @@ -416,7 +427,7 @@ describe('GcsPersistorTests', function() { it('should upload the stream via the meter', function() { expect(Stream.pipeline).to.have.been.calledWith( ReadStream, - sinon.match.instanceOf(StreamModule.Transform), + sinon.match.instanceOf(Transform), WriteStream ) }) diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js index 2117164d74..414179afd1 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -3,7 +3,6 @@ const chai = require('chai') const { expect } = chai const modulePath = '../../../app/js/S3Persistor.js' const SandboxedModule = require('sandboxed-module') -const StreamModule = require('stream') const Errors = require('../../../app/js/Errors') @@ -31,6 +30,7 @@ describe('S3PersistorTests', function() { let Metrics, Logger, + Transform, S3, Fs, ReadStream, @@ -61,9 +61,20 @@ describe('S3PersistorTests', function() { } } + Transform = class { + on(event, callback) { + if (event === 'readable') { + callback() + } + } + + once() {} + removeListener() {} + } + Stream = { pipeline: sinon.stub().yields(), - Transform: StreamModule.Transform + Transform: Transform } EmptyPromise = { @@ -100,7 +111,6 @@ describe('S3PersistorTests', function() { pipe: sinon.stub(), removeListener: sinon.stub() } - S3ReadStream.on.withArgs('readable').yields() S3Client = { getObject: sinon.stub().returns({ createReadStream: sinon.stub().returns(S3ReadStream) @@ -163,7 +173,7 @@ describe('S3PersistorTests', function() { }) it('returns a metered stream', function() { - expect(stream).to.be.instanceOf(StreamModule.Transform) + expect(stream).to.be.instanceOf(Transform) }) it('sets the AWS client up with credentials from settings', function() { @@ -180,7 +190,7 @@ describe('S3PersistorTests', function() { it('pipes the stream through the meter', function() { expect(Stream.pipeline).to.have.been.calledWith( S3ReadStream, - sinon.match.instanceOf(StreamModule.Transform) + sinon.match.instanceOf(Transform) ) }) }) @@ -281,8 +291,8 @@ describe('S3PersistorTests', function() { let error, stream beforeEach(async function() { - S3ReadStream.on = sinon.stub() - S3ReadStream.on.withArgs('error').yields(S3NotFoundError) + Transform.prototype.on = sinon.stub() + Transform.prototype.on.withArgs('error').yields(S3NotFoundError) try { stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { @@ -311,8 +321,8 @@ describe('S3PersistorTests', function() { let error, stream beforeEach(async function() { - S3ReadStream.on = sinon.stub() - S3ReadStream.on.withArgs('error').yields(S3AccessDeniedError) + Transform.prototype.on = sinon.stub() + Transform.prototype.on.withArgs('error').yields(S3AccessDeniedError) try { stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { @@ -341,8 +351,8 @@ describe('S3PersistorTests', function() { let error, stream beforeEach(async function() { - S3ReadStream.on = sinon.stub() - S3ReadStream.on.withArgs('error').yields(genericError) + Transform.prototype.on = sinon.stub() + Transform.prototype.on.withArgs('error').yields(genericError) try { stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { @@ -485,7 +495,7 @@ describe('S3PersistorTests', function() { expect(S3Client.upload).to.have.been.calledWith({ 
Bucket: bucket, Key: key, - Body: sinon.match.instanceOf(StreamModule.Transform), + Body: sinon.match.instanceOf(Transform), ContentMD5: 'qqqqqru7u7uqqqqqu7u7uw==' }) }) @@ -560,7 +570,7 @@ describe('S3PersistorTests', function() { expect(S3Client.upload).to.have.been.calledWith({ Bucket: bucket, Key: key, - Body: sinon.match.instanceOf(StreamModule.Transform) + Body: sinon.match.instanceOf(Transform) }) }) }) From a1ae68f6b5c702581e9fbbb45f7013df166c6600 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 26 Mar 2020 16:44:46 +0000 Subject: [PATCH 489/555] Tidy up 'getReadyPipeline' --- services/filestore/app/js/PersistorHelper.js | 27 ++++++++----------- .../test/unit/js/GcsPersistorTests.js | 4 +-- .../test/unit/js/S3PersistorTests.js | 6 ++--- 3 files changed, 16 insertions(+), 21 deletions(-) diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js index f99dd78e45..f2d0013915 100644 --- a/services/filestore/app/js/PersistorHelper.js +++ b/services/filestore/app/js/PersistorHelper.js @@ -94,33 +94,28 @@ async function verifyMd5(persistor, bucket, key, sourceMd5, destMd5 = null) { // resolves when a stream is 'readable', or rejects if the stream throws an error // before that happens - this lets us handle protocol-level errors before trying -// to read them - these can come from the call to pipeline or the stream itself +// to read them function getReadyPipeline(...streams) { return new Promise((resolve, reject) => { const lastStream = streams.slice(-1)[0] let resolvedOrErrored = false - const onError = function(err) { + const handler = function(err) { if (!resolvedOrErrored) { resolvedOrErrored = true - reject( - wrapError(err, 'error before stream became ready', {}, ReadError) - ) - } - } - const onStreamReady = function() { - if (!resolvedOrErrored) { - resolvedOrErrored = true - lastStream.removeListener('readable', onStreamReady) - lastStream.removeListener('error', onError) + + lastStream.removeListener('readable', handler) + if (err) { + return reject( + wrapError(err, 'error before stream became ready', {}, ReadError) + ) + } resolve(lastStream) } } - pipeline(...streams).catch(onError) - - lastStream.on('readable', onStreamReady) - lastStream.on('error', onError) + pipeline(...streams).catch(handler) + lastStream.on('readable', handler) }) } diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index 2df42729ec..cc13c45ce7 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -202,7 +202,7 @@ describe('GcsPersistorTests', function() { beforeEach(async function() { Transform.prototype.on = sinon.stub() - Transform.prototype.on.withArgs('error').yields(GcsNotFoundError) + Stream.pipeline.yields(GcsNotFoundError) try { stream = await GcsPersistor.promises.getFileStream(bucket, key) } catch (err) { @@ -232,7 +232,7 @@ describe('GcsPersistorTests', function() { beforeEach(async function() { Transform.prototype.on = sinon.stub() - Transform.prototype.on.withArgs('error').yields(genericError) + Stream.pipeline.yields(genericError) try { stream = await GcsPersistor.promises.getFileStream(bucket, key) } catch (err) { diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js index 414179afd1..565e3e0bc9 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -292,7 
+292,7 @@ describe('S3PersistorTests', function() { beforeEach(async function() { Transform.prototype.on = sinon.stub() - Transform.prototype.on.withArgs('error').yields(S3NotFoundError) + Stream.pipeline.yields(S3NotFoundError) try { stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { @@ -322,7 +322,7 @@ describe('S3PersistorTests', function() { beforeEach(async function() { Transform.prototype.on = sinon.stub() - Transform.prototype.on.withArgs('error').yields(S3AccessDeniedError) + Stream.pipeline.yields(S3AccessDeniedError) try { stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { @@ -352,7 +352,7 @@ describe('S3PersistorTests', function() { beforeEach(async function() { Transform.prototype.on = sinon.stub() - Transform.prototype.on.withArgs('error').yields(genericError) + Stream.pipeline.yields(genericError) try { stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { From 64562dffb0a201a238beda51301e1a982130db67 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 26 Mar 2020 22:07:37 +0000 Subject: [PATCH 490/555] eslint-disable => eslint-disable-next-line --- services/filestore/test/acceptance/js/FilestoreTests.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index a2c710a382..7e3b197a9c 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -27,7 +27,7 @@ if (!process.env.AWS_ACCESS_KEY_ID) { } process.on('unhandledRejection', e => { - // eslint-disable no-console + // eslint-disable-next-line no-console console.log('** Unhandled Promise Rejection **\n', e) throw e }) From 4cb4d450be9c66408344b7176b9cb600d8a044b1 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 30 Mar 2020 15:03:09 +0100 Subject: [PATCH 491/555] Call end() when handing ERR_STREAM_PREMATURE_CLOSE --- services/filestore/app/js/FileController.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index 0e663f9421..e39afd67bb 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -61,7 +61,9 @@ function getFile(req, res, next) { } pipeline(fileStream, res, err => { - if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + if (err && err.code === 'ERR_STREAM_PREMATURE_CLOSE') { + res.end() + } else if (err) { next( new Errors.ReadError({ message: 'error transferring stream', From a067198619652ce288485fa63554c0babbd928d6 Mon Sep 17 00:00:00 2001 From: mserranom Date: Mon, 30 Mar 2020 16:05:25 +0200 Subject: [PATCH 492/555] updated minor and patch dependencies --- services/filestore/package-lock.json | 252 ++++++++++++++++----------- services/filestore/package.json | 36 ++-- 2 files changed, 170 insertions(+), 118 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index b8276d2e6f..8a551e6ce0 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -451,7 +451,7 @@ "arrify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==" + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" }, "debug": { "version": "3.2.6", @@ -604,9 
+604,9 @@ "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==" }, "@google-cloud/storage": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-4.4.0.tgz", - "integrity": "sha512-R64ey4dLIG3IgiKw0CL5MdZ4ZtZdGhN75171vjiL+ioZG+hlLFkjsrCTRuIdE35v42nNe5nXmVhBHQQTuPozHA==", + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-4.7.0.tgz", + "integrity": "sha512-f0guAlbeg7Z0m3gKjCfBCu7FG9qS3M3oL5OQQxlvGoPtK7/qg3+W+KQV73O2/sbuS54n0Kh2mvT5K2FWzF5vVQ==", "requires": { "@google-cloud/common": "^2.1.1", "@google-cloud/paginator": "^2.0.0", @@ -614,10 +614,10 @@ "arrify": "^2.0.0", "compressible": "^2.0.12", "concat-stream": "^2.0.0", - "date-and-time": "^0.12.0", + "date-and-time": "^0.13.0", "duplexify": "^3.5.0", "extend": "^3.0.2", - "gaxios": "^2.0.1", + "gaxios": "^3.0.0", "gcs-resumable-upload": "^2.2.4", "hash-stream-validation": "^0.2.2", "mime": "^2.2.0", @@ -632,6 +632,18 @@ "xdg-basedir": "^4.0.0" }, "dependencies": { + "gaxios": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.2.tgz", + "integrity": "sha512-cLOetrsKOBLPwjzVyFzirYaGjrhtYjbKUHp6fQpsio2HH8Mil35JTFQLgkV5D3CCXV7Gnd5V69/m4C9rMBi9bA==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, "mime": { "version": "2.4.4", "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", @@ -1324,9 +1336,9 @@ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "aws-sdk": { - "version": "2.628.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.628.0.tgz", - "integrity": "sha512-k0yQqqgTuwCWqPvLe09vGGSJ88oKaWPPCC6uyxQKv7pPQcz9cFZjmjLe6QdGJZeCmeb3rGIZlG+N4Z+9LnwCNg==", + "version": "2.648.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.648.0.tgz", + "integrity": "sha512-b+PdZmCFvZBisqXEH68jO4xB30LrDHQMWrEX6MJoZaOlxPJfpOqRFUH3zsiAXF5Q2jTdjYLtS5bs3vcIwRzi3Q==", "requires": { "buffer": "4.9.1", "events": "1.1.1", @@ -1413,7 +1425,7 @@ "bintrees": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", - "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" + "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" }, "bl": { "version": "2.2.0", @@ -1464,15 +1476,15 @@ "dev": true }, "bson": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.3.tgz", - "integrity": "sha512-TdiJxMVnodVS7r0BdL42y/pqC9cL2iKynVwA0Ho3qbsQYr428veL3l7BQyuqiw+Q5SqqoT0m4srSY/BlZ9AxXg==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.4.tgz", + "integrity": "sha512-S/yKGU1syOMzO86+dGpg2qGoDL0zvzcb262G+gqEy6TgP6rt6z6qxSFX/8X6vLC91P7G7C3nLs0+bvDzmvBA3Q==", "dev": true }, "buffer": { "version": "4.9.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", - "integrity": "sha512-DNK4ruAqtyHaN8Zne7PkBTO+dD1Lr0YfTduMqlIyjvQIoztBkUxrvL+hKeLW8NXFKHOq/2upkxuoS9znQ9bW9A==", + "integrity": "sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=", "requires": { "base64-js": "^1.0.2", "ieee754": "^1.1.4", @@ -1732,7 +1744,7 @@ "contains-path": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", - "integrity": "sha512-OKZnPGeMQy2RPaUIBPFFd71iNf4791H12MCRuVQDnzGRwCYNYmTDy5pdafo2SLAcEMKzTOQnLWG4QdcjeJUMEg==", + 
"integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", "dev": true }, "content-disposition": { @@ -1818,9 +1830,9 @@ } }, "date-and-time": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/date-and-time/-/date-and-time-0.12.0.tgz", - "integrity": "sha512-n2RJIAp93AucgF/U/Rz5WRS2Hjg5Z+QxscaaMCi6pVZT1JpJKRH+C08vyH/lRR1kxNXnPxgo3lWfd+jCb/UcuQ==" + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/date-and-time/-/date-and-time-0.13.1.tgz", + "integrity": "sha512-/Uge9DJAT+s+oAcDxtBhyR8+sKjUnZbYmyhbmWjTHNtX7B7oWD8YyYdeXcBRbwSj6hVvj+IQegJam7m7czhbFw==" }, "debug": { "version": "2.6.9", @@ -2005,9 +2017,9 @@ } }, "es-abstract": { - "version": "1.17.4", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.4.tgz", - "integrity": "sha512-Ae3um/gb8F0mui/jPL+QiqmglkUsaQf7FwBEHYIFkztkneosu9imhqHpBzQ3h1vit8t5iQ74t6PEVvphBZiuiQ==", + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", + "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", "dev": true, "requires": { "es-to-primitive": "^1.2.1", @@ -2042,7 +2054,7 @@ "es6-promisify": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha512-C+d6UdsYDk0lMebHNR4S2NybQMMngAOnOwYBQjTOiv0MkoJMP0Myw2mgpDLBcpfCmRLxyFqYhS/CfOENq4SJhQ==", + "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", "requires": { "es6-promise": "^4.0.3" } @@ -2136,18 +2148,18 @@ } }, "eslint-config-prettier": { - "version": "6.10.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.10.0.tgz", - "integrity": "sha512-AtndijGte1rPILInUdHjvKEGbIV06NuvPrqlIEaEaWtbtvJh464mDeyGMdZEQMsGvC0ZVkiex1fSNcC4HAbRGg==", + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.10.1.tgz", + "integrity": "sha512-svTy6zh1ecQojvpbJSgH3aei/Rt7C6i090l5f2WQ4aB05lYHeZIR1qL4wZyyILTbtmnbHP5Yn8MrsOJMGa8RkQ==", "dev": true, "requires": { "get-stdin": "^6.0.0" } }, "eslint-config-standard": { - "version": "14.1.0", - "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-14.1.0.tgz", - "integrity": "sha512-EF6XkrrGVbvv8hL/kYa/m6vnvmUT+K82pJJc4JJVMM6+Qgqh0pnwprSxdduDLB9p/7bIxD+YV5O0wfb8lmcPbA==", + "version": "14.1.1", + "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-14.1.1.tgz", + "integrity": "sha512-Z9B+VR+JIXRxz21udPTL9HpFMyoMUEeX1G251EQ6e05WD9aPVtVBn09XUmZ259wCMlCDmYDSZG62Hhm+ZTJcUg==", "dev": true }, "eslint-import-resolver-node": { @@ -2161,9 +2173,9 @@ } }, "eslint-module-utils": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.5.2.tgz", - "integrity": "sha512-LGScZ/JSlqGKiT8OC+cYRxseMjyqt6QO54nl281CK93unD89ijSeRV6An8Ci/2nvWVKe8K/Tqdm75RQoIOCr+Q==", + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.0.tgz", + "integrity": "sha512-6j9xxegbqe8/kZY8cYpcp0xhbK0EgJlg3g9mib3/miLaExuuwc3n5UEfSnU6hWMbT0FAYVvDbL9RrRgpUeQIvA==", "dev": true, "requires": { "debug": "^2.6.9", @@ -2183,15 +2195,24 @@ "dev": true }, "eslint-plugin-es": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-2.0.0.tgz", - "integrity": "sha512-f6fceVtg27BR02EYnBhgWLFQfK6bN4Ll0nQFrBHOlCsAyxeZkn0NHns5O0YZOPrV1B3ramd6cgFwaoFLcSkwEQ==", + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.0.tgz", + "integrity": "sha512-6/Jb/J/ZvSebydwbBJO1R9E5ky7YeElfK56Veh7e4QGFHCXoIXGH9HhVz+ibJLM3XJ1XjP+T7rKBLUa/Y7eIng==", "dev": true, "requires": { - "eslint-utils": "^1.4.2", + "eslint-utils": "^2.0.0", "regexpp": "^3.0.0" }, "dependencies": { + "eslint-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz", + "integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, "regexpp": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.0.0.tgz", @@ -2201,9 +2222,9 @@ } }, "eslint-plugin-import": { - "version": "2.20.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.20.1.tgz", - "integrity": "sha512-qQHgFOTjguR+LnYRoToeZWT62XM55MBVXObHM6SKFd1VzDcX/vqT1kAz8ssqigh5eMj8qXcRoXXGZpPP6RfdCw==", + "version": "2.20.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.20.2.tgz", + "integrity": "sha512-FObidqpXrR8OnCh4iNsxy+WACztJLXAHBO5hK79T1Hc77PgQZkyDGA5Ag9xAvRpglvLNxhH/zSmZ70/pZ31dHg==", "dev": true, "requires": { "array-includes": "^3.0.3", @@ -2223,7 +2244,7 @@ "doctrine": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", - "integrity": "sha512-lsGyRuYr4/PIB0txi+Fy2xOMI2dGaTguCaotzFGkVZuKR5usKfcRWIFKNM3QNrU7hh/+w2bwTW+ZeXPK5l8uVg==", + "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", "dev": true, "requires": { "esutils": "^2.0.2", @@ -2254,19 +2275,28 @@ } }, "eslint-plugin-node": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-10.0.0.tgz", - "integrity": "sha512-1CSyM/QCjs6PXaT18+zuAXsjXGIGo5Rw630rSKwokSs2jrYURQc4R5JZpoanNCqwNmepg+0eZ9L7YiRUJb8jiQ==", + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz", + "integrity": "sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==", "dev": true, "requires": { - "eslint-plugin-es": "^2.0.0", - "eslint-utils": "^1.4.2", + "eslint-plugin-es": "^3.0.0", + "eslint-utils": "^2.0.0", "ignore": "^5.1.1", "minimatch": "^3.0.4", "resolve": "^1.10.1", "semver": "^6.1.0" }, "dependencies": { + "eslint-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz", + "integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, "ignore": { "version": "5.1.4", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.4.tgz", @@ -2333,9 +2363,9 @@ }, "dependencies": { "acorn": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz", - "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz", + "integrity": "sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==", "dev": true } } @@ -2405,7 +2435,7 @@ "events": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", - "integrity": "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==" + 
"integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=" }, "express": { "version": "4.17.1", @@ -2532,7 +2562,7 @@ "find-up": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", "dev": true, "requires": { "locate-path": "^2.0.0" @@ -2541,7 +2571,7 @@ "findit2": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", - "integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==" + "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" }, "flat-cache": { "version": "2.0.1", @@ -2653,9 +2683,9 @@ } }, "gcs-resumable-upload": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-2.3.2.tgz", - "integrity": "sha512-OPS0iAmPCV+r7PziOIhyxmQOzsazFCy76yYDOS/Z80O/7cuny1KMfqDQa2T0jLaL8EreTU7EMZG5pUuqBKgzHA==", + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-2.3.3.tgz", + "integrity": "sha512-sf896I5CC/1AxeaGfSFg3vKMjUq/r+A3bscmVzZm10CElyRanN0XwPu/MxeIO4LSP+9uF6yKzXvNsaTsMXUG6Q==", "requires": { "abort-controller": "^3.0.0", "configstore": "^5.0.0", @@ -2877,9 +2907,9 @@ "integrity": "sha512-Yu+q/XWr2fFQ11tHxPq4p4EiNkb2y+lAacJNhAdRXVfRIcDH6gi7htWFnnlIzvqHMHoWeIsfXlNAjZInpAOJDA==" }, "hosted-git-info": { - "version": "2.8.7", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.7.tgz", - "integrity": "sha512-ChkjQtKJ3GI6SsI4O5jwr8q8EPrWCnxuc4Tbx+vRI5x6mDOpjKKltNo1lRlszw3xwgTOSns1ZRBiMmmwpcvLxg==", + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", + "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", "dev": true }, "http-errors": { @@ -3041,7 +3071,7 @@ "is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", "dev": true }, "is-buffer": { @@ -3151,7 +3181,7 @@ "jmespath": { "version": "0.15.0", "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", - "integrity": "sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w==" + "integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=" }, "js-tokens": { "version": "4.0.0", @@ -3258,7 +3288,7 @@ "load-json-file": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", - "integrity": "sha512-3p6ZOGNbiX4CdvEd1VcE6yi78UrGNpjHO33noGwHCnT/o2fyllJDepsm8+mFFv/DvtwFHht5HIHSyOy5a+ChVQ==", + "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", "dev": true, "requires": { "graceful-fs": "^4.1.2", @@ -3270,7 +3300,7 @@ "pify": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", "dev": true } } @@ -3278,7 +3308,7 @@ "locate-path": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": 
"sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", "dev": true, "requires": { "p-locate": "^2.0.0", @@ -3331,7 +3361,7 @@ "lodash.pickby": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", - "integrity": "sha512-AZV+GsS/6ckvPOVQPXSiFFacKvKB4kOQu6ynt9wz0F3LO4R9Ij4K1ddYsIytDpSgLz88JHd9P+oaLeej5/Sl7Q==" + "integrity": "sha1-feoh2MGNdwOifHBMFdO4SmfjOv8=" }, "lodash.unescape": { "version": "4.0.1", @@ -3481,7 +3511,7 @@ "lynx": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", - "integrity": "sha512-JI52N0NwK2b/Md0TFPdPtUBI46kjyJXF7+q08l2yvQ56q6QA8s7ZjZQQRoxFpS2jDXNf/B0p8ID+OIKcTsZwzw==", + "integrity": "sha1-Mxjc7xaQi4KG6Bisz9sxzXQkj50=", "requires": { "mersenne": "~0.0.3", "statsd-parser": "~0.0.4" @@ -3505,9 +3535,9 @@ }, "dependencies": { "minimist": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", - "integrity": "sha512-7Wl+Jz+IGWuSdgsQEJ4JunV0si/iMhg42MnQQG6h1R6TNeVenp4U9x5CC5v/gYqz/fENLQITAWXidNtVL0NNbw==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", "dev": true, "optional": true } @@ -3538,7 +3568,7 @@ "mersenne": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", - "integrity": "sha512-XoSUL+nF8hMTKGQxUs8r3Btdsf1yuKKBdCCGbh3YXgCXuVKishpZv1CNc385w9s8t4Ynwc5h61BwW/FCVulkbg==" + "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" }, "messageformat": { "version": "2.3.0", @@ -3569,9 +3599,9 @@ "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" }, "metrics-sharelatex": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.5.0.tgz", - "integrity": "sha512-JG4yBe5bEzUW5P//8aAUoexInPosPLOXxLS4AjGxMrP78BS5PSV7uVrY0Op6b6c7ZqKItHTtEjzsUfLRPGQ/sQ==", + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.6.2.tgz", + "integrity": "sha512-bOLfkSCexiPgB96hdXhoOWyvvrwscgjeZPEqdcJ7BTGxY59anzvymNf5hTGJ1RtS4sblDKxITw3L5a+gYKhRYQ==", "requires": { "@google-cloud/debug-agent": "^3.0.0", "@google-cloud/profiler": "^0.2.3", @@ -3674,7 +3704,7 @@ "module-details-from-path": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", - "integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==" + "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" }, "moment": { "version": "2.24.0", @@ -3683,9 +3713,9 @@ "optional": true }, "mongodb": { - "version": "3.5.4", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.5.4.tgz", - "integrity": "sha512-xGH41Ig4dkSH5ROGezkgDbsgt/v5zbNUwE3TcFsSbDc6Qn3Qil17dhLsESSDDPTiyFDCPJRpfd4887dtsPgKtA==", + "version": "3.5.5", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.5.5.tgz", + "integrity": "sha512-GCjDxR3UOltDq00Zcpzql6dQo1sVry60OXJY3TDmFc2SWFY6c8Gn1Ardidc5jDirvJrx2GC3knGOImKphbSL3A==", "dev": true, "requires": { "bl": "^2.2.0", @@ -3940,7 +3970,7 @@ "p-locate": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": 
"sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", "dev": true, "requires": { "p-limit": "^1.1.0" @@ -3958,7 +3988,7 @@ "p-try": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", "dev": true } } @@ -3985,7 +4015,7 @@ "parse-json": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha512-QR/GGaKCkhwk1ePQNYDRKYZ3mwU9ypsKhB0XyFnLQdomyEqk3e8wpW3V5Jp88zbxK4n5ST1nqo+g9juTpownhQ==", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", "dev": true, "requires": { "error-ex": "^1.2.0" @@ -4037,7 +4067,7 @@ "path-type": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha512-dUnb5dXUf+kzhC/W/F4e5/SkluXIFf5VUHolW1Eg1irn1hGWjPGdsRcvYJ1nD6lhk8Ir7VM0bHJKsYTx8Jx9OQ==", + "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", "dev": true, "requires": { "pify": "^2.0.0" @@ -4046,7 +4076,7 @@ "pify": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", "dev": true } } @@ -4070,7 +4100,7 @@ "pkg-dir": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", - "integrity": "sha512-ojakdnUgL5pzJYWw2AIDEupaQCX5OPbM688ZevubICjdIX01PRSYKqm33fJoCOJBRseYCTUlQRnBNX+Pchaejw==", + "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", "dev": true, "requires": { "find-up": "^2.1.0" @@ -4757,7 +4787,7 @@ "punycode": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==" + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" }, "qs": { "version": "6.7.0", @@ -4767,7 +4797,7 @@ "querystring": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==" + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" }, "quick-lru": { "version": "4.0.1", @@ -4824,7 +4854,7 @@ "read-pkg": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", - "integrity": "sha512-eFIBOPW7FGjzBuk3hdXEuNSiTZS/xEMlH49HxMyzb0hyPfu4EhVjT2DH32K1hSSmVq4sebAWnZuuY5auISUTGA==", + "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", "dev": true, "requires": { "load-json-file": "^2.0.0", @@ -4835,7 +4865,7 @@ "read-pkg-up": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", - "integrity": "sha512-1orxQfbWGUiTn9XsPlChs6rLie/AV9jwZTGmu2NZw/CUDJQchXJFYE0Fq5j7+n558T1JhDWLdhyd1Zj+wLY//w==", + "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", "dev": true, "requires": { "find-up": "^2.0.0", @@ -5116,7 +5146,7 @@ "sax": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", - "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==" + "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" }, "semver": { "version": "6.3.0", @@ -5357,7 +5387,7 @@ "statsd-parser": { "version": 
"0.0.4", "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", - "integrity": "sha512-7XO+ur89EalMXXFQaydsczB8sclr5nDsNIoUu0IzJx1pIbHUhO3LtpSzBwetIuU9DyTLMiVaJBMtWS/Nb2KR4g==" + "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" }, "statuses": { "version": "1.5.0", @@ -5415,24 +5445,46 @@ } } }, - "string.prototype.trimleft": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz", - "integrity": "sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag==", + "string.prototype.trimend": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.0.tgz", + "integrity": "sha512-EEJnGqa/xNfIg05SxiPSqRS7S9qwDhYts1TSLR1BQfYUfPe1stofgGKvwERK9+9yf+PpfBMlpBaCHucXGPQfUA==", "dev": true, "requires": { "define-properties": "^1.1.3", - "function-bind": "^1.1.1" + "es-abstract": "^1.17.5" + } + }, + "string.prototype.trimleft": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", + "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimstart": "^1.0.0" } }, "string.prototype.trimright": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz", - "integrity": "sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g==", + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", + "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", "dev": true, "requires": { "define-properties": "^1.1.3", - "function-bind": "^1.1.1" + "es-abstract": "^1.17.5", + "string.prototype.trimend": "^1.0.0" + } + }, + "string.prototype.trimstart": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.0.tgz", + "integrity": "sha512-iCP8g01NFYiiBOnwG1Xc3WZLyoo+RuBymwIlWncShXDDJYWN6DbnM3odslBJdgCdRlq94B5s63NWAZlcn2CS4w==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" } }, "string_decoder": { @@ -5463,7 +5515,7 @@ "strip-bom": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", "dev": true }, "strip-json-comments": { @@ -5526,7 +5578,7 @@ "tdigest": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", - "integrity": "sha512-CXcDY/NIgIbKZPx5H4JJNpq6JwJhU5Z4+yWj4ZghDc7/9nVajiRlPPyMXRePPPlBfcayUqtoCXjo7/Hm82ecUA==", + "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=", "requires": { "bintrees": "1.0.1" } @@ -5718,7 +5770,7 @@ "underscore": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", - "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" + "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" }, "unique-string": { "version": "2.0.0", @@ -5751,7 +5803,7 @@ "url": { "version": "0.10.3", 
"resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", - "integrity": "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==", + "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", "requires": { "punycode": "1.3.2", "querystring": "0.2.0" @@ -5976,7 +6028,7 @@ "xmlbuilder": { "version": "9.0.7", "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", - "integrity": "sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ==" + "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=" }, "xtend": { "version": "4.0.2", diff --git a/services/filestore/package.json b/services/filestore/package.json index c6291f6f50..5d87b914a1 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -20,47 +20,47 @@ "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" }, "dependencies": { - "@google-cloud/storage": "^4.3.0", + "@google-cloud/storage": "^4.7.0", "@overleaf/o-error": "^2.1.0", - "aws-sdk": "^2.628.0", - "body-parser": "^1.2.0", - "express": "^4.2.0", + "aws-sdk": "^2.648.0", + "body-parser": "^1.19.0", + "express": "^4.17.1", "glob": "^7.1.6", "lodash.once": "^4.1.1", "logger-sharelatex": "^1.9.1", - "metrics-sharelatex": "^2.5.0", - "node-uuid": "~1.4.1", - "range-parser": "^1.0.2", - "request": "^2.88.0", + "metrics-sharelatex": "^2.6.2", + "node-uuid": "~1.4.8", + "range-parser": "^1.2.1", + "request": "^2.88.2", "request-promise-native": "^1.0.8", "settings-sharelatex": "^1.1.0", "stream-buffers": "~0.2.5", "tiny-async-pool": "^1.1.0" }, "devDependencies": { - "babel-eslint": "^10.0.3", - "bunyan": "^1.3.5", + "babel-eslint": "^10.1.0", + "bunyan": "^1.8.12", "chai": "4.2.0", "chai-as-promised": "^7.1.1", "disrequire": "^1.1.0", - "eslint": "^6.4.0", - "eslint-config-prettier": "^6.7.0", - "eslint-config-standard": "^14.1.0", + "eslint": "^6.8.0", + "eslint-config-prettier": "^6.10.1", + "eslint-config-standard": "^14.1.1", "eslint-plugin-chai-expect": "^2.1.0", "eslint-plugin-chai-friendly": "^0.5.0", - "eslint-plugin-import": "^2.19.1", - "eslint-plugin-mocha": "^6.2.2", - "eslint-plugin-node": "^10.0.0", + "eslint-plugin-import": "^2.20.2", + "eslint-plugin-mocha": "^6.3.0", + "eslint-plugin-node": "^11.1.0", "eslint-plugin-prettier": "^3.1.2", "eslint-plugin-promise": "^4.2.1", "eslint-plugin-standard": "^4.0.1", "mocha": "5.2.0", - "mongodb": "^3.5.4", + "mongodb": "^3.5.5", "prettier-eslint": "^9.0.1", "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "2.0.3", "sinon": "7.1.1", - "sinon-chai": "^3.3.0", + "sinon-chai": "^3.5.0", "streamifier": "^0.1.1", "timekeeper": "^2.2.0" } From 70e4dd9fde54a259087cc17c1ac74bd3289ac3af Mon Sep 17 00:00:00 2001 From: mserranom Date: Mon, 30 Mar 2020 16:15:21 +0200 Subject: [PATCH 493/555] updated sinon, mocha --- services/filestore/package-lock.json | 499 ++++++++++++++++++++++----- services/filestore/package.json | 4 +- 2 files changed, 411 insertions(+), 92 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 8a551e6ce0..0151ac757d 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -975,34 +975,35 @@ "type-detect": "4.0.8" } }, + "@sinonjs/fake-timers": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz", + "integrity": "sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA==", + "dev": true, + "requires": { 
+ "@sinonjs/commons": "^1.7.0" + } + }, "@sinonjs/formatio": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.2.2.tgz", - "integrity": "sha512-B8SEsgd8gArBLMD6zpRw3juQ2FVSsmdd7qlevyDqzS9WTCtvF55/gAL+h6gue8ZvPYcdiPdvueM/qm//9XzyTQ==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-5.0.1.tgz", + "integrity": "sha512-KaiQ5pBf1MpS09MuA0kp6KBQt2JUOQycqVG1NZXvzeaXe5LGFqAKueIS0bw4w0P9r7KuBSVdUk5QjXsUdu2CxQ==", "dev": true, "requires": { "@sinonjs/commons": "^1", - "@sinonjs/samsam": "^3.1.0" - }, - "dependencies": { - "@sinonjs/samsam": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-3.3.3.tgz", - "integrity": "sha512-bKCMKZvWIjYD0BLGnNrxVuw4dkWCYsLqFOUWw8VgKF/+5Y+mE7LfHWPIYoDXowH+3a9LsWDMo0uAP8YDosPvHQ==", - "dev": true, - "requires": { - "@sinonjs/commons": "^1.3.0", - "array-from": "^2.1.1", - "lodash": "^4.17.15" - } - } + "@sinonjs/samsam": "^5.0.2" } }, "@sinonjs/samsam": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-2.1.3.tgz", - "integrity": "sha512-8zNeBkSKhU9a5cRNbpCKau2WWPfan+Q2zDlcXvXyhn9EsMqgYs4qzo0XHNVlXC6ABQL8fT6nV+zzo5RTHJzyXw==", - "dev": true + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-5.0.3.tgz", + "integrity": "sha512-QucHkc2uMJ0pFGjJUDP3F9dq5dx8QIaqISl9QgwLOh6P9yv877uONPGXh/OH/0zmM3tW1JjuJltAZV2l7zU+uQ==", + "dev": true, + "requires": { + "@sinonjs/commons": "^1.6.0", + "lodash.get": "^4.4.2", + "type-detect": "^4.0.8" + } }, "@sinonjs/text-encoding": { "version": "0.7.1", @@ -1211,6 +1212,12 @@ "uri-js": "^4.2.2" } }, + "ansi-colors": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.3.tgz", + "integrity": "sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw==", + "dev": true + }, "ansi-escapes": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.0.tgz", @@ -1243,6 +1250,16 @@ "color-convert": "^1.9.0" } }, + "anymatch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", + "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, "argparse": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", @@ -1257,12 +1274,6 @@ "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" }, - "array-from": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz", - "integrity": "sha512-GQTc6Uupx1FCavi5mPzBvVT7nEOeWMmUA9P95wpfpW1XwMSKs+KaymD5C2Up7KAUKg/mYwbsUYzdZWcoajlNZg==", - "dev": true - }, "array-includes": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.1.tgz", @@ -1414,6 +1425,12 @@ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ==" }, + "binary-extensions": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.0.0.tgz", + "integrity": 
"sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==", + "dev": true + }, "bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", @@ -1469,6 +1486,15 @@ "concat-map": "0.0.1" } }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, "browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", @@ -1596,6 +1622,22 @@ "integrity": "sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==", "dev": true }, + "chokidar": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.3.0.tgz", + "integrity": "sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A==", + "dev": true, + "requires": { + "anymatch": "~3.1.1", + "braces": "~3.0.2", + "fsevents": "~2.1.1", + "glob-parent": "~5.1.0", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.2.0" + } + }, "cli-cursor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", @@ -1675,12 +1717,6 @@ "delayed-stream": "~1.0.0" } }, - "commander": { - "version": "2.15.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", - "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==", - "dev": true - }, "common-tags": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", @@ -2545,6 +2581,15 @@ "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, "finalhandler": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", @@ -2573,6 +2618,15 @@ "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" }, + "flat": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/flat/-/flat-4.1.0.tgz", + "integrity": "sha512-Px/TiLIznH7gEDlPXcUD4KnBusa6kR6ayRUVcnEAbreRIuhkqow/mun59BuRXwoYk7ZQOLW1ZM05ilIvK38hFw==", + "dev": true, + "requires": { + "is-buffer": "~2.0.3" + } + }, "flat-cache": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", @@ -2649,6 +2703,13 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, + "fsevents": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.2.tgz", + "integrity": "sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA==", + "dev": true, + "optional": true + }, "function-bind": { "version": "1.1.1", "resolved": 
"https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -2896,9 +2957,9 @@ } }, "he": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "integrity": "sha512-z/GDPjlRMNOa2XJiB4em8wJpuuBfrFOlYKTZxtpkdr1uPdibHI8rYA3MY0KDObpVyaes0e/aunid/t88ZI2EKA==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", "dev": true }, "hex2dec": { @@ -3074,6 +3135,15 @@ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", "dev": true }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, "is-buffer": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", @@ -3112,6 +3182,12 @@ "is-extglob": "^2.1.1" } }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, "is-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", @@ -3333,7 +3409,7 @@ "lodash.get": { "version": "4.4.2", "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", - "integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==", + "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", "dev": true }, "lodash.has": { @@ -3374,6 +3450,15 @@ "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz", "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==" }, + "log-symbols": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-3.0.0.tgz", + "integrity": "sha512-dSkNGuI7iG3mfvDzUuYZyvk5dD9ocYCYzNU6CYDE6+Xqd+gwme6Z00NS3dUh8mq/73HaEtT7m6W+yUPtU6BZnQ==", + "dev": true, + "requires": { + "chalk": "^2.4.2" + } + }, "logger-sharelatex": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.1.tgz", @@ -3484,12 +3569,6 @@ } } }, - "lolex": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/lolex/-/lolex-3.1.0.tgz", - "integrity": "sha512-zFo5MgCJ0rZ7gQg69S4pqBsLURbFw11X68C18OcJjJQbqaXm2NoTrGl1IMM3TIz0/BnN1tIs2tzmmqvCsOMMjw==", - "dev": true - }, "long": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", @@ -3658,37 +3737,65 @@ } }, "mocha": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", - "integrity": "sha512-2IUgKDhc3J7Uug+FxMXuqIyYzH7gJjXECKe/w43IGgQHTSj3InJi+yAA7T24L9bQMRKiUEHxEX37G5JpVUGLcQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-7.1.1.tgz", + "integrity": "sha512-3qQsu3ijNS3GkWcccT5Zw0hf/rWvu1fTN9sPvEd81hlwsr30GX2GcDSSoBxo24IR8FelmrAydGC6/1J5QQP4WA==", "dev": true, "requires": { + "ansi-colors": "3.2.3", "browser-stdout": "1.3.1", - "commander": "2.15.1", - "debug": "3.1.0", + "chokidar": "3.3.0", + "debug": "3.2.6", "diff": "3.5.0", "escape-string-regexp": "1.0.5", - "glob": "7.1.2", + "find-up": "3.0.0", + "glob": "7.1.3", "growl": "1.10.5", - "he": "1.1.1", + "he": "1.2.0", + 
"js-yaml": "3.13.1", + "log-symbols": "3.0.0", "minimatch": "3.0.4", - "mkdirp": "0.5.1", - "supports-color": "5.4.0" + "mkdirp": "0.5.3", + "ms": "2.1.1", + "node-environment-flags": "1.0.6", + "object.assign": "4.1.0", + "strip-json-comments": "2.0.1", + "supports-color": "6.0.0", + "which": "1.3.1", + "wide-align": "1.1.3", + "yargs": "13.3.2", + "yargs-parser": "13.1.2", + "yargs-unparser": "1.6.0" }, "dependencies": { "debug": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", "dev": true, "requires": { - "ms": "2.0.0" + "ms": "^2.1.1" + } + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" } }, "glob": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", - "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==", + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", "dev": true, "requires": { "fs.realpath": "^1.0.0", @@ -3698,6 +3805,106 @@ "once": "^1.3.0", "path-is-absolute": "^1.0.0" } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true + }, + "mkdirp": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.3.tgz", + "integrity": "sha512-P+2gwrFqx8lhew375MQHHeTlY8AuOJSrGf0R5ddkEndUkmwpgUob/vQuBD1V22/Cw1/lJr4x+EjllSezBThzBg==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, + "ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", + "dev": true + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + 
"requires": { + "p-limit": "^2.0.0" + } + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "dev": true + }, + "supports-color": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.0.0.tgz", + "integrity": "sha512-on9Kwidc1IUQo+bQdhi8+Tijpo0e1SS6RoGo2guUwn5vdaxw8RXOF9Vb2ws+ihWOmh4JnCJOvaziZWP1VABaLg==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + }, + "yargs": { + "version": "13.3.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", + "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "dev": true, + "requires": { + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.1.2" + } + }, + "yargs-parser": { + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", + "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } } } }, @@ -3801,33 +4008,24 @@ "dev": true }, "nise": { - "version": "1.5.3", - "resolved": "https://registry.npmjs.org/nise/-/nise-1.5.3.tgz", - "integrity": "sha512-Ymbac/94xeIrMf59REBPOv0thr+CJVFMhrlAkW/gjCIE58BGQdCj0x7KRCb3yz+Ga2Rz3E9XXSvUyyxqqhjQAQ==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/nise/-/nise-4.0.3.tgz", + "integrity": "sha512-EGlhjm7/4KvmmE6B/UFsKh7eHykRl9VH+au8dduHLCyWUO/hr7+N+WtTvDUwc9zHuM1IaIJs/0lQ6Ag1jDkQSg==", "dev": true, "requires": { - "@sinonjs/formatio": "^3.2.1", + "@sinonjs/commons": "^1.7.0", + "@sinonjs/fake-timers": "^6.0.0", "@sinonjs/text-encoding": "^0.7.1", "just-extend": "^4.0.2", - "lolex": "^5.0.1", "path-to-regexp": "^1.7.0" }, "dependencies": { "isarray": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=", "dev": true }, - "lolex": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/lolex/-/lolex-5.1.2.tgz", - "integrity": "sha512-h4hmjAvHTmd+25JSwrtTIuwbKdwg5NzZVRMLn9saij4SZaepCrTCxPr35H/3bjwfMJtN+t3CX8672UIkglz28A==", - "dev": true, - "requires": { - "@sinonjs/commons": "^1.7.0" - } - }, "path-to-regexp": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", @@ -3839,6 +4037,24 @@ } } }, + "node-environment-flags": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/node-environment-flags/-/node-environment-flags-1.0.6.tgz", + "integrity": "sha512-5Evy2epuL+6TM0lCQGpFIj6KwiEsGh1SrHUhTbNX+sLbBtjidPZFAnVK9y5yU1+h//RitLbRHTIMyxQPtxMdHw==", + "dev": true, + "requires": { + 
"object.getownpropertydescriptors": "^2.0.3", + "semver": "^5.7.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, "node-fetch": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", @@ -3874,6 +4090,12 @@ } } }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, "oauth-sign": { "version": "0.9.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", @@ -3903,6 +4125,16 @@ "object-keys": "^1.0.11" } }, + "object.getownpropertydescriptors": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.0.tgz", + "integrity": "sha512-Z53Oah9A3TdLoblT7VKJaTDdXdT+lQO+cNpKVnya5JDe9uLvzu1YyY1yFDFrcxrlRgWrEFH0jJtD/IbuwjcEVg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1" + } + }, "object.values": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.1.tgz", @@ -4092,6 +4324,12 @@ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" }, + "picomatch": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", + "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "dev": true + }, "pify": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", @@ -4886,6 +5124,15 @@ "util-deprecate": "~1.0.1" } }, + "readdirp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.2.0.tgz", + "integrity": "sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ==", + "dev": true, + "requires": { + "picomatch": "^2.0.4" + } + }, "regexpp": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", @@ -5236,29 +5483,39 @@ "integrity": "sha512-meQNNykwecVxdu1RlYMKpQx4+wefIYpmxi6gexo/KAbwquJrBUrBmKYJrE8KFkVQAAVWEnwNdu21PgrD77J3xA==" }, "sinon": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/sinon/-/sinon-7.1.1.tgz", - "integrity": "sha512-iYagtjLVt1vN3zZY7D8oH7dkjNJEjLjyuzy8daX5+3bbQl8gaohrheB9VfH1O3L6LKuue5WTJvFluHiuZ9y3nQ==", + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.1.tgz", + "integrity": "sha512-iTTyiQo5T94jrOx7X7QLBZyucUJ2WvL9J13+96HMfm2CGoJYbIPqRfl6wgNcqmzk0DI28jeGx5bUTXizkrqBmg==", "dev": true, "requires": { - "@sinonjs/commons": "^1.2.0", - "@sinonjs/formatio": "^3.0.0", - "@sinonjs/samsam": "^2.1.2", - "diff": "^3.5.0", - "lodash.get": "^4.4.2", - "lolex": "^3.0.0", - "nise": "^1.4.6", - "supports-color": "^5.5.0", - "type-detect": "^4.0.8" + "@sinonjs/commons": "^1.7.0", + "@sinonjs/fake-timers": "^6.0.0", + "@sinonjs/formatio": "^5.0.1", + "@sinonjs/samsam": "^5.0.3", + "diff": "^4.0.2", + "nise": "^4.0.1", + "supports-color": "^7.1.0" }, "dependencies": { + "diff": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", + "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", "dev": true, "requires": { - "has-flag": "^3.0.0" + "has-flag": "^4.0.0" } } } @@ -5663,6 +5920,15 @@ "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", @@ -5944,6 +6210,48 @@ "integrity": "sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==", "dev": true }, + "wide-align": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", + "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "dev": true, + "requires": { + "string-width": "^1.0.2 || 2" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, "word-wrap": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", @@ -6132,6 +6440,17 @@ "decamelize": "^1.2.0" } }, + "yargs-unparser": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-1.6.0.tgz", + "integrity": "sha512-W9tKgmSn0DpSatfri0nx52Joq5hVXgeLiqR/5G0sZNDoLZFOr/xjBUDcShCOGNsBnEMNo1KAMBkTej1Hm62HTw==", + "dev": true, + "requires": { + "flat": "^4.1.0", + "lodash": 
"^4.17.15", + "yargs": "^13.3.0" + } + }, "yn": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 5d87b914a1..6da33d37ee 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -54,12 +54,12 @@ "eslint-plugin-prettier": "^3.1.2", "eslint-plugin-promise": "^4.2.1", "eslint-plugin-standard": "^4.0.1", - "mocha": "5.2.0", + "mocha": "7.1.1", "mongodb": "^3.5.5", "prettier-eslint": "^9.0.1", "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "2.0.3", - "sinon": "7.1.1", + "sinon": "9.0.1", "sinon-chai": "^3.5.0", "streamifier": "^0.1.1", "timekeeper": "^2.2.0" From 87b8f8e194130815382a2a4af994dad760cf569f Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 30 Mar 2020 17:13:40 +0100 Subject: [PATCH 494/555] Destroy file-download stream on error --- services/filestore/app/js/FileController.js | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index e39afd67bb..748026f76c 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -61,15 +61,18 @@ function getFile(req, res, next) { } pipeline(fileStream, res, err => { - if (err && err.code === 'ERR_STREAM_PREMATURE_CLOSE') { - res.end() - } else if (err) { - next( - new Errors.ReadError({ - message: 'error transferring stream', - info: { bucket, key, format, style } - }).withCause(err) - ) + if (err) { + fileStream.destroy() + if (err.code === 'ERR_STREAM_PREMATURE_CLOSE') { + res.end() + } else { + next( + new Errors.ReadError({ + message: 'error transferring stream', + info: { bucket, key, format, style } + }).withCause(err) + ) + } } }) }) From 5fadafaf961732012052b856c077b50f10c3636a Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 30 Mar 2020 17:14:46 +0100 Subject: [PATCH 495/555] Destroy streams handled by getReadyPipeline on error --- services/filestore/app/js/PersistorHelper.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js index f2d0013915..826412b302 100644 --- a/services/filestore/app/js/PersistorHelper.js +++ b/services/filestore/app/js/PersistorHelper.js @@ -112,6 +112,11 @@ function getReadyPipeline(...streams) { } resolve(lastStream) } + if (err) { + for (const stream of streams) { + stream.destroy() + } + } } pipeline(...streams).catch(handler) From f440cec8ec15f9df9513a2ddb1cffd9dd1138e0b Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 31 Mar 2020 11:07:31 +0100 Subject: [PATCH 496/555] Check if streams are already destroyed before destroying --- services/filestore/app/js/FileController.js | 22 ++++++++++---------- services/filestore/app/js/PersistorHelper.js | 4 +++- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index 748026f76c..72f68047ab 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -61,18 +61,18 @@ function getFile(req, res, next) { } pipeline(fileStream, res, err => { - if (err) { + if (!fileStream.destroyed) { fileStream.destroy() - if (err.code === 'ERR_STREAM_PREMATURE_CLOSE') { - res.end() - } else { - next( - new Errors.ReadError({ - message: 'error transferring stream', - info: { bucket, key, format, 
style } - }).withCause(err) - ) - } + } + if (err && err.code === 'ERR_STREAM_PREMATURE_CLOSE') { + res.end() + } else if (err) { + next( + new Errors.ReadError({ + message: 'error transferring stream', + info: { bucket, key, format, style } + }).withCause(err) + ) } }) }) diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js index 826412b302..99ac26aece 100644 --- a/services/filestore/app/js/PersistorHelper.js +++ b/services/filestore/app/js/PersistorHelper.js @@ -114,7 +114,9 @@ function getReadyPipeline(...streams) { } if (err) { for (const stream of streams) { - stream.destroy() + if (!stream.destroyed) { + stream.destroy() + } } } } From 44896704972d9b0d1bec1331999607dc986e2a6e Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 31 Mar 2020 15:38:42 +0100 Subject: [PATCH 497/555] Only delete the converted-cache folder if conversions are enabled --- services/filestore/app/js/FileHandler.js | 14 +++-- .../test/unit/js/FileHandlerTests.js | 57 +++++++++++++------ 2 files changed, 50 insertions(+), 21 deletions(-) diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index a6032350b1..6f49e05d7b 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -1,3 +1,4 @@ +const Settings = require('settings-sharelatex') const { callbackify } = require('util') const fs = require('fs') const PersistorManager = require('./PersistorManager') @@ -32,7 +33,9 @@ async function insertFile(bucket, key, stream) { info: { bucket, key, convertedKey } }) } - await PersistorManager.promises.deleteDirectory(bucket, convertedKey) + if (Settings.enableConversions) { + await PersistorManager.promises.deleteDirectory(bucket, convertedKey) + } await PersistorManager.promises.sendStream(bucket, key, stream) } @@ -44,10 +47,11 @@ async function deleteFile(bucket, key) { info: { bucket, key, convertedKey } }) } - await Promise.all([ - PersistorManager.promises.deleteFile(bucket, key), - PersistorManager.promises.deleteDirectory(bucket, convertedKey) - ]) + const jobs = [PersistorManager.promises.deleteFile(bucket, key)] + if (Settings.enableConversions) { + jobs.push(PersistorManager.promises.deleteDirectory(bucket, convertedKey)) + } + await Promise.all([jobs]) } async function deleteProject(bucket, key) { diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index 7823c9454f..60ee2553c3 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -15,14 +15,8 @@ describe('FileHandler', function() { KeyBuilder, ImageOptimiser, FileHandler, + Settings, fs - const settings = { - s3: { - buckets: { - user_files: 'user_files' - } - } - } const bucket = 'my_bucket' const key = `${ObjectId()}/${ObjectId()}` @@ -72,18 +66,19 @@ describe('FileHandler', function() { compressPng: sinon.stub().resolves() } } + Settings = {} fs = { createReadStream: sinon.stub().returns(readStream) } FileHandler = SandboxedModule.require(modulePath, { requires: { - 'settings-sharelatex': settings, './PersistorManager': PersistorManager, './LocalFileWriter': LocalFileWriter, './FileConverter': FileConverter, './KeyBuilder': KeyBuilder, './ImageOptimiser': ImageOptimiser, + 'settings-sharelatex': Settings, fs: fs }, globals: { console } @@ -105,12 +100,11 @@ describe('FileHandler', function() { }) }) - it('should delete the convertedKey folder', function(done) { + it('should 
not make a delete request for the convertedKey folder', function(done) { FileHandler.insertFile(bucket, key, stream, err => { expect(err).not.to.exist - expect( - PersistorManager.promises.deleteDirectory - ).to.have.been.calledWith(bucket, convertedFolderKey) + expect(PersistorManager.promises.deleteDirectory).not.to.have.been + .called done() }) }) @@ -122,6 +116,22 @@ describe('FileHandler', function() { done() }) }) + + describe('when conversions are enabled', function() { + beforeEach(function() { + Settings.enableConversions = true + }) + + it('should delete the convertedKey folder', function(done) { + FileHandler.insertFile(bucket, key, stream, err => { + expect(err).not.to.exist + expect( + PersistorManager.promises.deleteDirectory + ).to.have.been.calledWith(bucket, convertedFolderKey) + done() + }) + }) + }) }) describe('deleteFile', function() { @@ -136,12 +146,11 @@ describe('FileHandler', function() { }) }) - it('should tell the filestore manager to delete the cached folder', function(done) { + it('should not tell the filestore manager to delete the cached folder', function(done) { FileHandler.deleteFile(bucket, key, err => { expect(err).not.to.exist - expect( - PersistorManager.promises.deleteDirectory - ).to.have.been.calledWith(bucket, convertedFolderKey) + expect(PersistorManager.promises.deleteDirectory).not.to.have.been + .called done() }) }) @@ -153,6 +162,22 @@ describe('FileHandler', function() { done() }) }) + + describe('when conversions are enabled', function() { + beforeEach(function() { + Settings.enableConversions = true + }) + + it('should delete the convertedKey folder', function(done) { + FileHandler.deleteFile(bucket, key, err => { + expect(err).not.to.exist + expect( + PersistorManager.promises.deleteDirectory + ).to.have.been.calledWith(bucket, convertedFolderKey) + done() + }) + }) + }) }) describe('deleteProject', function() { From dceef85ccb20197ccca4368f53455fa4cd829b2a Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 31 Mar 2020 16:03:38 +0100 Subject: [PATCH 498/555] Update app/js/FileHandler.js Co-Authored-By: Jakob Ackermann --- services/filestore/app/js/FileHandler.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index 6f49e05d7b..056206b0fe 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -51,7 +51,7 @@ async function deleteFile(bucket, key) { if (Settings.enableConversions) { jobs.push(PersistorManager.promises.deleteDirectory(bucket, convertedKey)) } - await Promise.all([jobs]) + await Promise.all(jobs) } async function deleteProject(bucket, key) { From 60ede0bf46bb29f151d561e71343f93f5a3fe8ea Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 31 Mar 2020 17:00:39 +0100 Subject: [PATCH 499/555] Find more ways to destroy streams --- services/filestore/app/js/PersistorHelper.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js index 99ac26aece..b829e5ec45 100644 --- a/services/filestore/app/js/PersistorHelper.js +++ b/services/filestore/app/js/PersistorHelper.js @@ -106,11 +106,12 @@ function getReadyPipeline(...streams) { lastStream.removeListener('readable', handler) if (err) { - return reject( + reject( wrapError(err, 'error before stream became ready', {}, ReadError) ) + } else { + resolve(lastStream) } - resolve(lastStream) } if (err) { for (const stream 
of streams) { @@ -121,6 +122,9 @@ function getReadyPipeline(...streams) { } } + for (let index = 0; index < streams.length - 1; index++) { + streams[index + 1].on('close', () => streams[index].destroy()) + } pipeline(...streams).catch(handler) lastStream.on('readable', handler) }) From ccf5f8b9e8f3c00af92c529bbab845dcb0e3fa6a Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 2 Apr 2020 11:58:43 +0100 Subject: [PATCH 500/555] Add acceptance test for leaked sockets on aborted connections --- .../test/acceptance/js/FilestoreTests.js | 76 ++++++++++++++++++- 1 file changed, 72 insertions(+), 4 deletions(-) diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 7e3b197a9c..8382a48de5 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -17,10 +17,13 @@ const streamifier = require('streamifier') chai.use(require('chai-as-promised')) const { ObjectId } = require('mongodb') const tk = require('timekeeper') +const ChildProcess = require('child_process') const fsWriteFile = promisify(fs.writeFile) const fsStat = promisify(fs.stat) const pipeline = promisify(Stream.pipeline) +const exec = promisify(ChildProcess.exec) +const msleep = promisify(setTimeout) if (!process.env.AWS_ACCESS_KEY_ID) { throw new Error('please provide credentials for the AWS S3 test server') @@ -40,6 +43,41 @@ describe('Filestore', function() { this.timeout(1000 * 10) const filestoreUrl = `http://localhost:${Settings.internal.filestore.port}` + const seenSockets = [] + async function expectNoSockets() { + try { + await msleep(1000) + const { stdout } = await exec('ss -tnH') + + const badSockets = [] + for (const socket of stdout.split('\n')) { + const fields = socket.split(' ').filter(part => part !== '') + if ( + fields.length > 2 && + parseInt(fields[1]) && + !seenSockets.includes(socket) + ) { + badSockets.push(socket) + seenSockets.push(socket) + } + } + + if (badSockets.length) { + // eslint-disable-next-line no-console + console.error( + 'ERR: Sockets still have receive buffer after connection closed' + ) + for (const socket of badSockets) { + // eslint-disable-next-line no-console + console.error(socket) + } + throw new Error('Sockets still open after connection closed') + } + } catch (err) { + expect(err).not.to.exist + } + } + // redefine the test suite for every available backend Object.keys(BackendSettings).forEach(backend => { describe(backend, function() { @@ -71,7 +109,8 @@ describe('Filestore', function() { } after(async function() { - return app.stop() + await msleep(3000) + await app.stop() }) beforeEach(async function() { @@ -156,6 +195,11 @@ describe('Filestore', function() { expect(res.body).to.equal(constantFileContent) }) + it('should not leak a socket', async function() { + await rp.get(fileUrl) + await expectNoSockets() + }) + it('should be able to get back the first 9 bytes of the file', async function() { const options = { uri: fileUrl, @@ -378,6 +422,30 @@ describe('Filestore', function() { it('should not throw an error', function() { expect(error).not.to.exist }) + + it('should not leak a socket', async function() { + await rp.get(fileUrl) + await expectNoSockets() + }) + + it('should not leak a socket if the connection is aborted', async function() { + this.timeout(20000) + for (let i = 0; i < 5; i++) { + // test is not 100% reliable, so repeat + // create a new connection and have it time out before reading any data + await new 
Promise(resolve => { + const streamThatHangs = new Stream.PassThrough() + const stream = request({ url: fileUrl, timeout: 1000 }) + stream.pipe(streamThatHangs) + stream.on('error', () => { + stream.destroy() + streamThatHangs.destroy() + resolve() + }) + }) + await expectNoSockets() + } + }) }) if (backend === 'S3Persistor' || backend === 'FallbackGcsToS3Persistor') { @@ -554,7 +622,7 @@ describe('Filestore', function() { it('copies the file to the primary', async function() { await rp.get(fileUrl) // wait for the file to copy in the background - await promisify(setTimeout)(1000) + await msleep(1000) await TestHelper.expectPersistorToHaveFile( app.persistor.primaryPersistor, @@ -622,7 +690,7 @@ describe('Filestore', function() { it('should not copy the old file to the primary with the old key', async function() { // wait for the file to copy in the background - await promisify(setTimeout)(1000) + await msleep(1000) await TestHelper.expectPersistorNotToHaveFile( app.persistor.primaryPersistor, @@ -668,7 +736,7 @@ describe('Filestore', function() { it('should copy the old file to the primary with the old key', async function() { // wait for the file to copy in the background - await promisify(setTimeout)(1000) + await msleep(1000) await TestHelper.expectPersistorToHaveFile( app.persistor.primaryPersistor, From 114883a9e948d7ccd995fe68b6b93fc04f913911 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 2 Apr 2020 15:55:30 +0100 Subject: [PATCH 501/555] Ensure streams are always drained on close or error --- services/filestore/app/js/FileController.js | 3 - services/filestore/app/js/PersistorHelper.js | 69 ++++++++++++++------ 2 files changed, 48 insertions(+), 24 deletions(-) diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index 72f68047ab..e39afd67bb 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -61,9 +61,6 @@ function getFile(req, res, next) { } pipeline(fileStream, res, err => { - if (!fileStream.destroyed) { - fileStream.destroy() - } if (err && err.code === 'ERR_STREAM_PREMATURE_CLOSE') { res.end() } else if (err) { diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js index b829e5ec45..1c2512b690 100644 --- a/services/filestore/app/js/PersistorHelper.js +++ b/services/filestore/app/js/PersistorHelper.js @@ -23,6 +23,7 @@ class ObserverStream extends Stream.Transform { if (options.hash) { this.hash = crypto.createHash(options.hash) } + if (options.metric) { const onEnd = () => { metrics.count(options.metric, this.bytes) @@ -98,35 +99,61 @@ async function verifyMd5(persistor, bucket, key, sourceMd5, destMd5 = null) { function getReadyPipeline(...streams) { return new Promise((resolve, reject) => { const lastStream = streams.slice(-1)[0] - let resolvedOrErrored = false + // in case of error or stream close, we must ensure that we drain the + // previous stream so that it can clean up its socket (if it has one) + const drainPreviousStream = function(previousStream) { + // this stream is no longer reliable, so don't pipe anything more into it + previousStream.unpipe(this) + previousStream.resume() + } + + // handler to resolve when either: + // - an error happens, or + // - the last stream in the chain is readable + // for example, in the case of a 4xx error an error will occur and the + // streams will not become readable const handler = function(err) { - if (!resolvedOrErrored) { - resolvedOrErrored = true - - 
lastStream.removeListener('readable', handler) - if (err) { - reject( - wrapError(err, 'error before stream became ready', {}, ReadError) - ) - } else { - resolve(lastStream) - } + // remove handler from all streams because we don't want to do this on + // later errors + lastStream.removeListener('readable', handler) + for (const stream of streams) { + stream.removeListener('error', handler) } + + // return control to the caller if (err) { - for (const stream of streams) { - if (!stream.destroyed) { - stream.destroy() - } - } + reject( + wrapError(err, 'error before stream became ready', {}, ReadError) + ) + } else { + resolve(lastStream) } } - for (let index = 0; index < streams.length - 1; index++) { - streams[index + 1].on('close', () => streams[index].destroy()) - } - pipeline(...streams).catch(handler) + // ensure the handler fires when the last strem becomes readable lastStream.on('readable', handler) + + for (const stream of streams) { + // when a stream receives a pipe, set up the drain handler to drain the + // connection if an error occurs or the stream is closed + stream.on('pipe', previousStream => { + stream.on('error', x => { + drainPreviousStream(previousStream) + }) + stream.on('close', () => { + drainPreviousStream(previousStream) + }) + }) + // add the handler function to resolve this method on error if we can't + // set up the pipeline + stream.on('error', handler) + } + + // begin the pipeline + for (let index = 0; index < streams.length - 1; index++) { + streams[index].pipe(streams[index + 1]) + } }) } From 13ab7b5ca1a217d05ced890950e6a364b70a04f6 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 2 Apr 2020 16:49:45 +0100 Subject: [PATCH 502/555] Fix unit tests for pipeline changes --- .../test/unit/js/GcsPersistorTests.js | 22 +++++++------- .../test/unit/js/S3PersistorTests.js | 30 +++++++++++-------- 2 files changed, 30 insertions(+), 22 deletions(-) diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index cc13c45ce7..3c386e5002 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -61,12 +61,15 @@ describe('GcsPersistorTests', function() { ReadStream = { pipe: sinon.stub().returns('readStream'), - on: sinon - .stub() - .withArgs('end') - .yields(), + on: sinon.stub(), removeListener: sinon.stub() } + ReadStream.on.withArgs('end').yields() + ReadStream.on.withArgs('pipe').yields({ + unpipe: sinon.stub(), + resume: sinon.stub(), + on: sinon.stub() + }) Transform = class { on(event, callback) { @@ -168,8 +171,7 @@ describe('GcsPersistorTests', function() { }) it('pipes the stream through the meter', function() { - expect(Stream.pipeline).to.have.been.calledWith( - ReadStream, + expect(ReadStream.pipe).to.have.been.calledWith( sinon.match.instanceOf(Transform) ) }) @@ -202,11 +204,11 @@ describe('GcsPersistorTests', function() { beforeEach(async function() { Transform.prototype.on = sinon.stub() - Stream.pipeline.yields(GcsNotFoundError) + ReadStream.on.withArgs('error').yields(GcsNotFoundError) try { stream = await GcsPersistor.promises.getFileStream(bucket, key) - } catch (err) { - error = err + } catch (e) { + error = e } }) @@ -232,7 +234,7 @@ describe('GcsPersistorTests', function() { beforeEach(async function() { Transform.prototype.on = sinon.stub() - Stream.pipeline.yields(genericError) + ReadStream.on.withArgs('error').yields(genericError) try { stream = await GcsPersistor.promises.getFileStream(bucket, key) } 
catch (err) { diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js index 565e3e0bc9..c236de25ef 100644 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ b/services/filestore/test/unit/js/S3PersistorTests.js @@ -87,11 +87,14 @@ describe('S3PersistorTests', function() { ReadStream = { pipe: sinon.stub().returns('readStream'), - on: sinon - .stub() - .withArgs('end') - .yields() + on: sinon.stub(), + removeListener: sinon.stub() } + ReadStream.on.withArgs('end').yields() + ReadStream.on.withArgs('pipe').yields({ + unpipe: sinon.stub(), + resume: sinon.stub() + }) FileNotFoundError = new Error('File not found') FileNotFoundError.code = 'ENOENT' @@ -111,6 +114,11 @@ describe('S3PersistorTests', function() { pipe: sinon.stub(), removeListener: sinon.stub() } + S3ReadStream.on.withArgs('end').yields() + S3ReadStream.on.withArgs('pipe').yields({ + unpipe: sinon.stub(), + resume: sinon.stub() + }) S3Client = { getObject: sinon.stub().returns({ createReadStream: sinon.stub().returns(S3ReadStream) @@ -187,9 +195,8 @@ describe('S3PersistorTests', function() { }) }) - it('pipes the stream through the meter', function() { - expect(Stream.pipeline).to.have.been.calledWith( - S3ReadStream, + it('pipes the stream through the meter', async function() { + expect(S3ReadStream.pipe).to.have.been.calledWith( sinon.match.instanceOf(Transform) ) }) @@ -292,7 +299,7 @@ describe('S3PersistorTests', function() { beforeEach(async function() { Transform.prototype.on = sinon.stub() - Stream.pipeline.yields(S3NotFoundError) + S3ReadStream.on.withArgs('error').yields(S3NotFoundError) try { stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { @@ -322,7 +329,7 @@ describe('S3PersistorTests', function() { beforeEach(async function() { Transform.prototype.on = sinon.stub() - Stream.pipeline.yields(S3AccessDeniedError) + S3ReadStream.on.withArgs('error').yields(S3AccessDeniedError) try { stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { @@ -352,7 +359,7 @@ describe('S3PersistorTests', function() { beforeEach(async function() { Transform.prototype.on = sinon.stub() - Stream.pipeline.yields(genericError) + S3ReadStream.on.withArgs('error').yields(genericError) try { stream = await S3Persistor.promises.getFileStream(bucket, key) } catch (err) { @@ -544,8 +551,7 @@ describe('S3PersistorTests', function() { }) it('should meter the download', function() { - expect(Stream.pipeline).to.have.been.calledWith( - S3ReadStream, + expect(S3ReadStream.pipe).to.have.been.calledWith( sinon.match.instanceOf(Stream.Transform) ) }) From 003d54e8f0e6d6f0347ed0ba89f3401ac147ebd0 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Fri, 3 Apr 2020 11:57:33 -0400 Subject: [PATCH 503/555] Fix error handler returning two responses The error handler mistakenly sent two responses on error, the first being a 200. 
--- services/filestore/app/js/RequestLogger.js | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/services/filestore/app/js/RequestLogger.js b/services/filestore/app/js/RequestLogger.js index fa1fc14426..f68e4b4304 100644 --- a/services/filestore/app/js/RequestLogger.js +++ b/services/filestore/app/js/RequestLogger.js @@ -17,10 +17,7 @@ class RequestLogger { static errorHandler(err, req, res, next) { req.requestLogger.addFields({ error: err }) - res - .send(err.message) - .status(500) - .end() + res.status(500).send(err.message) } static middleware(req, res, next) { From 256cebf7a7f630c4ffb52d53c79e85327a625f08 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Fri, 3 Apr 2020 16:54:59 +0100 Subject: [PATCH 504/555] Add fast-crc32c library --- services/filestore/package-lock.json | 116 ++++++++++++++++----------- services/filestore/package.json | 1 + 2 files changed, 71 insertions(+), 46 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 0151ac757d..194a96382b 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -451,7 +451,7 @@ "arrify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" + "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==" }, "debug": { "version": "3.2.6", @@ -1442,7 +1442,7 @@ "bintrees": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", - "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" + "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" }, "bl": { "version": "2.2.0", @@ -1510,7 +1510,7 @@ "buffer": { "version": "4.9.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", - "integrity": "sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=", + "integrity": "sha512-DNK4ruAqtyHaN8Zne7PkBTO+dD1Lr0YfTduMqlIyjvQIoztBkUxrvL+hKeLW8NXFKHOq/2upkxuoS9znQ9bW9A==", "requires": { "base64-js": "^1.0.2", "ieee754": "^1.1.4", @@ -1780,7 +1780,7 @@ "contains-path": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", - "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", + "integrity": "sha512-OKZnPGeMQy2RPaUIBPFFd71iNf4791H12MCRuVQDnzGRwCYNYmTDy5pdafo2SLAcEMKzTOQnLWG4QdcjeJUMEg==", "dev": true }, "content-disposition": { @@ -2090,7 +2090,7 @@ "es6-promisify": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", + "integrity": "sha512-C+d6UdsYDk0lMebHNR4S2NybQMMngAOnOwYBQjTOiv0MkoJMP0Myw2mgpDLBcpfCmRLxyFqYhS/CfOENq4SJhQ==", "requires": { "es6-promise": "^4.0.3" } @@ -2280,7 +2280,7 @@ "doctrine": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", - "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", + "integrity": "sha512-lsGyRuYr4/PIB0txi+Fy2xOMI2dGaTguCaotzFGkVZuKR5usKfcRWIFKNM3QNrU7hh/+w2bwTW+ZeXPK5l8uVg==", "dev": true, "requires": { "esutils": "^2.0.2", @@ -2471,7 +2471,7 @@ "events": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", - "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=" + "integrity": "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==" }, "express": { "version": "4.17.1", @@ -2531,6 +2531,14 @@ "resolved": 
"https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==" }, + "fast-crc32c": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fast-crc32c/-/fast-crc32c-2.0.0.tgz", + "integrity": "sha512-LIREwygxtxzHF11oLJ4xIVKu/ZWNgrj/QaGvaSD8ZggIsgCyCtSYevlrpWVqNau57ZwezV8K1HFBSjQ7FcRbTQ==", + "requires": { + "sse4_crc32": "^6.0.1" + } + }, "fast-deep-equal": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz", @@ -2607,7 +2615,7 @@ "find-up": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "integrity": "sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==", "dev": true, "requires": { "locate-path": "^2.0.0" @@ -2616,7 +2624,7 @@ "findit2": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", - "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" + "integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==" }, "flat": { "version": "4.1.0", @@ -3132,7 +3140,7 @@ "is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", "dev": true }, "is-binary-path": { @@ -3257,7 +3265,7 @@ "jmespath": { "version": "0.15.0", "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", - "integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=" + "integrity": "sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w==" }, "js-tokens": { "version": "4.0.0", @@ -3364,7 +3372,7 @@ "load-json-file": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", - "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", + "integrity": "sha512-3p6ZOGNbiX4CdvEd1VcE6yi78UrGNpjHO33noGwHCnT/o2fyllJDepsm8+mFFv/DvtwFHht5HIHSyOy5a+ChVQ==", "dev": true, "requires": { "graceful-fs": "^4.1.2", @@ -3376,7 +3384,7 @@ "pify": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", "dev": true } } @@ -3384,7 +3392,7 @@ "locate-path": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "integrity": "sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==", "dev": true, "requires": { "p-locate": "^2.0.0", @@ -3409,7 +3417,7 @@ "lodash.get": { "version": "4.4.2", "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", - "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", + "integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==", "dev": true }, "lodash.has": { @@ -3437,7 +3445,7 @@ "lodash.pickby": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", - "integrity": "sha1-feoh2MGNdwOifHBMFdO4SmfjOv8=" + "integrity": 
"sha512-AZV+GsS/6ckvPOVQPXSiFFacKvKB4kOQu6ynt9wz0F3LO4R9Ij4K1ddYsIytDpSgLz88JHd9P+oaLeej5/Sl7Q==" }, "lodash.unescape": { "version": "4.0.1", @@ -3590,7 +3598,7 @@ "lynx": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", - "integrity": "sha1-Mxjc7xaQi4KG6Bisz9sxzXQkj50=", + "integrity": "sha512-JI52N0NwK2b/Md0TFPdPtUBI46kjyJXF7+q08l2yvQ56q6QA8s7ZjZQQRoxFpS2jDXNf/B0p8ID+OIKcTsZwzw==", "requires": { "mersenne": "~0.0.3", "statsd-parser": "~0.0.4" @@ -3647,7 +3655,7 @@ "mersenne": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", - "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" + "integrity": "sha512-XoSUL+nF8hMTKGQxUs8r3Btdsf1yuKKBdCCGbh3YXgCXuVKishpZv1CNc385w9s8t4Ynwc5h61BwW/FCVulkbg==" }, "messageformat": { "version": "2.3.0", @@ -3809,7 +3817,7 @@ "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", "dev": true }, "locate-path": { @@ -3866,7 +3874,7 @@ "strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", "dev": true }, "supports-color": { @@ -3911,7 +3919,7 @@ "module-details-from-path": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", - "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" + "integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==" }, "moment": { "version": "2.24.0", @@ -4023,7 +4031,7 @@ "isarray": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=", + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", "dev": true }, "path-to-regexp": { @@ -4037,6 +4045,12 @@ } } }, + "node-addon-api": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-1.7.1.tgz", + "integrity": "sha512-2+DuKodWvwRTrCfKOeR24KIc5unKjOh8mz17NCzVnHWfjAdDqbfbjqh7gUT+BkXBRQM52+xCHciKWonJ3CbJMQ==", + "optional": true + }, "node-environment-flags": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/node-environment-flags/-/node-environment-flags-1.0.6.tgz", @@ -4202,7 +4216,7 @@ "p-locate": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "integrity": "sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==", "dev": true, "requires": { "p-limit": "^1.1.0" @@ -4220,7 +4234,7 @@ "p-try": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "integrity": "sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==", "dev": true } } @@ -4247,7 +4261,7 @@ "parse-json": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "integrity": 
"sha512-QR/GGaKCkhwk1ePQNYDRKYZ3mwU9ypsKhB0XyFnLQdomyEqk3e8wpW3V5Jp88zbxK4n5ST1nqo+g9juTpownhQ==", "dev": true, "requires": { "error-ex": "^1.2.0" @@ -4299,7 +4313,7 @@ "path-type": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "integrity": "sha512-dUnb5dXUf+kzhC/W/F4e5/SkluXIFf5VUHolW1Eg1irn1hGWjPGdsRcvYJ1nD6lhk8Ir7VM0bHJKsYTx8Jx9OQ==", "dev": true, "requires": { "pify": "^2.0.0" @@ -4308,7 +4322,7 @@ "pify": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", "dev": true } } @@ -4338,7 +4352,7 @@ "pkg-dir": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", - "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "integrity": "sha512-ojakdnUgL5pzJYWw2AIDEupaQCX5OPbM688ZevubICjdIX01PRSYKqm33fJoCOJBRseYCTUlQRnBNX+Pchaejw==", "dev": true, "requires": { "find-up": "^2.1.0" @@ -5025,7 +5039,7 @@ "punycode": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" + "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==" }, "qs": { "version": "6.7.0", @@ -5035,7 +5049,7 @@ "querystring": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" + "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==" }, "quick-lru": { "version": "4.0.1", @@ -5092,7 +5106,7 @@ "read-pkg": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", - "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", + "integrity": "sha512-eFIBOPW7FGjzBuk3hdXEuNSiTZS/xEMlH49HxMyzb0hyPfu4EhVjT2DH32K1hSSmVq4sebAWnZuuY5auISUTGA==", "dev": true, "requires": { "load-json-file": "^2.0.0", @@ -5103,7 +5117,7 @@ "read-pkg-up": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", - "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", + "integrity": "sha512-1orxQfbWGUiTn9XsPlChs6rLie/AV9jwZTGmu2NZw/CUDJQchXJFYE0Fq5j7+n558T1JhDWLdhyd1Zj+wLY//w==", "dev": true, "requires": { "find-up": "^2.0.0", @@ -5272,7 +5286,7 @@ "resolve-from": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", - "integrity": "sha1-lICrIOlP+h2egKgEx+oUdhGWa1c=", + "integrity": "sha512-qpFcKaXsq8+oRoLilkwyc7zHGF5i9Q2/25NIgLQQ/+VVv9rU4qvr6nXVAw1DsnXJyQkZsR4Ytfbtg5ehfcUssQ==", "dev": true }, "semver": { @@ -5393,7 +5407,7 @@ "sax": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", - "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" + "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==" }, "semver": { "version": "6.3.0", @@ -5557,7 +5571,7 @@ "snakeize": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/snakeize/-/snakeize-0.1.0.tgz", - "integrity": "sha1-EMCI2LWOsHazIpu1oE4jLOEmQi0=" + "integrity": "sha512-ot3bb6pQt6IVq5G/JQ640ceSYTPtriVrwNyfoUw1LmQQGzPMAGxE5F+ded2UwSUCyf2PW1fFAYUnVEX21PWbpQ==" }, "source-map": { "version": "0.6.1", @@ -5567,7 +5581,7 @@ "sparse-bitfield": { "version": "3.0.3", "resolved": 
"https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", - "integrity": "sha1-/0rm5oZWBWuks+eSqzM004JzyhE=", + "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", "dev": true, "optional": true, "requires": { @@ -5620,6 +5634,16 @@ "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", "dev": true }, + "sse4_crc32": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/sse4_crc32/-/sse4_crc32-6.0.1.tgz", + "integrity": "sha512-FUTYXpLroqytNKWIfHzlDWoy9E4tmBB/RklNMy6w3VJs+/XEYAHgbiylg4SS43iOk/9bM0BlJ2EDpFAGT66IoQ==", + "optional": true, + "requires": { + "bindings": "^1.3.0", + "node-addon-api": "^1.3.0" + } + }, "sshpk": { "version": "1.16.1", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", @@ -5644,7 +5668,7 @@ "statsd-parser": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", - "integrity": "sha1-y9JDlTzELv/VSLXSI4jtaJ7GOb0=" + "integrity": "sha512-7XO+ur89EalMXXFQaydsczB8sclr5nDsNIoUu0IzJx1pIbHUhO3LtpSzBwetIuU9DyTLMiVaJBMtWS/Nb2KR4g==" }, "statuses": { "version": "1.5.0", @@ -5772,7 +5796,7 @@ "strip-bom": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", "dev": true }, "strip-json-comments": { @@ -5835,7 +5859,7 @@ "tdigest": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", - "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=", + "integrity": "sha512-CXcDY/NIgIbKZPx5H4JJNpq6JwJhU5Z4+yWj4ZghDc7/9nVajiRlPPyMXRePPPlBfcayUqtoCXjo7/Hm82ecUA==", "requires": { "bintrees": "1.0.1" } @@ -6017,7 +6041,7 @@ "typedarray": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", - "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=" + "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==" }, "typedarray-to-buffer": { "version": "3.1.5", @@ -6036,7 +6060,7 @@ "underscore": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", - "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" + "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" }, "unique-string": { "version": "2.0.0", @@ -6069,7 +6093,7 @@ "url": { "version": "0.10.3", "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", - "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", + "integrity": "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==", "requires": { "punycode": "1.3.2", "querystring": "0.2.0" @@ -6222,13 +6246,13 @@ "ansi-regex": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "integrity": "sha512-wFUFA5bg5dviipbQQ32yOQhl6gcJaJXiHE7dvR8VYPG97+J/GNC5FKGepKdEDUFeXRzDxPF1X/Btc8L+v7oqIQ==", "dev": true }, "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", "dev": true }, 
"string-width": { @@ -6244,7 +6268,7 @@ "strip-ansi": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "integrity": "sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==", "dev": true, "requires": { "ansi-regex": "^3.0.0" @@ -6336,7 +6360,7 @@ "xmlbuilder": { "version": "9.0.7", "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", - "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=" + "integrity": "sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ==" }, "xtend": { "version": "4.0.2", diff --git a/services/filestore/package.json b/services/filestore/package.json index 6da33d37ee..9ecb28617b 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -25,6 +25,7 @@ "aws-sdk": "^2.648.0", "body-parser": "^1.19.0", "express": "^4.17.1", + "fast-crc32c": "^2.0.0", "glob": "^7.1.6", "lodash.once": "^4.1.1", "logger-sharelatex": "^1.9.1", From 3637cd70aed3c2a17ea01b030c2516ebba09c4f5 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 9 Apr 2020 17:11:19 +0100 Subject: [PATCH 505/555] Add support for redirecting to signed URLs --- services/filestore/app/js/FSPersistor.js | 7 +++ services/filestore/app/js/FileController.js | 49 +++++++++------ services/filestore/app/js/FileHandler.js | 20 +++++++ services/filestore/app/js/GcsPersistor.js | 22 +++++++ .../filestore/app/js/MigrationPersistor.js | 2 + services/filestore/app/js/S3Persistor.js | 7 +++ .../filestore/config/settings.defaults.coffee | 3 + .../test/unit/js/FileControllerTests.js | 48 ++++++++++++++- .../test/unit/js/FileHandlerTests.js | 60 +++++++++++++++++++ .../test/unit/js/GcsPersistorTests.js | 20 ++++++- 10 files changed, 217 insertions(+), 21 deletions(-) diff --git a/services/filestore/app/js/FSPersistor.js b/services/filestore/app/js/FSPersistor.js index 4e514e3350..2e3b4985cb 100644 --- a/services/filestore/app/js/FSPersistor.js +++ b/services/filestore/app/js/FSPersistor.js @@ -74,6 +74,11 @@ async function getFileStream(location, name, opts) { return fs.createReadStream(null, opts) } +async function getRedirectUrl() { + // not implemented + return null +} + async function getFileSize(location, filename) { const fullPath = path.join(location, filterName(filename)) @@ -211,6 +216,7 @@ module.exports = { sendFile: callbackify(sendFile), sendStream: callbackify(sendStream), getFileStream: callbackify(getFileStream), + getRedirectUrl: callbackify(getRedirectUrl), getFileSize: callbackify(getFileSize), getFileMd5Hash: callbackify(getFileMd5Hash), copyFile: callbackify(copyFile), @@ -222,6 +228,7 @@ module.exports = { sendFile, sendStream, getFileStream, + getRedirectUrl, getFileSize, getFileMd5Hash, copyFile, diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index e39afd67bb..d72b4c841c 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -46,31 +46,42 @@ function getFile(req, res, next) { } } - FileHandler.getFile(bucket, key, options, function(err, fileStream) { + FileHandler.getRedirectUrl(bucket, key, options, function(err, redirectUrl) { if (err) { - if (err instanceof Errors.NotFoundError) { - res.sendStatus(404) - } else { - next(err) - } - return + metrics.inc('file_redirect_error') } - if (req.query.cacheWarm) { - return res.sendStatus(200).end() + if (redirectUrl) { + 
metrics.inc('file_redirect') + return res.redirect(redirectUrl) } - pipeline(fileStream, res, err => { - if (err && err.code === 'ERR_STREAM_PREMATURE_CLOSE') { - res.end() - } else if (err) { - next( - new Errors.ReadError({ - message: 'error transferring stream', - info: { bucket, key, format, style } - }).withCause(err) - ) + FileHandler.getFile(bucket, key, options, function(err, fileStream) { + if (err) { + if (err instanceof Errors.NotFoundError) { + res.sendStatus(404) + } else { + next(err) + } + return } + + if (req.query.cacheWarm) { + return res.sendStatus(200).end() + } + + pipeline(fileStream, res, err => { + if (err && err.code === 'ERR_STREAM_PREMATURE_CLOSE') { + res.end() + } else if (err) { + next( + new Errors.ReadError({ + message: 'error transferring stream', + info: { bucket, key, format, style } + }).withCause(err) + ) + } + }) }) }) } diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index 056206b0fe..b76bdff7a2 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -13,10 +13,12 @@ module.exports = { deleteFile: callbackify(deleteFile), deleteProject: callbackify(deleteProject), getFile: callbackify(getFile), + getRedirectUrl: callbackify(getRedirectUrl), getFileSize: callbackify(getFileSize), getDirectorySize: callbackify(getDirectorySize), promises: { getFile, + getRedirectUrl, insertFile, deleteFile, deleteProject, @@ -73,6 +75,24 @@ async function getFile(bucket, key, opts) { } } +async function getRedirectUrl(bucket, key, opts) { + // if we're doing anything unusual with options, or the request isn't for + // one of the default buckets, return null so that we proxy the file + opts = opts || {} + if ( + !opts.start && + !opts.end && + !opts.format && + !opts.style && + Object.values(Settings.filestore.stores).includes(bucket) && + Settings.filestore.allowRedirects + ) { + return PersistorManager.promises.getRedirectUrl(bucket, key) + } + + return null +} + async function getFileSize(bucket, key) { return PersistorManager.promises.getFileSize(bucket, key) } diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index 99a8c1a513..c3bf81f45d 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -36,6 +36,7 @@ const GcsPersistor = { sendFile: callbackify(sendFile), sendStream: callbackify(sendStream), getFileStream: callbackify(getFileStream), + getRedirectUrl: callbackify(getRedirectUrl), getFileMd5Hash: callbackify(getFileMd5Hash), deleteDirectory: callbackify(deleteDirectory), getFileSize: callbackify(getFileSize), @@ -47,6 +48,7 @@ const GcsPersistor = { sendFile, sendStream, getFileStream, + getRedirectUrl, getFileMd5Hash, deleteDirectory, getFileSize, @@ -141,6 +143,26 @@ async function getFileStream(bucketName, key, _opts = {}) { } } +async function getRedirectUrl(bucketName, key) { + try { + const [url] = await storage + .bucket(bucketName) + .file(key) + .getSignedUrl({ + action: 'read', + expires: new Date().getTime() + settings.filestore.signedUrlExpiryInMs + }) + return url + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error generating signed url for GCS file', + { bucketName, key }, + ReadError + ) + } +} + async function getFileSize(bucketName, key) { try { const [metadata] = await storage diff --git a/services/filestore/app/js/MigrationPersistor.js b/services/filestore/app/js/MigrationPersistor.js index d25ea84ce4..347dd58726 100644 --- 
a/services/filestore/app/js/MigrationPersistor.js +++ b/services/filestore/app/js/MigrationPersistor.js @@ -203,6 +203,7 @@ module.exports = function(primary, fallback) { sendFile: primary.sendFile, sendStream: primary.sendStream, getFileStream: callbackify(getFileStreamWithFallback), + getRedirectUrl: primary.getRedirectUrl, getFileMd5Hash: callbackify(_wrapFallbackMethod('getFileMd5Hash')), deleteDirectory: callbackify( _wrapMethodOnBothPersistors('deleteDirectory') @@ -216,6 +217,7 @@ module.exports = function(primary, fallback) { sendFile: primary.promises.sendFile, sendStream: primary.promises.sendStream, getFileStream: getFileStreamWithFallback, + getRedirectUrl: primary.promises.getRedirectUrl, getFileMd5Hash: _wrapFallbackMethod('getFileMd5Hash'), deleteDirectory: _wrapMethodOnBothPersistors('deleteDirectory'), getFileSize: _wrapFallbackMethod('getFileSize'), diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index 8216c5f7cb..7e9a66a0ab 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -24,6 +24,7 @@ const S3Persistor = { sendFile: callbackify(sendFile), sendStream: callbackify(sendStream), getFileStream: callbackify(getFileStream), + getRedirectUrl: callbackify(getRedirectUrl), getFileMd5Hash: callbackify(getFileMd5Hash), deleteDirectory: callbackify(deleteDirectory), getFileSize: callbackify(getFileSize), @@ -35,6 +36,7 @@ const S3Persistor = { sendFile, sendStream, getFileStream, + getRedirectUrl, getFileMd5Hash, deleteDirectory, getFileSize, @@ -146,6 +148,11 @@ async function getFileStream(bucketName, key, opts) { } } +async function getRedirectUrl() { + // not implemented + return null +} + async function deleteDirectory(bucketName, key) { let response diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 6867945d10..272230f918 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -70,6 +70,9 @@ settings = buckets: JSON.parse(process.env['FALLBACK_BUCKET_MAPPING'] || '{}') copyOnMiss: process.env['COPY_ON_MISS'] == 'true' + allowRedirects: if process.env['ALLOW_REDIRECTS'] == 'true' then true else false + signedUrlExpiryInMs: parseInt(process.env['LINK_EXPIRY_TIMEOUT'] || 60000) + path: uploadFolder: Path.resolve(__dirname + "/../uploads") diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js index 16fbb3641c..cd1d19ef02 100644 --- a/services/filestore/test/unit/js/FileControllerTests.js +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -42,7 +42,8 @@ describe('FileController', function() { deleteFile: sinon.stub().yields(), deleteProject: sinon.stub().yields(), insertFile: sinon.stub().yields(), - getDirectorySize: sinon.stub().yields(null, fileSize) + getDirectorySize: sinon.stub().yields(null, fileSize), + getRedirectUrl: sinon.stub().yields(null, null) } LocalFileWriter = {} @@ -91,6 +92,11 @@ describe('FileController', function() { }) describe('getFile', function() { + it('should try and get a redirect url first', function() { + FileController.getFile(req, res, next) + expect(FileHandler.getRedirectUrl).to.have.been.calledWith(bucket, key) + }) + it('should pipe the stream', function() { FileController.getFile(req, res, next) expect(stream.pipeline).to.have.been.calledWith(fileStream, res) @@ -111,6 +117,46 @@ describe('FileController', function() { 
expect(next).to.have.been.calledWith(error) }) + describe('with a redirect url', function() { + const redirectUrl = 'https://wombat.potato/giraffe' + + beforeEach(function() { + FileHandler.getRedirectUrl.yields(null, redirectUrl) + res.redirect = sinon.stub() + }) + + it('should redirect', function() { + FileController.getFile(req, res, next) + expect(res.redirect).to.have.been.calledWith(redirectUrl) + }) + + it('should not get a file stream', function() { + FileController.getFile(req, res, next) + expect(FileHandler.getFile).not.to.have.been.called + }) + + describe('when there is an error getting the redirect url', function() { + beforeEach(function() { + FileHandler.getRedirectUrl.yields(new Error('wombat herding error')) + }) + + it('should not redirect', function() { + FileController.getFile(req, res, next) + expect(res.redirect).not.to.have.been.called + }) + + it('should not return an error', function() { + FileController.getFile(req, res, next) + expect(next).not.to.have.been.called + }) + + it('should proxy the file', function() { + FileController.getFile(req, res, next) + expect(FileHandler.getFile).to.have.been.calledWith(bucket, key) + }) + }) + }) + describe('with a range header', function() { let expectedOptions diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index 60ee2553c3..f6a0c02fa4 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -24,6 +24,7 @@ describe('FileHandler', function() { const projectKey = `${ObjectId()}/` const sourceStream = 'sourceStream' const convertedKey = 'convertedKey' + const redirectUrl = 'https://wombat.potato/giraffe' const readStream = { stream: 'readStream', on: sinon.stub() @@ -33,6 +34,7 @@ describe('FileHandler', function() { PersistorManager = { promises: { getFileStream: sinon.stub().resolves(sourceStream), + getRedirectUrl: sinon.stub().resolves(redirectUrl), checkIfFileExists: sinon.stub().resolves(), deleteFile: sinon.stub().resolves(), deleteDirectory: sinon.stub().resolves(), @@ -299,6 +301,64 @@ describe('FileHandler', function() { }) }) + describe('getRedirectUrl', function() { + beforeEach(function() { + Settings.filestore = { + allowRedirects: true, + stores: { + userFiles: bucket + } + } + }) + + it('should return a redirect url', function(done) { + FileHandler.getRedirectUrl(bucket, key, (err, url) => { + expect(err).not.to.exist + expect(url).to.equal(redirectUrl) + done() + }) + }) + + it('should call the persistor to get a redirect url', function(done) { + FileHandler.getRedirectUrl(bucket, key, () => { + expect( + PersistorManager.promises.getRedirectUrl + ).to.have.been.calledWith(bucket, key) + done() + }) + }) + + it('should return null if options are supplied', function(done) { + FileHandler.getRedirectUrl( + bucket, + key, + { start: 100, end: 200 }, + (err, url) => { + expect(err).not.to.exist + expect(url).to.be.null + done() + } + ) + }) + + it('should return null if the bucket is not one of the defined ones', function(done) { + FileHandler.getRedirectUrl('a_different_bucket', key, (err, url) => { + expect(err).not.to.exist + expect(url).to.be.null + done() + }) + }) + + it('should return null if redirects are not enabled', function(done) { + Settings.filestore.allowRedirects = false + FileHandler.getRedirectUrl(bucket, key, (err, url) => { + expect(err).not.to.exist + expect(url).to.be.null + done() + }) + }) + }) + describe('getDirectorySize', function() { it('should 
call the filestore manager to get directory size', function(done) { FileHandler.getDirectorySize(bucket, key, err => { diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index 3c386e5002..07d132fcde 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -17,6 +17,7 @@ describe('GcsPersistorTests', function() { const filesSize = 33 const md5 = 'ffffffff00000000ffffffff00000000' const WriteStream = 'writeStream' + const redirectUrl = 'https://wombat.potato/giraffe' let Metrics, Logger, @@ -97,7 +98,8 @@ describe('GcsPersistorTests', function() { getMetadata: sinon.stub().resolves([files[0].metadata]), createWriteStream: sinon.stub().returns(WriteStream), copy: sinon.stub().resolves(), - exists: sinon.stub().resolves([true]) + exists: sinon.stub().resolves([true]), + getSignedUrl: sinon.stub().resolves([redirectUrl]) } GcsBucket = { @@ -260,6 +262,22 @@ describe('GcsPersistorTests', function() { }) }) + describe('getFile', function() { + let signedUrl + + beforeEach(async function() { + signedUrl = await GcsPersistor.promises.getRedirectUrl(bucket, key) + }) + + it('should request a signed URL', function() { + expect(GcsFile.getSignedUrl).to.have.been.called + }) + + it('should return the url', function() { + expect(signedUrl).to.equal(redirectUrl) + }) + }) + describe('getFileSize', function() { describe('when called with valid parameters', function() { let size From e014aeb10088ca1fe474bd1637a17f113da878e4 Mon Sep 17 00:00:00 2001 From: Miguel Serrano Date: Tue, 26 May 2020 17:21:21 +0200 Subject: [PATCH 506/555] Fixed regular expression to insert/delete files not supporting templates api --- services/filestore/app/js/FileHandler.js | 4 ++-- .../test/unit/js/FileHandlerTests.js | 20 +++++++++++++++++++ 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index b76bdff7a2..e5933ffca7 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -29,7 +29,7 @@ module.exports = { async function insertFile(bucket, key, stream) { const convertedKey = KeyBuilder.getConvertedFolderKey(key) - if (!convertedKey.match(/^[0-9a-f]{24}\/[0-9a-f]{24}/i)) { + if (!convertedKey.match(/^[0-9a-f]{24}\/([0-9a-f]{24}|v\/[0-9]+\/[a-z]+)/i)) { throw new InvalidParametersError({ message: 'key does not match validation regex', info: { bucket, key, convertedKey } @@ -43,7 +43,7 @@ async function insertFile(bucket, key, stream) { async function deleteFile(bucket, key) { const convertedKey = KeyBuilder.getConvertedFolderKey(key) - if (!convertedKey.match(/^[0-9a-f]{24}\/[0-9a-f]{24}/i)) { + if (!convertedKey.match(/^[0-9a-f]{24}\/([0-9a-f]{24}|v\/[0-9]+\/[a-z]+)/i)) { throw new InvalidParametersError({ message: 'key does not match validation regex', info: { bucket, key, convertedKey } diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index f6a0c02fa4..22fe8d1503 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -111,6 +111,16 @@ describe('FileHandler', function() { }) }) + it('should accept templates-api key format', function(done) { + KeyBuilder.getConvertedFolderKey.returns( + '5ecba29f1a294e007d0bccb4/v/0/pdf' + ) + FileHandler.insertFile(bucket, key, stream, err => { + expect(err).not.to.exist + 
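// The relaxed key check introduced above accepts two shapes: a 24-character
// hex project id followed by a 24-character hex file id, or the templates-api
// form <projectId>/v/<version>/<format>, such as the
// '5ecba29f1a294e007d0bccb4/v/0/pdf' key used in this test. Keys matching
// neither shape (e.g. 'wombat' in the test below) are rejected with an
// InvalidParametersError.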
done() + }) + }) + it('should throw an error when the key is in the wrong format', function(done) { KeyBuilder.getConvertedFolderKey.returns('wombat') FileHandler.insertFile(bucket, key, stream, err => { @@ -157,6 +167,16 @@ describe('FileHandler', function() { }) }) + it('should accept templates-api key format', function(done) { + KeyBuilder.getConvertedFolderKey.returns( + '5ecba29f1a294e007d0bccb4/v/0/pdf' + ) + FileHandler.deleteFile(bucket, key, err => { + expect(err).not.to.exist + done() + }) + }) + it('should throw an error when the key is in the wrong format', function(done) { KeyBuilder.getConvertedFolderKey.returns('wombat') FileHandler.deleteFile(bucket, key, err => { From 37e3531cc932fd4e36c76a17f873aa8df2e7a993 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 3 Jun 2020 09:59:54 +0100 Subject: [PATCH 507/555] Upgrade Node to 12.18.0 --- services/filestore/.eslintrc | 1 - services/filestore/.nvmrc | 2 +- services/filestore/.prettierrc | 1 - services/filestore/Dockerfile | 5 ++- services/filestore/Makefile | 43 +++++++++++++++++++----- services/filestore/buildscript.txt | 6 ++-- services/filestore/docker-compose.ci.yml | 1 - services/filestore/docker-compose.yml | 1 - services/filestore/package.json | 8 ++--- 9 files changed, 44 insertions(+), 24 deletions(-) diff --git a/services/filestore/.eslintrc b/services/filestore/.eslintrc index 7cc2ffbf8a..2e945d6ffb 100644 --- a/services/filestore/.eslintrc +++ b/services/filestore/.eslintrc @@ -1,7 +1,6 @@ // this file was auto-generated, do not edit it directly. // instead run bin/update_build_scripts from // https://github.com/sharelatex/sharelatex-dev-environment -// Version: 1.3.6 { "extends": [ "standard", diff --git a/services/filestore/.nvmrc b/services/filestore/.nvmrc index 66df3b7ab2..b06cd07c44 100644 --- a/services/filestore/.nvmrc +++ b/services/filestore/.nvmrc @@ -1 +1 @@ -12.16.1 +12.18.0 diff --git a/services/filestore/.prettierrc b/services/filestore/.prettierrc index b5f22cc658..24f9ec526f 100644 --- a/services/filestore/.prettierrc +++ b/services/filestore/.prettierrc @@ -1,7 +1,6 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.6 { "semi": false, "singleQuote": true diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index a652abb47c..0004f0efc7 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -1,9 +1,8 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.6 -FROM node:12.16.1 as base +FROM node:12.18.0 as base WORKDIR /app COPY install_deps.sh /app @@ -14,7 +13,7 @@ FROM base as app #wildcard as some files may not be in all repos COPY package*.json npm-shrink*.json /app/ -RUN npm install --quiet +RUN npm ci --quiet COPY . /app diff --git a/services/filestore/Makefile b/services/filestore/Makefile index ec1324e9a8..87c33e1bbd 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -1,11 +1,12 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.6 BUILD_NUMBER ?= local BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) PROJECT_NAME = filestore +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') + DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ BRANCH_NAME=$(BRANCH_NAME) \ @@ -13,6 +14,12 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ MOCHA_GREP=${MOCHA_GREP} \ docker-compose ${DOCKER_COMPOSE_FLAGS} +DOCKER_COMPOSE_TEST_ACCEPTANCE = \ + COMPOSE_PROJECT_NAME=test_acceptance_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) + +DOCKER_COMPOSE_TEST_UNIT = \ + COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) + clean: docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) @@ -29,23 +36,41 @@ lint: test: format lint test_unit test_acceptance test_unit: - @[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit + $(MAKE) test_unit_clean +endif -test_acceptance: test_clean test_acceptance_pre_run test_acceptance_run +test_clean: test_unit_clean +test_unit_clean: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 +endif -test_acceptance_debug: test_clean test_acceptance_pre_run test_acceptance_run_debug +test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run + $(MAKE) test_acceptance_clean + +test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug + $(MAKE) test_acceptance_clean test_acceptance_run: - @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance +endif test_acceptance_run_debug: - @[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +endif -test_clean: - $(DOCKER_COMPOSE) down -v -t 0 +test_clean: test_acceptance_clean +test_acceptance_clean: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 test_acceptance_pre_run: - @[ ! 
-f test/acceptance/js/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +endif build: docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index ae58ad6c18..5938593a5d 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -1,11 +1,11 @@ filestore --acceptance-creds= --data-dirs=uploads,user_files,template_files ---dependencies=s3 +--dependencies=s3,gcs --docker-repos=gcr.io/overleaf-ops --env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_BUCKET_NAME=fake_user_files,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake_template_files,AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake_public_files --env-pass-through= --language=es ---node-version=12.16.1 +--node-version=12.18.0 --public-repo=True ---script-version=1.3.6 +--script-version=2.2.0 diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index d3accee799..e6d4ddcc76 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -1,7 +1,6 @@ # This file was auto-generated, do not edit it directly. # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.6 version: "2.3" diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 54ef9c00c9..a0093ddb8e 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -1,7 +1,6 @@ # This file was auto-generated, do not edit it directly. 
# Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -# Version: 1.3.6 version: "2.3" diff --git a/services/filestore/package.json b/services/filestore/package.json index 9ecb28617b..8c06e83aac 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -45,13 +45,13 @@ "chai-as-promised": "^7.1.1", "disrequire": "^1.1.0", "eslint": "^6.8.0", - "eslint-config-prettier": "^6.10.1", - "eslint-config-standard": "^14.1.1", + "eslint-config-prettier": "^6.10.0", + "eslint-config-standard": "^14.1.0", "eslint-plugin-chai-expect": "^2.1.0", "eslint-plugin-chai-friendly": "^0.5.0", - "eslint-plugin-import": "^2.20.2", + "eslint-plugin-import": "^2.20.1", "eslint-plugin-mocha": "^6.3.0", - "eslint-plugin-node": "^11.1.0", + "eslint-plugin-node": "^11.0.0", "eslint-plugin-prettier": "^3.1.2", "eslint-plugin-promise": "^4.2.1", "eslint-plugin-standard": "^4.0.1", From 06e7c6f22a839544e01d6e7d4a3af06de3518204 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 3 Jun 2020 10:08:45 +0100 Subject: [PATCH 508/555] npm audit fix --- services/filestore/package-lock.json | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 194a96382b..835e106f18 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -1168,9 +1168,9 @@ } }, "acorn": { - "version": "6.4.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.0.tgz", - "integrity": "sha512-gac8OEcQ2Li1dxIEWGZzsp2BitJxwkwcOm0zHAJLcPJaVvm58FRnk6RkuLRpU1EujipU2ZFODv2P9DLMfnV8mw==" + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", + "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==" }, "acorn-jsx": { "version": "5.2.0", @@ -3732,16 +3732,16 @@ } }, "minimist": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha512-miQKw5Hv4NS1Psg2517mV4e4dYNaO3++hjAvLOAzKqZ61rH8NS1SK+vbfBWZ5PY/Me/bEWhUwqMghEW5Fb9T7Q==" + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" }, "mkdirp": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha512-SknJC52obPfGQPnjIkXbmA6+5H15E+fR+E4iR2oQ3zzCLbd7/ONua69R/Gw7AgkTLsRG+r5fzksYwWe1AgTyWA==", + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", "requires": { - "minimist": "0.0.8" + "minimist": "^1.2.5" } }, "mocha": { @@ -6455,9 +6455,9 @@ } }, "yargs-parser": { - "version": "13.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.1.tgz", - "integrity": "sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ==", + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", + "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", "dev": true, "requires": { "camelcase": "^5.0.0", From b37f97e2e3dfa7e0d452719a909bac4d65c38675 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 3 Jun 2020 10:52:10 
+0100 Subject: [PATCH 509/555] Pin fake-gcs to 1.18 --- services/filestore/test/acceptance/deps/Dockerfile.fake-gcs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs index 6acb2d63b4..4bbd1153ad 100644 --- a/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs +++ b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs @@ -1,4 +1,4 @@ -FROM fsouza/fake-gcs-server +FROM fsouza/fake-gcs-server:1.18.4 RUN apk add --update --no-cache curl COPY healthcheck.sh /healthcheck.sh HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090 From 0027d274739c7e5500f6dbae66ccc0f93bae345f Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 9 Jun 2020 10:01:20 +0100 Subject: [PATCH 510/555] Use latest version of gcs server for tests --- services/filestore/test/acceptance/deps/Dockerfile.fake-gcs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs index 4bbd1153ad..0e6de7e735 100644 --- a/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs +++ b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs @@ -1,4 +1,4 @@ -FROM fsouza/fake-gcs-server:1.18.4 +FROM fsouza/fake-gcs-server:latest RUN apk add --update --no-cache curl COPY healthcheck.sh /healthcheck.sh HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090 From 5d9f78322d2fc827be5ce1b4a46f2cffa16cba00 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 9 Jun 2020 10:01:47 +0100 Subject: [PATCH 511/555] Don't increment range end when passing to GCS backend --- services/filestore/app/js/GcsPersistor.js | 4 ---- 1 file changed, 4 deletions(-) diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js index c3bf81f45d..05425abee5 100644 --- a/services/filestore/app/js/GcsPersistor.js +++ b/services/filestore/app/js/GcsPersistor.js @@ -115,10 +115,6 @@ async function sendStream(bucketName, key, readStream, sourceMd5) { async function getFileStream(bucketName, key, _opts = {}) { const opts = Object.assign({}, _opts) - if (opts.end) { - // S3 (and http range headers) treat 'end' as inclusive, so increase this by 1 - opts.end++ - } const stream = storage .bucket(bucketName) .file(key) From 7b6299e05a60365a24727e0bc37098118babb153 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 9 Jun 2020 13:55:03 +0100 Subject: [PATCH 512/555] Update GcsPersistor range unit test --- services/filestore/test/unit/js/GcsPersistorTests.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index 07d132fcde..68db78d8a8 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -196,7 +196,7 @@ describe('GcsPersistorTests', function() { it('passes the byte range on to GCS', function() { expect(GcsFile.createReadStream).to.have.been.calledWith({ start: 5, - end: 11 // we increment the end because Google's 'end' is exclusive + end: 10 }) }) }) From 0e4be81406d05d7afe5764d471d306f30525b087 Mon Sep 17 00:00:00 2001 From: Ersun Warncke Date: Thu, 30 Apr 2020 08:20:40 -0400 Subject: [PATCH 513/555] upgrade to o-error v3 --- services/filestore/app/js/Errors.js | 44 
+++++---------- services/filestore/app/js/FSPersistor.js | 14 ++--- services/filestore/app/js/FileController.js | 8 +-- services/filestore/app/js/FileConverter.js | 13 ++--- services/filestore/app/js/FileHandler.js | 53 ++++++++++--------- services/filestore/app/js/LocalFileWriter.js | 11 ++-- .../filestore/app/js/MigrationPersistor.js | 16 +++--- services/filestore/app/js/PersistorHelper.js | 23 +++----- services/filestore/app/js/S3Persistor.js | 8 +-- services/filestore/app/js/SafeExec.js | 11 ++-- services/filestore/package-lock.json | 10 ++-- services/filestore/package.json | 2 +- .../test/unit/js/GcsPersistorTests.js | 2 +- 13 files changed, 96 insertions(+), 119 deletions(-) diff --git a/services/filestore/app/js/Errors.js b/services/filestore/app/js/Errors.js index 1beefb79c8..d2ba18c328 100644 --- a/services/filestore/app/js/Errors.js +++ b/services/filestore/app/js/Errors.js @@ -1,40 +1,20 @@ const OError = require('@overleaf/o-error') -// Error class for legacy errors so they inherit OError while staying -// backward-compatible (can be instantiated with string as argument instead -// of object) -class BackwardCompatibleError extends OError { - constructor(messageOrOptions) { - let options - if (typeof messageOrOptions === 'string') { - options = { message: messageOrOptions } - } else if (!messageOrOptions) { - options = {} - } else { - options = messageOrOptions - } - super(options) - } -} - -class NotFoundError extends BackwardCompatibleError {} -class WriteError extends BackwardCompatibleError {} -class ReadError extends BackwardCompatibleError {} -class HealthCheckError extends BackwardCompatibleError {} -class ConversionsDisabledError extends BackwardCompatibleError {} -class ConversionError extends BackwardCompatibleError {} -class SettingsError extends BackwardCompatibleError {} -class TimeoutError extends BackwardCompatibleError {} -class InvalidParametersError extends BackwardCompatibleError {} +class NotFoundError extends OError {} +class WriteError extends OError {} +class ReadError extends OError {} +class HealthCheckError extends OError {} +class ConversionsDisabledError extends OError {} +class ConversionError extends OError {} +class SettingsError extends OError {} +class TimeoutError extends OError {} +class InvalidParametersError extends OError {} class FailedCommandError extends OError { constructor(command, code, stdout, stderr) { - super({ - message: 'command failed with error exit code', - info: { - command, - code - } + super('command failed with error exit code', { + command, + code }) this.stdout = stdout this.stderr = stderr diff --git a/services/filestore/app/js/FSPersistor.js b/services/filestore/app/js/FSPersistor.js index 2e3b4985cb..60ee0f4053 100644 --- a/services/filestore/app/js/FSPersistor.js +++ b/services/filestore/app/js/FSPersistor.js @@ -46,9 +46,11 @@ async function sendStream(location, target, sourceStream, sourceMd5) { const destMd5 = await getFileMd5Hash(location, target) if (sourceMd5 !== destMd5) { await LocalFileWriter.deleteFile(`${location}/${filterName(target)}`) - throw new WriteError({ - message: 'md5 hash mismatch', - info: { sourceMd5, destMd5, location, target } + throw new WriteError('md5 hash mismatch', { + sourceMd5, + destMd5, + location, + target }) } } finally { @@ -100,9 +102,9 @@ async function getFileMd5Hash(location, filename) { try { return await _getFileMd5HashForPath(fullPath) } catch (err) { - throw new ReadError({ - message: 'unable to get md5 hash from file', - info: { location, filename } + throw new 
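// o-error v3, which this patch switches to, takes the message string and an
// info object as separate constructor arguments instead of a single options
// object, which is why the BackwardCompatibleError wrapper is removed from
// Errors.js above and each call site is rewritten in that form (still
// chaining .withCause(err) where a lower-level error is wrapped).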
ReadError('unable to get md5 hash from file', { + location, + filename }).withCause(err) } } diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index d72b4c841c..9e978c6a8b 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -75,9 +75,11 @@ function getFile(req, res, next) { res.end() } else if (err) { next( - new Errors.ReadError({ - message: 'error transferring stream', - info: { bucket, key, format, style } + new Errors.ReadError('error transferring stream', { + bucket, + key, + format, + style }).withCause(err) ) } diff --git a/services/filestore/app/js/FileConverter.js b/services/filestore/app/js/FileConverter.js index 5ef42cc493..aec8e3bb3a 100644 --- a/services/filestore/app/js/FileConverter.js +++ b/services/filestore/app/js/FileConverter.js @@ -69,9 +69,8 @@ async function preview(sourcePath) { async function _convert(sourcePath, requestedFormat, command) { if (!APPROVED_FORMATS.includes(requestedFormat)) { - throw new ConversionError({ - message: 'invalid format requested', - info: { format: requestedFormat } + throw new ConversionError('invalid format requested', { + format: requestedFormat }) } @@ -87,9 +86,11 @@ async function _convert(sourcePath, requestedFormat, command) { timeout: FOURTY_SECONDS }) } catch (err) { - throw new ConversionError({ - message: 'something went wrong converting file', - info: { stderr: err.stderr, sourcePath, requestedFormat, destPath } + throw new ConversionError('something went wrong converting file', { + stderr: err.stderr, + sourcePath, + requestedFormat, + destPath }).withCause(err) } diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index e5933ffca7..e3e93b33dd 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -30,9 +30,10 @@ module.exports = { async function insertFile(bucket, key, stream) { const convertedKey = KeyBuilder.getConvertedFolderKey(key) if (!convertedKey.match(/^[0-9a-f]{24}\/([0-9a-f]{24}|v\/[0-9]+\/[a-z]+)/i)) { - throw new InvalidParametersError({ - message: 'key does not match validation regex', - info: { bucket, key, convertedKey } + throw new InvalidParametersError('key does not match validation regex', { + bucket, + key, + convertedKey }) } if (Settings.enableConversions) { @@ -44,9 +45,10 @@ async function insertFile(bucket, key, stream) { async function deleteFile(bucket, key) { const convertedKey = KeyBuilder.getConvertedFolderKey(key) if (!convertedKey.match(/^[0-9a-f]{24}\/([0-9a-f]{24}|v\/[0-9]+\/[a-z]+)/i)) { - throw new InvalidParametersError({ - message: 'key does not match validation regex', - info: { bucket, key, convertedKey } + throw new InvalidParametersError('key does not match validation regex', { + bucket, + key, + convertedKey }) } const jobs = [PersistorManager.promises.deleteFile(bucket, key)] @@ -58,9 +60,9 @@ async function deleteFile(bucket, key) { async function deleteProject(bucket, key) { if (!key.match(/^[0-9a-f]{24}\//i)) { - throw new InvalidParametersError({ - message: 'key does not match validation regex', - info: { bucket, key } + throw new InvalidParametersError('key does not match validation regex', { + bucket, + key }) } await PersistorManager.promises.deleteDirectory(bucket, key) @@ -126,9 +128,11 @@ async function _getConvertedFileAndCache(bucket, key, convertedKey, opts) { ) } catch (err) { LocalFileWriter.deleteFile(convertedFsPath, () => {}) - throw new ConversionError({ - 
message: 'failed to convert file', - info: { opts, bucket, key, convertedKey } + throw new ConversionError('failed to convert file', { + opts, + bucket, + key, + convertedKey }).withCause(err) } // Send back the converted file from the local copy to avoid problems @@ -155,9 +159,10 @@ async function _convertFile(bucket, originalKey, opts) { try { originalFsPath = await _writeFileToDisk(bucket, originalKey, opts) } catch (err) { - throw new ConversionError({ - message: 'unable to write file to disk', - info: { bucket, originalKey, opts } + throw new ConversionError('unable to write file to disk', { + bucket, + originalKey, + opts }).withCause(err) } @@ -169,22 +174,20 @@ async function _convertFile(bucket, originalKey, opts) { } else if (opts.style === 'preview') { promise = FileConverter.promises.preview(originalFsPath) } else { - throw new ConversionError({ - message: 'invalid file conversion options', - info: { - bucket, - originalKey, - opts - } + throw new ConversionError('invalid file conversion options', { + bucket, + originalKey, + opts }) } let destPath try { destPath = await promise } catch (err) { - throw new ConversionError({ - message: 'error converting file', - info: { bucket, originalKey, opts } + throw new ConversionError('error converting file', { + bucket, + originalKey, + opts }).withCause(err) } LocalFileWriter.deleteFile(originalFsPath, function() {}) diff --git a/services/filestore/app/js/LocalFileWriter.js b/services/filestore/app/js/LocalFileWriter.js index 7af282a558..da5aeb7a3a 100644 --- a/services/filestore/app/js/LocalFileWriter.js +++ b/services/filestore/app/js/LocalFileWriter.js @@ -30,9 +30,9 @@ async function writeStream(stream, key) { } catch (err) { await deleteFile(fsPath) - throw new WriteError({ - message: 'problem writing file locally', - info: { err, fsPath } + throw new WriteError('problem writing file locally', { + err, + fsPath }).withCause(err) } } @@ -45,10 +45,7 @@ async function deleteFile(fsPath) { await promisify(fs.unlink)(fsPath) } catch (err) { if (err.code !== 'ENOENT') { - throw new WriteError({ - message: 'failed to delete file', - info: { fsPath } - }).withCause(err) + throw new WriteError('failed to delete file', { fsPath }).withCause(err) } } } diff --git a/services/filestore/app/js/MigrationPersistor.js b/services/filestore/app/js/MigrationPersistor.js index 347dd58726..2a9fe5d2a0 100644 --- a/services/filestore/app/js/MigrationPersistor.js +++ b/services/filestore/app/js/MigrationPersistor.js @@ -169,27 +169,27 @@ module.exports = function(primary, fallback) { await primary.promises.sendStream(destBucket, destKey, stream, sourceMd5) } catch (err) { - const error = new WriteError({ - message: 'unable to copy file to destination persistor', - info: { + const error = new WriteError( + 'unable to copy file to destination persistor', + { sourceBucket, destBucket, sourceKey, destKey } - }).withCause(err) + ).withCause(err) metrics.inc('fallback.copy.failure') try { await primary.promises.deleteFile(destBucket, destKey) } catch (err) { - error.info.cleanupError = new WriteError({ - message: 'unable to clean up destination copy artifact', - info: { + error.info.cleanupError = new WriteError( + 'unable to clean up destination copy artifact', + { destBucket, destKey } - }).withCause(err) + ).withCause(err) } logger.warn({ error }, 'failed to copy file from fallback') diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js index 1c2512b690..1a836a2b09 100644 --- 
a/services/filestore/app/js/PersistorHelper.js +++ b/services/filestore/app/js/PersistorHelper.js @@ -81,14 +81,11 @@ async function verifyMd5(persistor, bucket, key, sourceMd5, destMd5 = null) { logger.warn(err, 'error deleting file for invalid upload') } - throw new WriteError({ - message: 'source and destination hashes do not match', - info: { - sourceMd5, - destMd5, - bucket, - key - } + throw new WriteError('source and destination hashes do not match', { + sourceMd5, + destMd5, + bucket, + key }) } } @@ -165,15 +162,9 @@ function wrapError(error, message, params, ErrorType) { ) || (error.response && error.response.statusCode === 404) ) { - return new NotFoundError({ - message: 'no such file', - info: params - }).withCause(error) + return new NotFoundError('no such file', params).withCause(error) } else { - return new ErrorType({ - message: message, - info: params - }).withCause(error) + return new ErrorType(message, params).withCause(error) } } diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js index 7e9a66a0ab..f0df46f10d 100644 --- a/services/filestore/app/js/S3Persistor.js +++ b/services/filestore/app/js/S3Persistor.js @@ -322,10 +322,10 @@ function _getClientForBucket(bucket) { return _defaultClient } - throw new SettingsError({ - message: 'no bucket-specific or default credentials provided', - info: { bucket } - }) + throw new SettingsError( + 'no bucket-specific or default credentials provided', + { bucket } + ) } function _buildClientOptions(bucketCredentials) { diff --git a/services/filestore/app/js/SafeExec.js b/services/filestore/app/js/SafeExec.js index a9d1398441..5ee8e8830b 100644 --- a/services/filestore/app/js/SafeExec.js +++ b/services/filestore/app/js/SafeExec.js @@ -42,13 +42,10 @@ function safeExec(command, options, callback) { process.kill(-child.pid, options.killSignal || 'SIGTERM') } catch (error) { cleanup( - new FailedCommandError({ - message: 'failed to kill process after timeout', - info: { - command, - options, - pid: child.pid - } + new FailedCommandError('failed to kill process after timeout', { + command, + options, + pid: child.pid }) ) } diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 835e106f18..4753c14cd4 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -903,9 +903,8 @@ } }, "@overleaf/o-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-2.1.0.tgz", - "integrity": "sha512-Zd9sks9LrLw8ErHt/cXeWIkyxWAqNAvNGn7wIjLQJH6TTEEW835PWOhpch+hQwwWsTxWIx/JDj+IpZ3ouw925g==" + "version": "git://github.com/overleaf/o-error.git#14e515a195d6dbd3711c5a211a730752802d3b03", + "from": "git://github.com/overleaf/o-error.git#14e515a195d6dbd3711c5a211a730752802d3b03" }, "@protobufjs/aspromise": { "version": "1.1.2", @@ -3480,6 +3479,11 @@ "yn": "^3.1.1" }, "dependencies": { + "@overleaf/o-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-2.1.0.tgz", + "integrity": "sha512-Zd9sks9LrLw8ErHt/cXeWIkyxWAqNAvNGn7wIjLQJH6TTEEW835PWOhpch+hQwwWsTxWIx/JDj+IpZ3ouw925g==" + }, "qs": { "version": "6.5.2", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 8c06e83aac..5680bf33fb 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -21,7 +21,7 @@ }, "dependencies": { "@google-cloud/storage": "^4.7.0", - "@overleaf/o-error": 
"^2.1.0", + "@overleaf/o-error": "git://github.com/overleaf/o-error.git#14e515a195d6dbd3711c5a211a730752802d3b03", "aws-sdk": "^2.648.0", "body-parser": "^1.19.0", "express": "^4.17.1", diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index 68db78d8a8..1e886e3e0c 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -44,6 +44,7 @@ describe('GcsPersistorTests', function() { user_files: 'user_files' }, gcs: { + deleteConcurrency: 1, directoryKeyRegex: /^[0-9a-fA-F]{24}\/[0-9a-fA-F]{24}/ } } @@ -526,7 +527,6 @@ describe('GcsPersistorTests', function() { const directoryName = `${ObjectId()}/${ObjectId()}` describe('with valid parameters', function() { beforeEach(async function() { - console.log(key) return GcsPersistor.promises.deleteDirectory(bucket, directoryName) }) From e83f6b609bf7bc9a0fe1137d28e38693cd383b2c Mon Sep 17 00:00:00 2001 From: Ersun Warncke Date: Thu, 25 Jun 2020 08:27:14 -0400 Subject: [PATCH 514/555] pr fixes --- services/filestore/app/js/LocalFileWriter.js | 1 - services/filestore/package-lock.json | 5 +++-- services/filestore/package.json | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/filestore/app/js/LocalFileWriter.js b/services/filestore/app/js/LocalFileWriter.js index da5aeb7a3a..015f12fe33 100644 --- a/services/filestore/app/js/LocalFileWriter.js +++ b/services/filestore/app/js/LocalFileWriter.js @@ -31,7 +31,6 @@ async function writeStream(stream, key) { await deleteFile(fsPath) throw new WriteError('problem writing file locally', { - err, fsPath }).withCause(err) } diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 4753c14cd4..dff153da85 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -903,8 +903,9 @@ } }, "@overleaf/o-error": { - "version": "git://github.com/overleaf/o-error.git#14e515a195d6dbd3711c5a211a730752802d3b03", - "from": "git://github.com/overleaf/o-error.git#14e515a195d6dbd3711c5a211a730752802d3b03" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-3.0.0.tgz", + "integrity": "sha512-LsM2s6Iy9G97ktPo0ys4VxtI/m3ahc1ZHwjo5XnhXtjeIkkkVAehsrcRRoV/yWepPjymB0oZonhcfojpjYR/tg==" }, "@protobufjs/aspromise": { "version": "1.1.2", diff --git a/services/filestore/package.json b/services/filestore/package.json index 5680bf33fb..683d65271d 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -21,7 +21,7 @@ }, "dependencies": { "@google-cloud/storage": "^4.7.0", - "@overleaf/o-error": "git://github.com/overleaf/o-error.git#14e515a195d6dbd3711c5a211a730752802d3b03", + "@overleaf/o-error": "^3.0.0", "aws-sdk": "^2.648.0", "body-parser": "^1.19.0", "express": "^4.17.1", From 83f6eabaf616a87eb214e4195d42a8b13cef96f4 Mon Sep 17 00:00:00 2001 From: Ersun Warncke Date: Mon, 6 Jul 2020 09:26:59 -0400 Subject: [PATCH 515/555] upgarde logger-sharelatex --- services/filestore/package-lock.json | 305 +++++++++++++++++---------- services/filestore/package.json | 2 +- 2 files changed, 194 insertions(+), 113 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index dff153da85..d740b46ff6 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -346,9 +346,9 @@ } }, "@google-cloud/logging": { - "version": "7.2.3", - "resolved": 
"https://registry.npmjs.org/@google-cloud/logging/-/logging-7.2.3.tgz", - "integrity": "sha512-MLAlYVBihCs0e581n9VUYOPJcrSpwSdL7KAjy2wgQidmRD9aWjvg97F22JMCtvrUiFBmAjiJVtn1JyNIXlv0Yw==", + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.3.0.tgz", + "integrity": "sha512-xTW1V4MKpYC0mjSugyuiyUoZ9g6A42IhrrO3z7Tt3SmAb2IRj2Gf4RLoguKKncs340ooZFXrrVN/++t2Aj5zgg==", "requires": { "@google-cloud/common": "^2.2.2", "@google-cloud/paginator": "^2.0.0", @@ -368,16 +368,79 @@ "snakecase-keys": "^3.0.0", "stream-events": "^1.0.4", "through2": "^3.0.0", - "type-fest": "^0.11.0" + "type-fest": "^0.12.0" } }, "@google-cloud/logging-bunyan": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-2.0.3.tgz", - "integrity": "sha512-8n9MwsCRd4v8WZg17+d3m7qInud7lYTm5rpwXHY0/lzWEJYjeiztT09BiCYh56EEhHr+ynymJnzUDZKazkywlg==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-3.0.0.tgz", + "integrity": "sha512-ZLVXEejNQ27ktGcA3S/sd7GPefp7kywbn+/KoBajdb1Syqcmtc98jhXpYQBXVtNP2065iyu77s4SBaiYFbTC5A==", "requires": { "@google-cloud/logging": "^7.0.0", - "google-auth-library": "^5.0.0" + "google-auth-library": "^6.0.0" + }, + "dependencies": { + "gaxios": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.3.tgz", + "integrity": "sha512-PkzQludeIFhd535/yucALT/Wxyj/y2zLyrMwPcJmnLHDugmV49NvAi/vb+VUq/eWztATZCNcb8ue+ywPG+oLuw==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.0.tgz", + "integrity": "sha512-r57SV28+olVsflPlKyVig3Muo/VDlcsObMtvDGOEtEJXj+DDE8bEl0coIkXh//hbkSDTvo+f5lbihZOndYXQQQ==", + "requires": { + "gaxios": "^3.0.0", + "json-bigint": "^0.3.0" + } + }, + "google-auth-library": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.2.tgz", + "integrity": "sha512-o/F/GiOPzDc49v5/6vfrEz3gRXvES49qGP84rrl3SO0efJA/M52hFwv2ozd1EC1TPrLj75Moj3iPgKGuGs6smA==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^3.0.0", + "gcp-metadata": "^4.1.0", + "gtoken": "^5.0.0", + "jws": "^4.0.0", + "lru-cache": "^5.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.1.tgz", + "integrity": "sha512-VlQgtozgNVVVcYTXS36eQz4PXPt9gIPqLOhHN0QiV6W6h4qSCNVKPtKC5INtJsaHHF2r7+nOIa26MJeJMTaZEQ==", + "requires": { + "node-forge": "^0.9.0" + } + }, + "gtoken": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.1.tgz", + "integrity": "sha512-33w4FNDkUcyIOq/TqyC+drnKdI4PdXmWp9lZzssyEQKuvu9ZFN3KttaSnDKo52U3E51oujVGop93mKxmqO8HHg==", + "requires": { + "gaxios": "^3.0.0", + "google-p12-pem": "^3.0.0", + "jws": "^4.0.0", + "mime": "^2.2.0" + } + }, + "mime": { + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" + } } }, "@google-cloud/paginator": { @@ -850,17 +913,17 @@ } }, "@grpc/grpc-js": { - "version": "0.6.18", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-0.6.18.tgz", - "integrity": 
"sha512-uAzv/tM8qpbf1vpx1xPMfcUMzbfdqJtdCYAqY/LsLeQQlnTb4vApylojr+wlCyr7bZeg3AFfHvtihnNOQQt/nA==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.5.tgz", + "integrity": "sha512-Hm+xOiqAhcpT9RYM8lc15dbQD7aQurM7ZU8ulmulepiPlN7iwBXXwP3vSBUimoFoApRqz7pSIisXU8pZaCB4og==", "requires": { "semver": "^6.2.0" } }, "@grpc/proto-loader": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.3.tgz", - "integrity": "sha512-8qvUtGg77G2ZT2HqdqYoM/OY97gQd/0crSG34xNmZ4ZOsv3aQT/FQV9QfZPazTGna6MIoyUd+u6AxsoZjJ/VMQ==", + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.4.tgz", + "integrity": "sha512-HTM4QpI9B2XFkPz7pjwMyMgZchJ93TVkL3kWPW8GDMDKYxsMnmf4w2TNMJK7+KNiYHS5cJrCEAFlF+AwtXWVPA==", "requires": { "lodash.camelcase": "^4.3.0", "protobufjs": "^6.8.6" @@ -876,13 +939,6 @@ "semver": "^6.0.0", "shimmer": "^1.2.0", "uuid": "^3.2.1" - }, - "dependencies": { - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" - } } }, "@opencensus/propagation-stackdriver": { @@ -893,13 +949,6 @@ "@opencensus/core": "^0.0.20", "hex2dec": "^1.0.1", "uuid": "^3.2.1" - }, - "dependencies": { - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" - } } }, "@overleaf/o-error": { @@ -1041,9 +1090,9 @@ "dev": true }, "@types/fs-extra": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.0.tgz", - "integrity": "sha512-UoOfVEzAUpeSPmjm7h1uk5MH6KZma2z2O7a75onTGjnNvAvMVrPzPL/vBbT65iIGHWj6rokwfmYcmxmlSf2uwg==", + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.1.tgz", + "integrity": "sha512-TcUlBem321DFQzBNuz8p0CLLKp0VvF/XH9E4KHNmgwyp4E3AfgI5cjiIVZWlbfThBop2qxFIh4+LeY6hVWWZ2w==", "requires": { "@types/node": "*" } @@ -1536,6 +1585,7 @@ "version": "1.8.12", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", "integrity": "sha512-dmDUbGHeGcvCDLRFOscZkwx1ZO/aFz3bJOCi5nCgzdhFGPxwK+y5AcDBnqagNGlJZ7lje/l6JUEz9mQcutttdg==", + "dev": true, "requires": { "dtrace-provider": "~0.8", "moment": "^2.10.6", @@ -1616,6 +1666,11 @@ "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, + "charenc": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", + "integrity": "sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==" + }, "check-error": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", @@ -1847,6 +1902,11 @@ } } }, + "crypt": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", + "integrity": "sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==" + }, "crypto-random-string": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", @@ -2459,13 +2519,6 @@ "requires": { "d64": "^1.0.0", "uuid": "^3.0.1" - }, - "dependencies": { - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": 
"sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" - } } }, "events": { @@ -2835,11 +2888,11 @@ } }, "google-gax": { - "version": "1.14.2", - "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-1.14.2.tgz", - "integrity": "sha512-Nde+FdqALbV3QgMA4KlkxOHfrj9busnZ3EECwy/1gDJm9vhKGwDLWzErqRU5g80OoGSAMgyY7DWIfqz7ina4Jw==", + "version": "1.15.3", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-1.15.3.tgz", + "integrity": "sha512-3JKJCRumNm3x2EksUTw4P1Rad43FTpqrtW9jzpf3xSMYXx+ogaqTM1vGo7VixHB4xkAyATXVIa3OcNSh8H9zsQ==", "requires": { - "@grpc/grpc-js": "^0.6.18", + "@grpc/grpc-js": "~1.0.3", "@grpc/proto-loader": "^0.5.1", "@types/fs-extra": "^8.0.1", "@types/long": "^4.0.0", @@ -2850,10 +2903,37 @@ "lodash.at": "^4.6.0", "lodash.has": "^4.5.2", "node-fetch": "^2.6.0", - "protobufjs": "^6.8.8", + "protobufjs": "^6.8.9", "retry-request": "^4.0.0", "semver": "^6.0.0", "walkdir": "^0.4.0" + }, + "dependencies": { + "@types/node": { + "version": "13.13.12", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.12.tgz", + "integrity": "sha512-zWz/8NEPxoXNT9YyF2osqyA9WjssZukYpgI4UYZpOjcyqwIUqWGkcCionaEb9Ki+FULyPyvNFpg/329Kd2/pbw==" + }, + "protobufjs": { + "version": "6.9.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.9.0.tgz", + "integrity": "sha512-LlGVfEWDXoI/STstRDdZZKb/qusoAWUnmLg9R8OLSO473mBLWHowx8clbX5/+mKDEI+v7GzjoK9tRPZMMcoTrg==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", + "long": "^4.0.0" + } + } } }, "google-p12-pem": { @@ -3468,59 +3548,32 @@ } }, "logger-sharelatex": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-1.9.1.tgz", - "integrity": "sha512-9s6JQnH/PN+Js2CmI8+J3MQCTNlRzP2Dh4pcekXrV6Jm5J4HzyPi+6d3zfBskZ4NBmaUVw9hC4p5dmdaRmh4mQ==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-2.1.1.tgz", + "integrity": "sha512-qqSrBqUgHWnStxtTZ/fSsqPxj9Ju9onok7Vfm3bv5MS702jH+hRsCSA9oXOMvOLcWJrZFnhCZaLGeOvXToUaxw==", "requires": { - "@google-cloud/logging-bunyan": "^2.0.0", - "@overleaf/o-error": "^2.0.0", - "bunyan": "1.8.12", - "raven": "1.1.3", - "request": "2.88.0", - "yn": "^3.1.1" + "@google-cloud/logging-bunyan": "^3.0.0", + "@overleaf/o-error": "^3.0.0", + "bunyan": "^1.8.14", + "raven": "^2.6.4", + "yn": "^4.0.0" }, "dependencies": { - "@overleaf/o-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-2.1.0.tgz", - "integrity": "sha512-Zd9sks9LrLw8ErHt/cXeWIkyxWAqNAvNGn7wIjLQJH6TTEEW835PWOhpch+hQwwWsTxWIx/JDj+IpZ3ouw925g==" - }, - "qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" - }, - "request": { - "version": "2.88.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", + "bunyan": { + "version": "1.8.14", + "resolved": 
"https://registry.npmjs.org/bunyan/-/bunyan-1.8.14.tgz", + "integrity": "sha512-LlahJUxXzZLuw/hetUQJmRgZ1LF6+cr5TPpRj6jf327AsiIq2jhYEH4oqUUkVKTor+9w2BT3oxVwhzE5lw9tcg==", "requires": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "har-validator": "~5.1.0", - "http-signature": "~1.2.0", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "oauth-sign": "~0.9.0", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.4.3", - "tunnel-agent": "^0.6.0", - "uuid": "^3.3.2" + "dtrace-provider": "~0.8", + "moment": "^2.19.3", + "mv": "~2", + "safe-json-stringify": "~1" } }, - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + "yn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yn/-/yn-4.0.0.tgz", + "integrity": "sha512-huWiiCS4TxKc4SfgmTwW1K7JmXPPAmuXWYy4j9qjQo4+27Kni8mGhAAi1cloRWmBe2EqcLgt3IGqQoRL/MtPgg==" } } }, @@ -3595,11 +3648,6 @@ "yallist": "^3.0.2" } }, - "lsmod": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz", - "integrity": "sha512-Y+6V75r+mGWzWEPr9h6PFmStielICu5JBHLUg18jCsD2VFmEfgHbq/EgnY4inElsUD9eKL9id1qp34w46rSIKQ==" - }, "lynx": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", @@ -3640,6 +3688,23 @@ "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==" }, + "md5": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/md5/-/md5-2.2.1.tgz", + "integrity": "sha512-PlGG4z5mBANDGCKsYQe0CaUYHdZYZt8ZPZLmEt+Urf0W4GlpTX4HescwHU+dc9+Z/G/vZKYZYFrwgm9VxK6QOQ==", + "requires": { + "charenc": "~0.0.1", + "crypt": "~0.0.1", + "is-buffer": "~1.1.1" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + } + } + }, "media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -5074,15 +5139,15 @@ "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" }, "raven": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz", - "integrity": "sha512-RYov4wAaflZasWiCrZuizd3jNXxCOkW1WrXgWsGVb8kRpdHNZ+vPY27R6RhVtqzWp+DG9a5l6iP0QUPK4EgzaQ==", + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/raven/-/raven-2.6.4.tgz", + "integrity": "sha512-6PQdfC4+DQSFncowthLf+B6Hr0JpPsFBgTVYTAOq7tCmx/kR4SXbeawtPch20+3QfUcQDoJBLjWW1ybvZ4kXTw==", "requires": { "cookie": "0.3.1", - "json-stringify-safe": "5.0.1", - "lsmod": "1.0.0", - "stack-trace": "0.0.9", - "uuid": "3.0.0" + "md5": "^2.2.1", + "stack-trace": "0.0.10", + "timed-out": "4.0.1", + "uuid": "3.3.2" }, "dependencies": { "cookie": { @@ -5090,10 +5155,15 @@ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", "integrity": "sha512-+IJOX0OqlHCszo2mBUq+SrEbCj6w7Kpffqx60zYbPTFaO4+yYgRjHwcZNpWvaTylDHaV7PPmBHzSecZiMhtPgw==" }, + "stack-trace": { + "version": "0.0.10", + 
"resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": "sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==" + }, "uuid": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", - "integrity": "sha512-rqE1LoOVLv3QrZMjb4NkF5UWlkurCfPyItVnFPNKDDGkHw4dQUdE4zMcLqx28+0Kcf3+bnUk4PisaiRJT4aiaQ==" + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" } } }, @@ -5565,9 +5635,9 @@ } }, "snakecase-keys": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.1.2.tgz", - "integrity": "sha512-NrzHj8ctStnd1LYx3+L4buS7yildFum7WAbQQxkhPCNi3Qeqv7hoBne2c9n++HWxDG9Nv23pNEyyLCITZTv24Q==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.2.0.tgz", + "integrity": "sha512-WTJ0NhCH/37J+PU3fuz0x5b6TvtWQChTcKPOndWoUy0pteKOe0hrHMzSRsJOWSIP48EQkzUEsgQPmrG3W8pFNQ==", "requires": { "map-obj": "^4.0.0", "to-snake-case": "^1.0.0" @@ -5668,7 +5738,8 @@ "stack-trace": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", - "integrity": "sha512-vjUc6sfgtgY0dxCdnc40mK6Oftjo9+2K8H/NG81TMhgL392FtiPA9tn9RLyTxXmTLPJPjF3VyzFp6bsWFLisMQ==" + "integrity": "sha512-vjUc6sfgtgY0dxCdnc40mK6Oftjo9+2K8H/NG81TMhgL392FtiPA9tn9RLyTxXmTLPJPjF3VyzFp6bsWFLisMQ==", + "dev": true }, "statsd-parser": { "version": "0.0.4", @@ -5907,6 +5978,11 @@ "readable-stream": "2 || 3" } }, + "timed-out": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", + "integrity": "sha512-G7r3AhovYtr5YKOWQkta8RKAPb+J9IsO4uVmzjl8AZwfhs8UcUwTiD6gcJYSgOtzyjvQKrKYn41syHbUWMkafA==" + }, "timekeeper": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.2.0.tgz", @@ -6030,9 +6106,9 @@ "dev": true }, "type-fest": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", - "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==" + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.12.0.tgz", + "integrity": "sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg==" }, "type-is": { "version": "1.6.18", @@ -6114,6 +6190,11 @@ "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==" }, + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + }, "v8-compile-cache": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 683d65271d..f8f468cdde 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -28,7 +28,7 @@ "fast-crc32c": "^2.0.0", "glob": "^7.1.6", "lodash.once": "^4.1.1", - "logger-sharelatex": "^1.9.1", + "logger-sharelatex": "2.1.1", "metrics-sharelatex": "^2.6.2", "node-uuid": "~1.4.8", "range-parser": "^1.2.1", From 046c8d916bd61fca0e8e010af1b499c861122103 Mon Sep 17 
00:00:00 2001 From: Ersun Warncke Date: Mon, 6 Jul 2020 09:27:12 -0400 Subject: [PATCH 516/555] remove un-needed config change in unit test --- services/filestore/test/unit/js/GcsPersistorTests.js | 1 - 1 file changed, 1 deletion(-) diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js index 1e886e3e0c..027a63298b 100644 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ b/services/filestore/test/unit/js/GcsPersistorTests.js @@ -44,7 +44,6 @@ describe('GcsPersistorTests', function() { user_files: 'user_files' }, gcs: { - deleteConcurrency: 1, directoryKeyRegex: /^[0-9a-fA-F]{24}\/[0-9a-fA-F]{24}/ } } From 6c853de5d5c59fa608a18c9273f9ef416f2d9503 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 7 Jul 2020 13:49:54 +0100 Subject: [PATCH 517/555] Migrate to new object-persistor module (#122) * Migrate to new object-persistor module * Support updated persistor module using ES6 classes * Update object-persistor * Upgrade new persistor code to OError3 * Update to latest minor release for outdated packages * Update package hashes to sha512 * Point object-persistor at master branch --- services/filestore/.eslintrc | 2 +- services/filestore/app/js/Errors.js | 12 +- services/filestore/app/js/FSPersistor.js | 247 ----- services/filestore/app/js/FileController.js | 25 +- services/filestore/app/js/FileConverter.js | 11 +- services/filestore/app/js/FileHandler.js | 65 +- services/filestore/app/js/GcsPersistor.js | 305 ------ .../filestore/app/js/HealthCheckController.js | 2 +- services/filestore/app/js/LocalFileWriter.js | 6 +- .../filestore/app/js/MigrationPersistor.js | 230 ----- services/filestore/app/js/PersistorHelper.js | 177 ---- services/filestore/app/js/PersistorManager.js | 39 +- services/filestore/app/js/S3Persistor.js | 367 -------- services/filestore/package-lock.json | 656 ++++++++----- services/filestore/package.json | 35 +- .../test/acceptance/js/FilestoreApp.js | 1 + .../test/acceptance/js/FilestoreTests.js | 8 +- .../test/acceptance/js/TestHelper.js | 4 +- .../test/unit/js/FSPersistorTests.js | 353 ------- .../test/unit/js/FileControllerTests.js | 24 +- .../test/unit/js/FileConverterTests.js | 6 +- .../test/unit/js/FileHandlerTests.js | 84 +- .../test/unit/js/GcsPersistorTests.js | 683 -------------- .../test/unit/js/LocalFileWriterTests.js | 6 +- .../test/unit/js/MigrationPersistorTests.js | 519 ----------- .../test/unit/js/PersistorManagerTests.js | 78 -- .../test/unit/js/S3PersistorTests.js | 865 ------------------ .../filestore/test/unit/js/SafeExecTests.js | 6 +- 28 files changed, 586 insertions(+), 4230 deletions(-) delete mode 100644 services/filestore/app/js/FSPersistor.js delete mode 100644 services/filestore/app/js/GcsPersistor.js delete mode 100644 services/filestore/app/js/MigrationPersistor.js delete mode 100644 services/filestore/app/js/PersistorHelper.js delete mode 100644 services/filestore/app/js/S3Persistor.js delete mode 100644 services/filestore/test/unit/js/FSPersistorTests.js delete mode 100644 services/filestore/test/unit/js/GcsPersistorTests.js delete mode 100644 services/filestore/test/unit/js/MigrationPersistorTests.js delete mode 100644 services/filestore/test/unit/js/PersistorManagerTests.js delete mode 100644 services/filestore/test/unit/js/S3PersistorTests.js diff --git a/services/filestore/.eslintrc b/services/filestore/.eslintrc index 2e945d6ffb..76dad1561d 100644 --- a/services/filestore/.eslintrc +++ b/services/filestore/.eslintrc @@ -8,7 +8,7 @@ 
"prettier/standard" ], "parserOptions": { - "ecmaVersion": 2017 + "ecmaVersion": 2018 }, "plugins": [ "mocha", diff --git a/services/filestore/app/js/Errors.js b/services/filestore/app/js/Errors.js index d2ba18c328..6aa91a65f3 100644 --- a/services/filestore/app/js/Errors.js +++ b/services/filestore/app/js/Errors.js @@ -1,12 +1,9 @@ const OError = require('@overleaf/o-error') +const { Errors } = require('@overleaf/object-persistor') -class NotFoundError extends OError {} -class WriteError extends OError {} -class ReadError extends OError {} class HealthCheckError extends OError {} class ConversionsDisabledError extends OError {} class ConversionError extends OError {} -class SettingsError extends OError {} class TimeoutError extends OError {} class InvalidParametersError extends OError {} @@ -23,14 +20,11 @@ class FailedCommandError extends OError { } module.exports = { - NotFoundError, FailedCommandError, ConversionsDisabledError, - WriteError, - ReadError, ConversionError, HealthCheckError, - SettingsError, TimeoutError, - InvalidParametersError + InvalidParametersError, + ...Errors } diff --git a/services/filestore/app/js/FSPersistor.js b/services/filestore/app/js/FSPersistor.js deleted file mode 100644 index 60ee0f4053..0000000000 --- a/services/filestore/app/js/FSPersistor.js +++ /dev/null @@ -1,247 +0,0 @@ -const fs = require('fs') -const glob = require('glob') -const path = require('path') -const Stream = require('stream') -const { promisify, callbackify } = require('util') - -const LocalFileWriter = require('./LocalFileWriter').promises -const { NotFoundError, ReadError, WriteError } = require('./Errors') -const PersistorHelper = require('./PersistorHelper') - -const pipeline = promisify(Stream.pipeline) -const fsUnlink = promisify(fs.unlink) -const fsOpen = promisify(fs.open) -const fsStat = promisify(fs.stat) -const fsGlob = promisify(glob) - -const filterName = key => key.replace(/\//g, '_') - -async function sendFile(location, target, source) { - const filteredTarget = filterName(target) - - // actually copy the file (instead of moving it) to maintain consistent behaviour - // between the different implementations - try { - const sourceStream = fs.createReadStream(source) - const targetStream = fs.createWriteStream(`${location}/${filteredTarget}`) - await pipeline(sourceStream, targetStream) - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'failed to copy the specified file', - { location, target, source }, - WriteError - ) - } -} - -async function sendStream(location, target, sourceStream, sourceMd5) { - const fsPath = await LocalFileWriter.writeStream(sourceStream) - if (!sourceMd5) { - sourceMd5 = await _getFileMd5HashForPath(fsPath) - } - - try { - await sendFile(location, target, fsPath) - const destMd5 = await getFileMd5Hash(location, target) - if (sourceMd5 !== destMd5) { - await LocalFileWriter.deleteFile(`${location}/${filterName(target)}`) - throw new WriteError('md5 hash mismatch', { - sourceMd5, - destMd5, - location, - target - }) - } - } finally { - await LocalFileWriter.deleteFile(fsPath) - } -} - -// opts may be {start: Number, end: Number} -async function getFileStream(location, name, opts) { - const filteredName = filterName(name) - - try { - opts.fd = await fsOpen(`${location}/${filteredName}`, 'r') - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'failed to open file for streaming', - { location, filteredName, opts }, - ReadError - ) - } - - return fs.createReadStream(null, opts) -} - -async function getRedirectUrl() { - // not 
implemented - return null -} - -async function getFileSize(location, filename) { - const fullPath = path.join(location, filterName(filename)) - - try { - const stat = await fsStat(fullPath) - return stat.size - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'failed to stat file', - { location, filename }, - ReadError - ) - } -} - -async function getFileMd5Hash(location, filename) { - const fullPath = path.join(location, filterName(filename)) - try { - return await _getFileMd5HashForPath(fullPath) - } catch (err) { - throw new ReadError('unable to get md5 hash from file', { - location, - filename - }).withCause(err) - } -} - -async function copyFile(location, fromName, toName) { - const filteredFromName = filterName(fromName) - const filteredToName = filterName(toName) - - try { - const sourceStream = fs.createReadStream(`${location}/${filteredFromName}`) - const targetStream = fs.createWriteStream(`${location}/${filteredToName}`) - await pipeline(sourceStream, targetStream) - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'failed to copy file', - { location, filteredFromName, filteredToName }, - WriteError - ) - } -} - -async function deleteFile(location, name) { - const filteredName = filterName(name) - try { - await fsUnlink(`${location}/${filteredName}`) - } catch (err) { - const wrappedError = PersistorHelper.wrapError( - err, - 'failed to delete file', - { location, filteredName }, - WriteError - ) - if (!(wrappedError instanceof NotFoundError)) { - // S3 doesn't give us a 404 when a file wasn't there to be deleted, so we - // should be consistent here as well - throw wrappedError - } - } -} - -// this is only called internally for clean-up by `FileHandler` and isn't part of the external API -async function deleteDirectory(location, name) { - const filteredName = filterName(name.replace(/\/$/, '')) - - try { - await Promise.all( - (await fsGlob(`${location}/${filteredName}*`)).map(file => fsUnlink(file)) - ) - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'failed to delete directory', - { location, filteredName }, - WriteError - ) - } -} - -async function checkIfFileExists(location, name) { - const filteredName = filterName(name) - try { - const stat = await fsStat(`${location}/${filteredName}`) - return !!stat - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - throw PersistorHelper.wrapError( - err, - 'failed to stat file', - { location, filteredName }, - ReadError - ) - } -} - -// note, does not recurse into subdirectories, as we use a flattened directory structure -async function directorySize(location, name) { - const filteredName = filterName(name.replace(/\/$/, '')) - let size = 0 - - try { - const files = await fsGlob(`${location}/${filteredName}_*`) - for (const file of files) { - try { - const stat = await fsStat(file) - if (stat.isFile()) { - size += stat.size - } - } catch (err) { - // ignore files that may have just been deleted - if (err.code !== 'ENOENT') { - throw err - } - } - } - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'failed to get directory size', - { location, name }, - ReadError - ) - } - - return size -} - -module.exports = { - sendFile: callbackify(sendFile), - sendStream: callbackify(sendStream), - getFileStream: callbackify(getFileStream), - getRedirectUrl: callbackify(getRedirectUrl), - getFileSize: callbackify(getFileSize), - getFileMd5Hash: callbackify(getFileMd5Hash), - copyFile: callbackify(copyFile), - deleteFile: callbackify(deleteFile), - deleteDirectory: 
callbackify(deleteDirectory), - checkIfFileExists: callbackify(checkIfFileExists), - directorySize: callbackify(directorySize), - promises: { - sendFile, - sendStream, - getFileStream, - getRedirectUrl, - getFileSize, - getFileMd5Hash, - copyFile, - deleteFile, - deleteDirectory, - checkIfFileExists, - directorySize - } -} - -async function _getFileMd5HashForPath(fullPath) { - const stream = fs.createReadStream(fullPath) - return PersistorHelper.calculateStreamMd5(stream) -} diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index 9e978c6a8b..5d1f0aa5b1 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -75,12 +75,11 @@ function getFile(req, res, next) { res.end() } else if (err) { next( - new Errors.ReadError('error transferring stream', { - bucket, - key, - format, - style - }).withCause(err) + new Errors.ReadError( + 'error transferring stream', + { bucket, key, format, style }, + err + ) ) } }) @@ -139,23 +138,17 @@ function copyFile(req, res, next) { }) req.requestLogger.setMessage('copying file') - PersistorManager.copyFile( - bucket, - `${oldProjectId}/${oldFileId}`, - key, - function(err) { + PersistorManager.copyObject(bucket, `${oldProjectId}/${oldFileId}`, key) + .then(() => res.sendStatus(200)) + .catch(err => { if (err) { if (err instanceof Errors.NotFoundError) { res.sendStatus(404) } else { next(err) } - return } - - res.sendStatus(200) - } - ) + }) } function deleteFile(req, res, next) { diff --git a/services/filestore/app/js/FileConverter.js b/services/filestore/app/js/FileConverter.js index aec8e3bb3a..3258fa62fd 100644 --- a/services/filestore/app/js/FileConverter.js +++ b/services/filestore/app/js/FileConverter.js @@ -86,12 +86,11 @@ async function _convert(sourcePath, requestedFormat, command) { timeout: FOURTY_SECONDS }) } catch (err) { - throw new ConversionError('something went wrong converting file', { - stderr: err.stderr, - sourcePath, - requestedFormat, - destPath - }).withCause(err) + throw new ConversionError( + 'something went wrong converting file', + { stderr: err.stderr, sourcePath, requestedFormat, destPath }, + err + ) } timer.done() diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index e3e93b33dd..6ac25a3c9e 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -37,9 +37,9 @@ async function insertFile(bucket, key, stream) { }) } if (Settings.enableConversions) { - await PersistorManager.promises.deleteDirectory(bucket, convertedKey) + await PersistorManager.deleteDirectory(bucket, convertedKey) } - await PersistorManager.promises.sendStream(bucket, key, stream) + await PersistorManager.sendStream(bucket, key, stream) } async function deleteFile(bucket, key) { @@ -51,9 +51,9 @@ async function deleteFile(bucket, key) { convertedKey }) } - const jobs = [PersistorManager.promises.deleteFile(bucket, key)] + const jobs = [PersistorManager.deleteObject(bucket, key)] if (Settings.enableConversions) { - jobs.push(PersistorManager.promises.deleteDirectory(bucket, convertedKey)) + jobs.push(PersistorManager.deleteDirectory(bucket, convertedKey)) } await Promise.all(jobs) } @@ -65,13 +65,13 @@ async function deleteProject(bucket, key) { key }) } - await PersistorManager.promises.deleteDirectory(bucket, key) + await PersistorManager.deleteDirectory(bucket, key) } async function getFile(bucket, key, opts) { opts = opts || {} if (!opts.format && !opts.style) { - 
return PersistorManager.promises.getFileStream(bucket, key, opts) + return PersistorManager.getObjectStream(bucket, key, opts) } else { return _getConvertedFile(bucket, key, opts) } @@ -89,28 +89,28 @@ async function getRedirectUrl(bucket, key, opts) { Object.values(Settings.filestore.stores).includes(bucket) && Settings.filestore.allowRedirects ) { - return PersistorManager.promises.getRedirectUrl(bucket, key) + return PersistorManager.getRedirectUrl(bucket, key) } return null } async function getFileSize(bucket, key) { - return PersistorManager.promises.getFileSize(bucket, key) + return PersistorManager.getObjectSize(bucket, key) } async function getDirectorySize(bucket, projectId) { - return PersistorManager.promises.directorySize(bucket, projectId) + return PersistorManager.directorySize(bucket, projectId) } async function _getConvertedFile(bucket, key, opts) { const convertedKey = KeyBuilder.addCachingToKey(key, opts) - const exists = await PersistorManager.promises.checkIfFileExists( + const exists = await PersistorManager.checkIfObjectExists( bucket, convertedKey ) if (exists) { - return PersistorManager.promises.getFileStream(bucket, convertedKey, opts) + return PersistorManager.getObjectStream(bucket, convertedKey, opts) } else { return _getConvertedFileAndCache(bucket, key, convertedKey, opts) } @@ -121,19 +121,14 @@ async function _getConvertedFileAndCache(bucket, key, convertedKey, opts) { try { convertedFsPath = await _convertFile(bucket, key, opts) await ImageOptimiser.promises.compressPng(convertedFsPath) - await PersistorManager.promises.sendFile( - bucket, - convertedKey, - convertedFsPath - ) + await PersistorManager.sendFile(bucket, convertedKey, convertedFsPath) } catch (err) { LocalFileWriter.deleteFile(convertedFsPath, () => {}) - throw new ConversionError('failed to convert file', { - opts, - bucket, - key, - convertedKey - }).withCause(err) + throw new ConversionError( + 'failed to convert file', + { opts, bucket, key, convertedKey }, + err + ) } // Send back the converted file from the local copy to avoid problems // with the file not being present in S3 yet. 
As described in the @@ -159,11 +154,11 @@ async function _convertFile(bucket, originalKey, opts) { try { originalFsPath = await _writeFileToDisk(bucket, originalKey, opts) } catch (err) { - throw new ConversionError('unable to write file to disk', { - bucket, - originalKey, - opts - }).withCause(err) + throw new ConversionError( + 'unable to write file to disk', + { bucket, originalKey, opts }, + err + ) } let promise @@ -184,21 +179,17 @@ async function _convertFile(bucket, originalKey, opts) { try { destPath = await promise } catch (err) { - throw new ConversionError('error converting file', { - bucket, - originalKey, - opts - }).withCause(err) + throw new ConversionError( + 'error converting file', + { bucket, originalKey, opts }, + err + ) } LocalFileWriter.deleteFile(originalFsPath, function() {}) return destPath } async function _writeFileToDisk(bucket, key, opts) { - const fileStream = await PersistorManager.promises.getFileStream( - bucket, - key, - opts - ) + const fileStream = await PersistorManager.getObjectStream(bucket, key, opts) return LocalFileWriter.promises.writeStream(fileStream, key) } diff --git a/services/filestore/app/js/GcsPersistor.js b/services/filestore/app/js/GcsPersistor.js deleted file mode 100644 index 05425abee5..0000000000 --- a/services/filestore/app/js/GcsPersistor.js +++ /dev/null @@ -1,305 +0,0 @@ -const settings = require('settings-sharelatex') -const fs = require('fs') -const { promisify } = require('util') -const Stream = require('stream') -const { Storage } = require('@google-cloud/storage') -const { callbackify } = require('util') -const { WriteError, ReadError, NotFoundError } = require('./Errors') -const asyncPool = require('tiny-async-pool') -const PersistorHelper = require('./PersistorHelper') - -const pipeline = promisify(Stream.pipeline) - -// endpoint settings will be null by default except for tests -// that's OK - GCS uses the locally-configured service account by default -const storage = new Storage(settings.filestore.gcs.endpoint) -// workaround for broken uploads with custom endpoints: -// https://github.com/googleapis/nodejs-storage/issues/898 -if ( - settings.filestore.gcs.endpoint && - settings.filestore.gcs.endpoint.apiEndpoint -) { - storage.interceptors.push({ - request: function(reqOpts) { - const url = new URL(reqOpts.uri) - url.host = settings.filestore.gcs.endpoint.apiEndpoint - if (settings.filestore.gcs.endpoint.apiScheme) { - url.protocol = settings.filestore.gcs.endpoint.apiScheme - } - reqOpts.uri = url.toString() - return reqOpts - } - }) -} - -const GcsPersistor = { - sendFile: callbackify(sendFile), - sendStream: callbackify(sendStream), - getFileStream: callbackify(getFileStream), - getRedirectUrl: callbackify(getRedirectUrl), - getFileMd5Hash: callbackify(getFileMd5Hash), - deleteDirectory: callbackify(deleteDirectory), - getFileSize: callbackify(getFileSize), - deleteFile: callbackify(deleteFile), - copyFile: callbackify(copyFile), - checkIfFileExists: callbackify(checkIfFileExists), - directorySize: callbackify(directorySize), - promises: { - sendFile, - sendStream, - getFileStream, - getRedirectUrl, - getFileMd5Hash, - deleteDirectory, - getFileSize, - deleteFile, - copyFile, - checkIfFileExists, - directorySize - } -} - -module.exports = GcsPersistor - -async function sendFile(bucketName, key, fsPath) { - return sendStream(bucketName, key, fs.createReadStream(fsPath)) -} - -async function sendStream(bucketName, key, readStream, sourceMd5) { - try { - // egress from us to gcs - const observeOptions = { metric: 
'gcs.egress' } - - if (!sourceMd5) { - // if there is no supplied md5 hash, we calculate the hash as the data passes through - observeOptions.hash = 'md5' - } - - const observer = new PersistorHelper.ObserverStream(observeOptions) - - const writeOptions = { - // disabling of resumable uploads is recommended by Google: - resumable: false - } - - if (sourceMd5) { - writeOptions.validation = 'md5' - writeOptions.metadata = { - md5Hash: PersistorHelper.hexToBase64(sourceMd5) - } - } - - const uploadStream = storage - .bucket(bucketName) - .file(key) - .createWriteStream(writeOptions) - - await pipeline(readStream, observer, uploadStream) - - // if we didn't have an md5 hash, we should compare our computed one with Google's - // as we couldn't tell GCS about it beforehand - if (!sourceMd5) { - sourceMd5 = observer.getHash() - // throws on mismatch - await PersistorHelper.verifyMd5(GcsPersistor, bucketName, key, sourceMd5) - } - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'upload to GCS failed', - { bucketName, key }, - WriteError - ) - } -} - -async function getFileStream(bucketName, key, _opts = {}) { - const opts = Object.assign({}, _opts) - const stream = storage - .bucket(bucketName) - .file(key) - .createReadStream(opts) - - // ingress to us from gcs - const observer = new PersistorHelper.ObserverStream({ - metric: 'gcs.ingress' - }) - - try { - // wait for the pipeline to be ready, to catch non-200s - await PersistorHelper.getReadyPipeline(stream, observer) - return observer - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'error reading file from GCS', - { bucketName, key, opts }, - ReadError - ) - } -} - -async function getRedirectUrl(bucketName, key) { - try { - const [url] = await storage - .bucket(bucketName) - .file(key) - .getSignedUrl({ - action: 'read', - expires: new Date().getTime() + settings.filestore.signedUrlExpiryInMs - }) - return url - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'error generating signed url for GCS file', - { bucketName, key }, - ReadError - ) - } -} - -async function getFileSize(bucketName, key) { - try { - const [metadata] = await storage - .bucket(bucketName) - .file(key) - .getMetadata() - return metadata.size - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'error getting size of GCS object', - { bucketName, key }, - ReadError - ) - } -} - -async function getFileMd5Hash(bucketName, key) { - try { - const [metadata] = await storage - .bucket(bucketName) - .file(key) - .getMetadata() - return PersistorHelper.base64ToHex(metadata.md5Hash) - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'error getting hash of GCS object', - { bucketName, key }, - ReadError - ) - } -} - -async function deleteFile(bucketName, key) { - try { - const file = storage.bucket(bucketName).file(key) - - if (settings.filestore.gcs.deletedBucketSuffix) { - await file.copy( - storage - .bucket(`${bucketName}${settings.filestore.gcs.deletedBucketSuffix}`) - .file(`${key}-${new Date().toISOString()}`) - ) - } - if (settings.filestore.gcs.unlockBeforeDelete) { - await file.setMetadata({ eventBasedHold: false }) - } - await file.delete() - } catch (err) { - const error = PersistorHelper.wrapError( - err, - 'error deleting GCS object', - { bucketName, key }, - WriteError - ) - if (!(error instanceof NotFoundError)) { - throw error - } - } -} - -async function deleteDirectory(bucketName, key) { - try { - const [files] = await storage - .bucket(bucketName) - .getFiles({ directory: key }) - - await asyncPool( - 
settings.filestore.gcs.deleteConcurrency, - files, - async file => { - await deleteFile(bucketName, file.name) - } - ) - } catch (err) { - const error = PersistorHelper.wrapError( - err, - 'failed to delete directory in GCS', - { bucketName, key }, - WriteError - ) - if (error instanceof NotFoundError) { - return - } - throw error - } -} - -async function directorySize(bucketName, key) { - let files - - try { - const [response] = await storage - .bucket(bucketName) - .getFiles({ directory: key }) - files = response - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'failed to list objects in GCS', - { bucketName, key }, - ReadError - ) - } - - return files.reduce((acc, file) => Number(file.metadata.size) + acc, 0) -} - -async function checkIfFileExists(bucketName, key) { - try { - const [response] = await storage - .bucket(bucketName) - .file(key) - .exists() - return response - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'error checking if file exists in GCS', - { bucketName, key }, - ReadError - ) - } -} - -async function copyFile(bucketName, sourceKey, destKey) { - try { - const src = storage.bucket(bucketName).file(sourceKey) - const dest = storage.bucket(bucketName).file(destKey) - await src.copy(dest) - } catch (err) { - // fake-gcs-server has a bug that returns an invalid response when the file does not exist - if (err.message === 'Cannot parse response as JSON: not found\n') { - err.code = 404 - } - throw PersistorHelper.wrapError( - err, - 'failed to copy file in GCS', - { bucketName, sourceKey, destKey }, - WriteError - ) - } -} diff --git a/services/filestore/app/js/HealthCheckController.js b/services/filestore/app/js/HealthCheckController.js index 0a4b10387e..4ee7534307 100644 --- a/services/filestore/app/js/HealthCheckController.js +++ b/services/filestore/app/js/HealthCheckController.js @@ -31,7 +31,7 @@ async function checkCanGetFiles() { try { await pipeline(sourceStream, buffer) } catch (err) { - throw new HealthCheckError('failed to get health-check file').withCause(err) + throw new HealthCheckError('failed to get health-check file', {}, err) } if (!buffer.size()) { diff --git a/services/filestore/app/js/LocalFileWriter.js b/services/filestore/app/js/LocalFileWriter.js index 015f12fe33..6338dcecb0 100644 --- a/services/filestore/app/js/LocalFileWriter.js +++ b/services/filestore/app/js/LocalFileWriter.js @@ -30,9 +30,7 @@ async function writeStream(stream, key) { } catch (err) { await deleteFile(fsPath) - throw new WriteError('problem writing file locally', { - fsPath - }).withCause(err) + throw new WriteError('problem writing file locally', { fsPath }, err) } } @@ -44,7 +42,7 @@ async function deleteFile(fsPath) { await promisify(fs.unlink)(fsPath) } catch (err) { if (err.code !== 'ENOENT') { - throw new WriteError('failed to delete file', { fsPath }).withCause(err) + throw new WriteError('failed to delete file', { fsPath }, err) } } } diff --git a/services/filestore/app/js/MigrationPersistor.js b/services/filestore/app/js/MigrationPersistor.js deleted file mode 100644 index 2a9fe5d2a0..0000000000 --- a/services/filestore/app/js/MigrationPersistor.js +++ /dev/null @@ -1,230 +0,0 @@ -const metrics = require('metrics-sharelatex') -const Settings = require('settings-sharelatex') -const logger = require('logger-sharelatex') -const Stream = require('stream') -const { callbackify, promisify } = require('util') -const { NotFoundError, WriteError } = require('./Errors') - -const pipeline = promisify(Stream.pipeline) - -// Persistor that wraps two other 
persistors. Talks to the 'primary' by default, -// but will fall back to an older persistor in the case of a not-found error. -// If `Settings.filestore.fallback.copyOnMiss` is set, this will copy files from the fallback -// to the primary, in the event that they are missing. -// -// It is unlikely that the bucket/location name will be the same on the fallback -// as the primary. The bucket names should be overridden in `Settings.filestore.fallback.buckets` -// e.g. -// Settings.filestore.fallback.buckets = { -// myBucketOnS3: 'myBucketOnGCS' -// } - -module.exports = function(primary, fallback) { - function _wrapMethodOnBothPersistors(method) { - return async function(bucket, key, ...moreArgs) { - const fallbackBucket = _getFallbackBucket(bucket) - - await Promise.all([ - primary.promises[method](bucket, key, ...moreArgs), - fallback.promises[method](fallbackBucket, key, ...moreArgs) - ]) - } - } - - async function getFileStreamWithFallback(bucket, key, opts) { - const shouldCopy = - Settings.filestore.fallback.copyOnMiss && !opts.start && !opts.end - - try { - return await primary.promises.getFileStream(bucket, key, opts) - } catch (err) { - if (err instanceof NotFoundError) { - const fallbackBucket = _getFallbackBucket(bucket) - const fallbackStream = await fallback.promises.getFileStream( - fallbackBucket, - key, - opts - ) - // tee the stream to the client, and as a copy to the primary (if necessary) - // start listening on both straight away so that we don't consume bytes - // in one place before the other - const returnStream = new Stream.PassThrough() - pipeline(fallbackStream, returnStream) - - if (shouldCopy) { - const copyStream = new Stream.PassThrough() - pipeline(fallbackStream, copyStream) - - _copyStreamFromFallbackAndVerify( - copyStream, - fallbackBucket, - bucket, - key, - key - ).catch(() => { - // swallow errors, as this runs in the background and will log a warning - }) - } - return returnStream - } - throw err - } - } - - async function copyFileWithFallback(bucket, sourceKey, destKey) { - try { - return await primary.promises.copyFile(bucket, sourceKey, destKey) - } catch (err) { - if (err instanceof NotFoundError) { - const fallbackBucket = _getFallbackBucket(bucket) - const fallbackStream = await fallback.promises.getFileStream( - fallbackBucket, - sourceKey, - {} - ) - - const copyStream = new Stream.PassThrough() - pipeline(fallbackStream, copyStream) - - if (Settings.filestore.fallback.copyOnMiss) { - const missStream = new Stream.PassThrough() - pipeline(fallbackStream, missStream) - - // copy from sourceKey -> sourceKey - _copyStreamFromFallbackAndVerify( - missStream, - fallbackBucket, - bucket, - sourceKey, - sourceKey - ).then(() => { - // swallow errors, as this runs in the background and will log a warning - }) - } - // copy from sourceKey -> destKey - return _copyStreamFromFallbackAndVerify( - copyStream, - fallbackBucket, - bucket, - sourceKey, - destKey - ) - } - throw err - } - } - - function _getFallbackBucket(bucket) { - return Settings.filestore.fallback.buckets[bucket] || bucket - } - - function _wrapFallbackMethod(method) { - return async function(bucket, key, ...moreArgs) { - try { - return await primary.promises[method](bucket, key, ...moreArgs) - } catch (err) { - if (err instanceof NotFoundError) { - const fallbackBucket = _getFallbackBucket(bucket) - if (Settings.filestore.fallback.copyOnMiss) { - const fallbackStream = await fallback.promises.getFileStream( - fallbackBucket, - key, - {} - ) - // run in background - 
_copyStreamFromFallbackAndVerify( - fallbackStream, - fallbackBucket, - bucket, - key, - key - ).catch(err => { - logger.warn({ err }, 'failed to copy file from fallback') - }) - } - return fallback.promises[method](fallbackBucket, key, ...moreArgs) - } - throw err - } - } - } - - async function _copyStreamFromFallbackAndVerify( - stream, - sourceBucket, - destBucket, - sourceKey, - destKey - ) { - try { - let sourceMd5 - try { - sourceMd5 = await fallback.promises.getFileMd5Hash( - sourceBucket, - sourceKey - ) - } catch (err) { - logger.warn(err, 'error getting md5 hash from fallback persistor') - } - - await primary.promises.sendStream(destBucket, destKey, stream, sourceMd5) - } catch (err) { - const error = new WriteError( - 'unable to copy file to destination persistor', - { - sourceBucket, - destBucket, - sourceKey, - destKey - } - ).withCause(err) - metrics.inc('fallback.copy.failure') - - try { - await primary.promises.deleteFile(destBucket, destKey) - } catch (err) { - error.info.cleanupError = new WriteError( - 'unable to clean up destination copy artifact', - { - destBucket, - destKey - } - ).withCause(err) - } - - logger.warn({ error }, 'failed to copy file from fallback') - throw error - } - } - - return { - primaryPersistor: primary, - fallbackPersistor: fallback, - sendFile: primary.sendFile, - sendStream: primary.sendStream, - getFileStream: callbackify(getFileStreamWithFallback), - getRedirectUrl: primary.getRedirectUrl, - getFileMd5Hash: callbackify(_wrapFallbackMethod('getFileMd5Hash')), - deleteDirectory: callbackify( - _wrapMethodOnBothPersistors('deleteDirectory') - ), - getFileSize: callbackify(_wrapFallbackMethod('getFileSize')), - deleteFile: callbackify(_wrapMethodOnBothPersistors('deleteFile')), - copyFile: callbackify(copyFileWithFallback), - checkIfFileExists: callbackify(_wrapFallbackMethod('checkIfFileExists')), - directorySize: callbackify(_wrapFallbackMethod('directorySize')), - promises: { - sendFile: primary.promises.sendFile, - sendStream: primary.promises.sendStream, - getFileStream: getFileStreamWithFallback, - getRedirectUrl: primary.promises.getRedirectUrl, - getFileMd5Hash: _wrapFallbackMethod('getFileMd5Hash'), - deleteDirectory: _wrapMethodOnBothPersistors('deleteDirectory'), - getFileSize: _wrapFallbackMethod('getFileSize'), - deleteFile: _wrapMethodOnBothPersistors('deleteFile'), - copyFile: copyFileWithFallback, - checkIfFileExists: _wrapFallbackMethod('checkIfFileExists'), - directorySize: _wrapFallbackMethod('directorySize') - } - } -} diff --git a/services/filestore/app/js/PersistorHelper.js b/services/filestore/app/js/PersistorHelper.js deleted file mode 100644 index 1a836a2b09..0000000000 --- a/services/filestore/app/js/PersistorHelper.js +++ /dev/null @@ -1,177 +0,0 @@ -const crypto = require('crypto') -const Stream = require('stream') -const logger = require('logger-sharelatex') -const metrics = require('metrics-sharelatex') -const { WriteError, ReadError, NotFoundError } = require('./Errors') -const { promisify } = require('util') - -const pipeline = promisify(Stream.pipeline) - -// Observes data that passes through and computes some metadata for it -// - specifically, it computes the number of bytes transferred, and optionally -// computes a cryptographic hash based on the 'hash' option. 
e.g., pass -// { hash: 'md5' } to compute the md5 hash of the stream -// - if 'metric' is supplied as an option, this metric will be incremented by -// the number of bytes transferred -class ObserverStream extends Stream.Transform { - constructor(options) { - options.autoDestroy = true - super(options) - - this.bytes = 0 - - if (options.hash) { - this.hash = crypto.createHash(options.hash) - } - - if (options.metric) { - const onEnd = () => { - metrics.count(options.metric, this.bytes) - } - this.once('error', onEnd) - this.once('end', onEnd) - } - } - - _transform(chunk, encoding, done) { - if (this.hash) { - this.hash.update(chunk) - } - this.bytes += chunk.length - this.push(chunk) - done() - } - - getHash() { - return this.hash && this.hash.digest('hex') - } -} - -module.exports = { - ObserverStream, - calculateStreamMd5, - verifyMd5, - getReadyPipeline, - wrapError, - hexToBase64, - base64ToHex -} - -// returns a promise which resolves with the md5 hash of the stream -// - consumes the stream -function calculateStreamMd5(stream) { - const hash = crypto.createHash('md5') - hash.setEncoding('hex') - - return pipeline(stream, hash).then(() => hash.read()) -} - -// verifies the md5 hash of a file against the supplied md5 or the one stored in -// storage if not supplied - deletes the new file if the md5 does not match and -// throws an error -async function verifyMd5(persistor, bucket, key, sourceMd5, destMd5 = null) { - if (!destMd5) { - destMd5 = await persistor.promises.getFileMd5Hash(bucket, key) - } - - if (sourceMd5 !== destMd5) { - try { - await persistor.promises.deleteFile(bucket, key) - } catch (err) { - logger.warn(err, 'error deleting file for invalid upload') - } - - throw new WriteError('source and destination hashes do not match', { - sourceMd5, - destMd5, - bucket, - key - }) - } -} - -// resolves when a stream is 'readable', or rejects if the stream throws an error -// before that happens - this lets us handle protocol-level errors before trying -// to read them -function getReadyPipeline(...streams) { - return new Promise((resolve, reject) => { - const lastStream = streams.slice(-1)[0] - - // in case of error or stream close, we must ensure that we drain the - // previous stream so that it can clean up its socket (if it has one) - const drainPreviousStream = function(previousStream) { - // this stream is no longer reliable, so don't pipe anything more into it - previousStream.unpipe(this) - previousStream.resume() - } - - // handler to resolve when either: - // - an error happens, or - // - the last stream in the chain is readable - // for example, in the case of a 4xx error an error will occur and the - // streams will not become readable - const handler = function(err) { - // remove handler from all streams because we don't want to do this on - // later errors - lastStream.removeListener('readable', handler) - for (const stream of streams) { - stream.removeListener('error', handler) - } - - // return control to the caller - if (err) { - reject( - wrapError(err, 'error before stream became ready', {}, ReadError) - ) - } else { - resolve(lastStream) - } - } - - // ensure the handler fires when the last strem becomes readable - lastStream.on('readable', handler) - - for (const stream of streams) { - // when a stream receives a pipe, set up the drain handler to drain the - // connection if an error occurs or the stream is closed - stream.on('pipe', previousStream => { - stream.on('error', x => { - drainPreviousStream(previousStream) - }) - stream.on('close', () => { - 
drainPreviousStream(previousStream) - }) - }) - // add the handler function to resolve this method on error if we can't - // set up the pipeline - stream.on('error', handler) - } - - // begin the pipeline - for (let index = 0; index < streams.length - 1; index++) { - streams[index].pipe(streams[index + 1]) - } - }) -} - -function wrapError(error, message, params, ErrorType) { - if ( - error instanceof NotFoundError || - ['NoSuchKey', 'NotFound', 404, 'AccessDenied', 'ENOENT'].includes( - error.code - ) || - (error.response && error.response.statusCode === 404) - ) { - return new NotFoundError('no such file', params).withCause(error) - } else { - return new ErrorType(message, params).withCause(error) - } -} - -function base64ToHex(base64) { - return Buffer.from(base64, 'base64').toString('hex') -} - -function hexToBase64(hex) { - return Buffer.from(hex, 'hex').toString('base64') -} diff --git a/services/filestore/app/js/PersistorManager.js b/services/filestore/app/js/PersistorManager.js index d26ab77a92..c17e251ce9 100644 --- a/services/filestore/app/js/PersistorManager.js +++ b/services/filestore/app/js/PersistorManager.js @@ -1,39 +1,10 @@ const settings = require('settings-sharelatex') -const logger = require('logger-sharelatex') -logger.log( - { - backend: settings.filestore.backend, - fallback: settings.filestore.fallback && settings.filestore.fallback.backend - }, - 'Loading backend' -) -if (!settings.filestore.backend) { - throw new Error('no backend specified - config incomplete') -} +const persistorSettings = settings.filestore +persistorSettings.Metrics = require('metrics-sharelatex') +persistorSettings.paths = settings.path -function getPersistor(backend) { - switch (backend) { - case 'aws-sdk': - case 's3': - return require('./S3Persistor') - case 'fs': - return require('./FSPersistor') - case 'gcs': - return require('./GcsPersistor') - default: - throw new Error(`unknown filestore backend: ${backend}`) - } -} - -let persistor = getPersistor(settings.filestore.backend) - -if (settings.filestore.fallback && settings.filestore.fallback.backend) { - const migrationPersistor = require('./MigrationPersistor') - persistor = migrationPersistor( - persistor, - getPersistor(settings.filestore.fallback.backend) - ) -} +const ObjectPersistor = require('@overleaf/object-persistor') +const persistor = ObjectPersistor(persistorSettings) module.exports = persistor diff --git a/services/filestore/app/js/S3Persistor.js b/services/filestore/app/js/S3Persistor.js deleted file mode 100644 index f0df46f10d..0000000000 --- a/services/filestore/app/js/S3Persistor.js +++ /dev/null @@ -1,367 +0,0 @@ -const http = require('http') -const https = require('https') -http.globalAgent.maxSockets = 300 -https.globalAgent.maxSockets = 300 - -const settings = require('settings-sharelatex') - -const PersistorHelper = require('./PersistorHelper') - -const fs = require('fs') -const S3 = require('aws-sdk/clients/s3') -const { URL } = require('url') -const Stream = require('stream') -const { promisify, callbackify } = require('util') -const { - WriteError, - ReadError, - NotFoundError, - SettingsError -} = require('./Errors') -const pipeline = promisify(Stream.pipeline) - -const S3Persistor = { - sendFile: callbackify(sendFile), - sendStream: callbackify(sendStream), - getFileStream: callbackify(getFileStream), - getRedirectUrl: callbackify(getRedirectUrl), - getFileMd5Hash: callbackify(getFileMd5Hash), - deleteDirectory: callbackify(deleteDirectory), - getFileSize: callbackify(getFileSize), - deleteFile: 
callbackify(deleteFile), - copyFile: callbackify(copyFile), - checkIfFileExists: callbackify(checkIfFileExists), - directorySize: callbackify(directorySize), - promises: { - sendFile, - sendStream, - getFileStream, - getRedirectUrl, - getFileMd5Hash, - deleteDirectory, - getFileSize, - deleteFile, - copyFile, - checkIfFileExists, - directorySize - } -} - -module.exports = S3Persistor - -async function sendFile(bucketName, key, fsPath) { - return sendStream(bucketName, key, fs.createReadStream(fsPath)) -} - -async function sendStream(bucketName, key, readStream, sourceMd5) { - try { - // egress from us to S3 - const observeOptions = { metric: 's3.egress' } - let b64Hash - - if (sourceMd5) { - b64Hash = PersistorHelper.hexToBase64(sourceMd5) - } else { - // if there is no supplied md5 hash, we calculate the hash as the data passes through - observeOptions.hash = 'md5' - } - - const observer = new PersistorHelper.ObserverStream(observeOptions) - pipeline(readStream, observer) - - // if we have an md5 hash, pass this to S3 to verify the upload - const uploadOptions = { - Bucket: bucketName, - Key: key, - Body: observer - } - if (b64Hash) { - uploadOptions.ContentMD5 = b64Hash - } - - const response = await _getClientForBucket(bucketName) - .upload(uploadOptions, { partSize: settings.filestore.s3.partSize }) - .promise() - let destMd5 = _md5FromResponse(response) - if (!destMd5) { - // the eTag isn't in md5 format so we need to calculate it ourselves - const verifyStream = await getFileStream( - response.Bucket, - response.Key, - {} - ) - destMd5 = await PersistorHelper.calculateStreamMd5(verifyStream) - } - - // if we didn't have an md5 hash, we should compare our computed one with S3's - // as we couldn't tell S3 about it beforehand - if (!sourceMd5) { - sourceMd5 = observer.getHash() - // throws on mismatch - await PersistorHelper.verifyMd5( - S3Persistor, - bucketName, - key, - sourceMd5, - destMd5 - ) - } - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'upload to S3 failed', - { bucketName, key }, - WriteError - ) - } -} - -async function getFileStream(bucketName, key, opts) { - opts = opts || {} - - const params = { - Bucket: bucketName, - Key: key - } - if (opts.start != null && opts.end != null) { - params.Range = `bytes=${opts.start}-${opts.end}` - } - - const stream = _getClientForBucket(bucketName) - .getObject(params) - .createReadStream() - - // ingress from S3 to us - const observer = new PersistorHelper.ObserverStream({ metric: 's3.ingress' }) - - try { - // wait for the pipeline to be ready, to catch non-200s - await PersistorHelper.getReadyPipeline(stream, observer) - return observer - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'error reading file from S3', - { bucketName, key, opts }, - ReadError - ) - } -} - -async function getRedirectUrl() { - // not implemented - return null -} - -async function deleteDirectory(bucketName, key) { - let response - - try { - response = await _getClientForBucket(bucketName) - .listObjects({ Bucket: bucketName, Prefix: key }) - .promise() - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'failed to list objects in S3', - { bucketName, key }, - ReadError - ) - } - - const objects = response.Contents.map(item => ({ Key: item.Key })) - if (objects.length) { - try { - await _getClientForBucket(bucketName) - .deleteObjects({ - Bucket: bucketName, - Delete: { - Objects: objects, - Quiet: true - } - }) - .promise() - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'failed to delete objects in S3', 
- { bucketName, key }, - WriteError - ) - } - } -} - -async function getFileSize(bucketName, key) { - try { - const response = await _getClientForBucket(bucketName) - .headObject({ Bucket: bucketName, Key: key }) - .promise() - return response.ContentLength - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'error getting size of s3 object', - { bucketName, key }, - ReadError - ) - } -} - -async function getFileMd5Hash(bucketName, key) { - try { - const response = await _getClientForBucket(bucketName) - .headObject({ Bucket: bucketName, Key: key }) - .promise() - return _md5FromResponse(response) - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'error getting hash of s3 object', - { bucketName, key }, - ReadError - ) - } -} - -async function deleteFile(bucketName, key) { - try { - await _getClientForBucket(bucketName) - .deleteObject({ Bucket: bucketName, Key: key }) - .promise() - } catch (err) { - // s3 does not give us a NotFoundError here - throw PersistorHelper.wrapError( - err, - 'failed to delete file in S3', - { bucketName, key }, - WriteError - ) - } -} - -async function copyFile(bucketName, sourceKey, destKey) { - const params = { - Bucket: bucketName, - Key: destKey, - CopySource: `${bucketName}/${sourceKey}` - } - try { - await _getClientForBucket(bucketName) - .copyObject(params) - .promise() - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'failed to copy file in S3', - params, - WriteError - ) - } -} - -async function checkIfFileExists(bucketName, key) { - try { - await getFileSize(bucketName, key) - return true - } catch (err) { - if (err instanceof NotFoundError) { - return false - } - throw PersistorHelper.wrapError( - err, - 'error checking whether S3 object exists', - { bucketName, key }, - ReadError - ) - } -} - -async function directorySize(bucketName, key) { - try { - const response = await _getClientForBucket(bucketName) - .listObjects({ Bucket: bucketName, Prefix: key }) - .promise() - - return response.Contents.reduce((acc, item) => item.Size + acc, 0) - } catch (err) { - throw PersistorHelper.wrapError( - err, - 'error getting directory size in S3', - { bucketName, key }, - ReadError - ) - } -} - -const _clients = new Map() -let _defaultClient - -function _getClientForBucket(bucket) { - if (_clients[bucket]) { - return _clients[bucket] - } - - if ( - settings.filestore.s3BucketCreds && - settings.filestore.s3BucketCreds[bucket] - ) { - _clients[bucket] = new S3( - _buildClientOptions(settings.filestore.s3BucketCreds[bucket]) - ) - return _clients[bucket] - } - - // no specific credentials for the bucket - if (_defaultClient) { - return _defaultClient - } - - if (settings.filestore.s3.key) { - _defaultClient = new S3(_buildClientOptions()) - return _defaultClient - } - - throw new SettingsError( - 'no bucket-specific or default credentials provided', - { bucket } - ) -} - -function _buildClientOptions(bucketCredentials) { - const options = {} - - if (bucketCredentials) { - options.credentials = { - accessKeyId: bucketCredentials.auth_key, - secretAccessKey: bucketCredentials.auth_secret - } - } else { - options.credentials = { - accessKeyId: settings.filestore.s3.key, - secretAccessKey: settings.filestore.s3.secret - } - } - - if (settings.filestore.s3.endpoint) { - const endpoint = new URL(settings.filestore.s3.endpoint) - options.endpoint = settings.filestore.s3.endpoint - options.sslEnabled = endpoint.protocol === 'https' - } - - // path-style access is only used for acceptance tests - if (settings.filestore.s3.pathStyle) { 
- options.s3ForcePathStyle = true - } - - return options -} - -function _md5FromResponse(response) { - const md5 = (response.ETag || '').replace(/[ "]/g, '') - if (!md5.match(/^[a-f0-9]{32}$/)) { - return null - } - - return md5 -} diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index d740b46ff6..a7a0daeb90 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -314,9 +314,9 @@ } }, "mime": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", - "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" }, "ms": { "version": "2.1.2", @@ -402,9 +402,9 @@ } }, "google-auth-library": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.2.tgz", - "integrity": "sha512-o/F/GiOPzDc49v5/6vfrEz3gRXvES49qGP84rrl3SO0efJA/M52hFwv2ozd1EC1TPrLj75Moj3iPgKGuGs6smA==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.3.tgz", + "integrity": "sha512-2Np6ojPmaJGXHSMsBhtTQEKfSMdLc8hefoihv7N2cwEr8E5bq39fhoat6TcXHwa0XoGO5Guh9sp3nxHFPmibMw==", "requires": { "arrify": "^2.0.0", "base64-js": "^1.3.0", @@ -503,6 +503,11 @@ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz", "integrity": "sha512-QzB0/IMvB0eFxFK7Eqh+bfC8NLv3E9ScjWQrPOk6GgfNroxcVITdTlT8NRsRrcp5+QQJVPLkRqKG0PUdaWXmHw==" }, + "@types/node": { + "version": "10.17.26", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.26.tgz", + "integrity": "sha512-myMwkO2Cr82kirHY8uknNRHEVtn0wV3DTQfkrjx17jmkstDRZ24gNUdl8AHXVyVclTYI/bNjgTPTAWvWLqXqkw==" + }, "agent-base": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", @@ -620,9 +625,9 @@ } }, "mime": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", - "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" }, "ms": { "version": "2.1.2", @@ -634,6 +639,26 @@ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==" }, + "protobufjs": { + "version": "6.8.9", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.9.tgz", + "integrity": "sha512-j2JlRdUeL/f4Z6x4aU4gj9I2LECglC+5qR2TrWb193Tla1qfdaNQTZ8I27Pt7K0Ajmvjjpft7O3KWTGciz4gpw==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.0", + "@types/node": "^10.1.0", + "long": "^4.0.0" + } + }, "semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", @@ -881,9 +906,9 @@ } }, "mime": { - 
"version": "2.4.4", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", - "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" }, "ms": { "version": "2.1.2", @@ -939,6 +964,13 @@ "semver": "^6.0.0", "shimmer": "^1.2.0", "uuid": "^3.2.1" + }, + "dependencies": { + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + } } }, "@opencensus/propagation-stackdriver": { @@ -949,6 +981,13 @@ "@opencensus/core": "^0.0.20", "hex2dec": "^1.0.1", "uuid": "^3.2.1" + }, + "dependencies": { + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + } } }, "@overleaf/o-error": { @@ -956,6 +995,210 @@ "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-3.0.0.tgz", "integrity": "sha512-LsM2s6Iy9G97ktPo0ys4VxtI/m3ahc1ZHwjo5XnhXtjeIkkkVAehsrcRRoV/yWepPjymB0oZonhcfojpjYR/tg==" }, + "@overleaf/object-persistor": { + "version": "git+https://github.com/overleaf/object-persistor.git#923c26a04dfeb9e79caaa5133394678253b5d006", + "from": "git+https://github.com/overleaf/object-persistor.git", + "requires": { + "@google-cloud/storage": "^5.1.1", + "@overleaf/o-error": "^3.0.0", + "aws-sdk": "^2.710.0", + "fast-crc32c": "^2.0.0", + "glob": "^7.1.6", + "logger-sharelatex": "^2.1.1", + "node-uuid": "^1.4.8", + "range-parser": "^1.2.1", + "tiny-async-pool": "^1.1.0" + }, + "dependencies": { + "@google-cloud/common": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.3.1.tgz", + "integrity": "sha512-bJamcNvZ2j5xS01uFBT1GqfHIKrtwpyUhIU/Xn3uwMZkK/t6JA3mlID0wuZlo7XjbjFSRT2iLBEmDWv9T2hP8g==", + "requires": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^6.0.0", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + }, + "dependencies": { + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + } + } + }, + "@google-cloud/paginator": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.1.tgz", + "integrity": "sha512-ykqRmHRg6rcIZTE+JjUMNBKOQ8uvmbVrhY//lTxZgf5QBPbZW3PoN7VK+D43yCaRJJjRmmWsaG5YdxLR6h0n0A==", + "requires": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + } + }, + "@google-cloud/projectify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.0.tgz", + "integrity": "sha512-7wZ+m4N3Imtb5afOPfqNFyj9cKrlfVQ+t5YRxLS7tUpn8Pn/i7QuVubZRTXllaWjO4T5t/gm/r2x7oy5ajjvFQ==" + }, + "@google-cloud/promisify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.1.tgz", + "integrity": 
"sha512-82EQzwrNauw1fkbUSr3f+50Bcq7g4h0XvLOk8C5e9ABkXYHei7ZPi9tiMMD7Vh3SfcdH97d1ibJ3KBWp2o1J+w==" + }, + "@google-cloud/storage": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-5.1.1.tgz", + "integrity": "sha512-w/64V+eJl+vpYUXT15sBcO8pX0KTmb9Ni2ZNuQQ8HmyhAbEA3//G8JFaLPCXGBWO2/b0OQZytUT6q2wII9a9aQ==", + "requires": { + "@google-cloud/common": "^3.0.0", + "@google-cloud/paginator": "^3.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.0", + "compressible": "^2.0.12", + "concat-stream": "^2.0.0", + "date-and-time": "^0.13.0", + "duplexify": "^3.5.0", + "extend": "^3.0.2", + "gaxios": "^3.0.0", + "gcs-resumable-upload": "^3.0.0", + "hash-stream-validation": "^0.2.2", + "mime": "^2.2.0", + "mime-types": "^2.0.8", + "onetime": "^5.1.0", + "p-limit": "^3.0.1", + "pumpify": "^2.0.0", + "readable-stream": "^3.4.0", + "snakeize": "^0.1.0", + "stream-events": "^1.0.1", + "through2": "^3.0.0", + "xdg-basedir": "^4.0.0" + } + }, + "gaxios": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.3.tgz", + "integrity": "sha512-PkzQludeIFhd535/yucALT/Wxyj/y2zLyrMwPcJmnLHDugmV49NvAi/vb+VUq/eWztATZCNcb8ue+ywPG+oLuw==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.0.tgz", + "integrity": "sha512-r57SV28+olVsflPlKyVig3Muo/VDlcsObMtvDGOEtEJXj+DDE8bEl0coIkXh//hbkSDTvo+f5lbihZOndYXQQQ==", + "requires": { + "gaxios": "^3.0.0", + "json-bigint": "^0.3.0" + } + }, + "gcs-resumable-upload": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-3.1.0.tgz", + "integrity": "sha512-gB8xH6EjYCv9lfBEL4FK5+AMgTY0feYoNHAYOV5nCuOrDPhy5MOiyJE8WosgxhbKBPS361H7fkwv6CTufEh9bg==", + "requires": { + "abort-controller": "^3.0.0", + "configstore": "^5.0.0", + "extend": "^3.0.2", + "gaxios": "^3.0.0", + "google-auth-library": "^6.0.0", + "pumpify": "^2.0.0", + "stream-events": "^1.0.4" + } + }, + "google-auth-library": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.3.tgz", + "integrity": "sha512-2Np6ojPmaJGXHSMsBhtTQEKfSMdLc8hefoihv7N2cwEr8E5bq39fhoat6TcXHwa0XoGO5Guh9sp3nxHFPmibMw==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^3.0.0", + "gcp-metadata": "^4.1.0", + "gtoken": "^5.0.0", + "jws": "^4.0.0", + "lru-cache": "^5.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.1.tgz", + "integrity": "sha512-VlQgtozgNVVVcYTXS36eQz4PXPt9gIPqLOhHN0QiV6W6h4qSCNVKPtKC5INtJsaHHF2r7+nOIa26MJeJMTaZEQ==", + "requires": { + "node-forge": "^0.9.0" + } + }, + "gtoken": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.1.tgz", + "integrity": "sha512-33w4FNDkUcyIOq/TqyC+drnKdI4PdXmWp9lZzssyEQKuvu9ZFN3KttaSnDKo52U3E51oujVGop93mKxmqO8HHg==", + "requires": { + "gaxios": "^3.0.0", + "google-p12-pem": "^3.0.0", + "jws": "^4.0.0", + "mime": "^2.2.0" + } + }, + "mime": { + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" + }, + "p-limit": { + "version": 
"3.0.1", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.0.1.tgz", + "integrity": "sha512-mw/p92EyOzl2MhauKodw54Rx5ZK4624rNfgNaBguFZkHzyUG9WsDzFF5/yQVEJinbJDdP4jEfMN+uBquiGnaLg==", + "requires": { + "p-try": "^2.0.0" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "teeny-request": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.0.tgz", + "integrity": "sha512-kWD3sdGmIix6w7c8ZdVKxWq+3YwVPGWz+Mq0wRZXayEKY/YHb63b8uphfBzcFDmyq8frD9+UTc3wLyOhltRbtg==", + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.2.0", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + } + }, + "uuid": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.2.0.tgz", + "integrity": "sha512-CYpGiFTUrmI6OBMkAdjSDM0k5h8SkkiTP4WAjQgDgNB1S3Ou9VBEvr6q0Kv2H1mMk7IWfxYGpMH5sd5AvcIV2Q==" + } + } + }, "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", @@ -1016,9 +1259,9 @@ "integrity": "sha512-lu8BpxjAtRCAo5ifytTpCPCj99LF7o/2Myn+NXyNCBqvPYn7Pjd76AMmUB5l7XF1U6t0hcWrlEM5ESufW7wAeA==" }, "@sinonjs/commons": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.7.1.tgz", - "integrity": "sha512-Debi3Baff1Qu1Unc3mjJ96MgpbwTn43S1+9yJ0llWygPwDNu2aaWBD6yc9y/Z8XDRNhx7U+u2UDg2OGQXkclUQ==", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.0.tgz", + "integrity": "sha512-wEj54PfsZ5jGSwMX68G8ZXFawcSglQSXqCftWX3ec8MDUzQdHgcKvw97awHbY0efQEL5iKUOAmmVtoYgmrSG4Q==", "dev": true, "requires": { "type-detect": "4.0.8" @@ -1098,9 +1341,15 @@ } }, "@types/json-schema": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.4.tgz", - "integrity": "sha512-8+KAKzEvSUdeo+kmqnKrqgeE+LcA0tjYWFY7RPProVYwnqDjukzO+3b6dLD56rYX5TdWejnEOLJYOIeh4CXKuA==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.5.tgz", + "integrity": "sha512-7+2BITlgjgDhH0vvwZU/HZJVyk+2XUlvxXe8dFMedNX/aMkaOq++rMAFXc0tM7ij15QaWlbdQASBR9dihi+bDQ==", + "dev": true + }, + "@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", "dev": true }, "@types/long": { @@ -1109,14 +1358,14 @@ "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w==" }, "@types/node": { - "version": "10.17.16", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.16.tgz", - "integrity": "sha512-A4283YSA1OmnIivcpy/4nN86YlnKRiQp8PYwI2KdPCONEBN093QTb0gCtERtkLyVNGKKIGazTZ2nAmVzQU51zA==" + "version": "13.13.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.13.tgz", + "integrity": "sha512-UfvBE9oRCAJVzfR+3eWm/sdLFe/qroAPEXP3GPJ1SehQiEVgZT6NQZWYbPMiJ3UdcKM06v4j+S1lTcdWCmw+3g==" }, "@types/request": { - "version": "2.48.4", - "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.4.tgz", - "integrity": 
"sha512-W1t1MTKYR8PxICH+A4HgEIPuAC3sbljoEVfyZbeFJJDbr30guDspJri2XOaM2E+Un7ZjrihaDi7cf6fPa2tbgw==", + "version": "2.48.5", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.5.tgz", + "integrity": "sha512-/LO7xRVnL3DxJ1WkPGDQrp4VTV1reX9RkC85mJ+Qzykj2Bdw+mG15aAfDahc76HtknjzE16SX/Yddn6MxVbmGQ==", "requires": { "@types/caseless": "*", "@types/node": "*", @@ -1142,9 +1391,9 @@ "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" }, "@types/tough-cookie": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.6.tgz", - "integrity": "sha512-wHNBMnkoEBiRAd3s8KTKwIuO9biFtTf0LehITzBhSco+HQI0xkXZbLOD55SW3Aqw3oUkHstkm5SPv58yaAdFPQ==" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.0.tgz", + "integrity": "sha512-I99sngh224D0M7XgW1s120zxCt3VYQ3IQsuw3P3jbq5GG4yc79+ZjyKznyOGIQrflfylLgcfekeZW/vk0yng6A==" }, "@typescript-eslint/experimental-utils": { "version": "1.13.0", @@ -1396,11 +1645,11 @@ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "aws-sdk": { - "version": "2.648.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.648.0.tgz", - "integrity": "sha512-b+PdZmCFvZBisqXEH68jO4xB30LrDHQMWrEX6MJoZaOlxPJfpOqRFUH3zsiAXF5Q2jTdjYLtS5bs3vcIwRzi3Q==", + "version": "2.710.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.710.0.tgz", + "integrity": "sha512-GQTPH0DzJMpvvtZ3VO+grkKVdL/nqjWsIfcVf1c3oedvEjW24wSXQEs6KWAGbpG2jbHsYKH7kZ4XXuq428LVAw==", "requires": { - "buffer": "4.9.1", + "buffer": "4.9.2", "events": "1.1.1", "ieee754": "1.1.13", "jmespath": "0.15.0", @@ -1475,9 +1724,9 @@ "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ==" }, "binary-extensions": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.0.0.tgz", - "integrity": "sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.1.0.tgz", + "integrity": "sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ==", "dev": true }, "bindings": { @@ -1557,9 +1806,9 @@ "dev": true }, "buffer": { - "version": "4.9.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", - "integrity": "sha512-DNK4ruAqtyHaN8Zne7PkBTO+dD1Lr0YfTduMqlIyjvQIoztBkUxrvL+hKeLW8NXFKHOq/2upkxuoS9znQ9bW9A==", + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", + "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", "requires": { "base64-js": "^1.0.2", "ieee754": "^1.1.4", @@ -1582,13 +1831,13 @@ "integrity": "sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw==" }, "bunyan": { - "version": "1.8.12", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz", - "integrity": "sha512-dmDUbGHeGcvCDLRFOscZkwx1ZO/aFz3bJOCi5nCgzdhFGPxwK+y5AcDBnqagNGlJZ7lje/l6JUEz9mQcutttdg==", + "version": "1.8.14", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.14.tgz", + "integrity": "sha512-LlahJUxXzZLuw/hetUQJmRgZ1LF6+cr5TPpRj6jf327AsiIq2jhYEH4oqUUkVKTor+9w2BT3oxVwhzE5lw9tcg==", "dev": true, "requires": { "dtrace-provider": "~0.8", - "moment": "^2.10.6", + 
"moment": "^2.19.3", "mv": "~2", "safe-json-stringify": "~1" } @@ -2113,22 +2362,22 @@ } }, "es-abstract": { - "version": "1.17.5", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", - "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "version": "1.17.6", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.6.tgz", + "integrity": "sha512-Fr89bON3WFyUi5EvAeI48QTWX0AyekGgLA8H+c+7fbfCkJwRWRMLd8CQedNEyJuoYYhmtEqY92pgte1FAhBlhw==", "dev": true, "requires": { "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", "has": "^1.0.3", "has-symbols": "^1.0.1", - "is-callable": "^1.1.5", - "is-regex": "^1.0.5", + "is-callable": "^1.2.0", + "is-regex": "^1.1.0", "object-inspect": "^1.7.0", "object-keys": "^1.1.1", "object.assign": "^4.1.0", - "string.prototype.trimleft": "^2.1.1", - "string.prototype.trimright": "^2.1.1" + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" } }, "es-to-primitive": { @@ -2244,9 +2493,9 @@ } }, "eslint-config-prettier": { - "version": "6.10.1", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.10.1.tgz", - "integrity": "sha512-svTy6zh1ecQojvpbJSgH3aei/Rt7C6i090l5f2WQ4aB05lYHeZIR1qL4wZyyILTbtmnbHP5Yn8MrsOJMGa8RkQ==", + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.11.0.tgz", + "integrity": "sha512-oB8cpLWSAjOVFEJhhyMZh6NOEOtBVziaqdDQ86+qhDHFbZXoRTM7pNSvFRfW/W/L/LrQ38C99J5CGuRBBzBsdA==", "dev": true, "requires": { "get-stdin": "^6.0.0" @@ -2259,9 +2508,9 @@ "dev": true }, "eslint-import-resolver-node": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.3.tgz", - "integrity": "sha512-b8crLDo0M5RSe5YG8Pu2DYBj71tSB6OvXkfzwbJU2w7y8P4/yo0MyF8jU26IEuEuHF2K5/gcAJE3LhQGqBBbVg==", + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.4.tgz", + "integrity": "sha512-ogtf+5AB/O+nM6DIeBUNr2fuT7ot9Qg/1harBfBtaP13ekEWFQEEMP94BCB7zaNW3gyY+8SHYF00rnqYwXKWOA==", "dev": true, "requires": { "debug": "^2.6.9", @@ -2285,9 +2534,9 @@ "dev": true }, "eslint-plugin-chai-friendly": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.5.0.tgz", - "integrity": "sha512-Pxe6z8C9fP0pn2X2nGFU/b3GBOCM/5FVus1hsMwJsXP3R7RiXFl7g0ksJbsc0GxiLyidTW4mEFk77qsNn7Tk7g==", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.6.0.tgz", + "integrity": "sha512-Uvvv1gkbRGp/qfN15B0kQyQWg+oFA8buDSqrwmW3egNSk/FpqH2MjQqKOuKwmEL6w4QIQrIjDp+gg6kGGmD3oQ==", "dev": true }, "eslint-plugin-es": { @@ -2318,23 +2567,24 @@ } }, "eslint-plugin-import": { - "version": "2.20.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.20.2.tgz", - "integrity": "sha512-FObidqpXrR8OnCh4iNsxy+WACztJLXAHBO5hK79T1Hc77PgQZkyDGA5Ag9xAvRpglvLNxhH/zSmZ70/pZ31dHg==", + "version": "2.22.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.22.0.tgz", + "integrity": "sha512-66Fpf1Ln6aIS5Gr/55ts19eUuoDhAbZgnr6UxK5hbDx6l/QgQgx61AePq+BV4PP2uXQFClgMVzep5zZ94qqsxg==", "dev": true, "requires": { - "array-includes": "^3.0.3", - "array.prototype.flat": "^1.2.1", + "array-includes": "^3.1.1", + "array.prototype.flat": "^1.2.3", "contains-path": "^0.1.0", 
"debug": "^2.6.9", "doctrine": "1.5.0", - "eslint-import-resolver-node": "^0.3.2", - "eslint-module-utils": "^2.4.1", + "eslint-import-resolver-node": "^0.3.3", + "eslint-module-utils": "^2.6.0", "has": "^1.0.3", "minimatch": "^3.0.4", - "object.values": "^1.1.0", + "object.values": "^1.1.1", "read-pkg-up": "^2.0.0", - "resolve": "^1.12.0" + "resolve": "^1.17.0", + "tsconfig-paths": "^3.9.0" }, "dependencies": { "doctrine": { @@ -2346,6 +2596,15 @@ "esutils": "^2.0.2", "isarray": "^1.0.0" } + }, + "resolve": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz", + "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } } } }, @@ -2402,9 +2661,9 @@ } }, "eslint-plugin-prettier": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-3.1.2.tgz", - "integrity": "sha512-GlolCC9y3XZfv3RQfwGew7NnuFDKsfI4lbvRK+PIIo23SFH+LemGs4cKwzAaRa+Mdb+lQO/STaIayno8T5sJJA==", + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-3.1.4.tgz", + "integrity": "sha512-jZDa8z76klRqo+TdGDTFJSavwbnWK2ZpqGKNZ+VvweMW516pDUMmQ2koXvxEE4JhzNvTv+radye/bWGBmA6jmg==", "dev": true, "requires": { "prettier-linter-helpers": "^1.0.0" @@ -2519,6 +2778,13 @@ "requires": { "d64": "^1.0.0", "uuid": "^3.0.1" + }, + "dependencies": { + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + } } }, "events": { @@ -2765,9 +3031,9 @@ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, "fsevents": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.2.tgz", - "integrity": "sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.3.tgz", + "integrity": "sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==", "dev": true, "optional": true }, @@ -2907,33 +3173,6 @@ "retry-request": "^4.0.0", "semver": "^6.0.0", "walkdir": "^0.4.0" - }, - "dependencies": { - "@types/node": { - "version": "13.13.12", - "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.12.tgz", - "integrity": "sha512-zWz/8NEPxoXNT9YyF2osqyA9WjssZukYpgI4UYZpOjcyqwIUqWGkcCionaEb9Ki+FULyPyvNFpg/329Kd2/pbw==" - }, - "protobufjs": { - "version": "6.9.0", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.9.0.tgz", - "integrity": "sha512-LlGVfEWDXoI/STstRDdZZKb/qusoAWUnmLg9R8OLSO473mBLWHowx8clbX5/+mKDEI+v7GzjoK9tRPZMMcoTrg==", - "requires": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.1", - "@types/node": "^13.7.0", - "long": "^4.0.0" - } - } } }, "google-p12-pem": { @@ -3238,9 +3477,9 @@ "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==" }, "is-callable": { - "version": "1.1.5", - 
"resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", - "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.0.tgz", + "integrity": "sha512-pyVD9AaGLxtg6srb2Ng6ynWJqkHU9bEM087AKck0w8QwDarTfNcpIYoU8x8Hv2Icm8u6kFJM18Dag8lyqGkviw==", "dev": true }, "is-date-object": { @@ -3288,12 +3527,12 @@ "dev": true }, "is-regex": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", - "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.0.tgz", + "integrity": "sha512-iI97M8KTWID2la5uYXlkbSDQIg4F6o1sYboZKKTDpnDQMLtUL86zxhgDet3Q2SriaYsyGqZ6Mn2SjbRKeLHdqw==", "dev": true, "requires": { - "has": "^1.0.3" + "has-symbols": "^1.0.1" } }, "is-stream": { @@ -3403,6 +3642,15 @@ "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" }, + "json5": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "dev": true, + "requires": { + "minimist": "^1.2.0" + } + }, "jsprim": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", @@ -3569,11 +3817,6 @@ "mv": "~2", "safe-json-stringify": "~1" } - }, - "yn": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yn/-/yn-4.0.0.tgz", - "integrity": "sha512-huWiiCS4TxKc4SfgmTwW1K7JmXPPAmuXWYy4j9qjQo4+27Kni8mGhAAi1cloRWmBe2EqcLgt3IGqQoRL/MtPgg==" } } }, @@ -3756,9 +3999,9 @@ "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" }, "metrics-sharelatex": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.6.2.tgz", - "integrity": "sha512-bOLfkSCexiPgB96hdXhoOWyvvrwscgjeZPEqdcJ7BTGxY59anzvymNf5hTGJ1RtS4sblDKxITw3L5a+gYKhRYQ==", + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.7.0.tgz", + "integrity": "sha512-fv7TsM2pVhY/VuHqP5p9Ip2BhHfGPrP0e2TuZGCMK9/J83ZuGCrxkpaU3OOz31CWcmcQ5am4Du70EZAaIdA/2A==", "requires": { "@google-cloud/debug-agent": "^3.0.0", "@google-cloud/profiler": "^0.2.3", @@ -3768,6 +4011,13 @@ "prom-client": "^11.1.3", "underscore": "~1.6.0", "yn": "^3.1.1" + }, + "dependencies": { + "yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==" + } } }, "mime": { @@ -3815,9 +4065,9 @@ } }, "mocha": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-7.1.1.tgz", - "integrity": "sha512-3qQsu3ijNS3GkWcccT5Zw0hf/rWvu1fTN9sPvEd81hlwsr30GX2GcDSSoBxo24IR8FelmrAydGC6/1J5QQP4WA==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-7.2.0.tgz", + "integrity": "sha512-O9CIypScywTVpNaRrCAgoUnJgozpIofjKUYmJhiCIJMiuYnLI6otcb1/kpW9/n/tJODHGZ7i8aLQoDVsMtOKQQ==", "dev": true, "requires": { "ansi-colors": "3.2.3", @@ -3833,7 +4083,7 @@ "js-yaml": "3.13.1", "log-symbols": "3.0.0", "minimatch": "3.0.4", - "mkdirp": 
"0.5.3", + "mkdirp": "0.5.5", "ms": "2.1.1", "node-environment-flags": "1.0.6", "object.assign": "4.1.0", @@ -3900,21 +4150,6 @@ "path-exists": "^3.0.0" } }, - "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", - "dev": true - }, - "mkdirp": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.3.tgz", - "integrity": "sha512-P+2gwrFqx8lhew375MQHHeTlY8AuOJSrGf0R5ddkEndUkmwpgUob/vQuBD1V22/Cw1/lJr4x+EjllSezBThzBg==", - "dev": true, - "requires": { - "minimist": "^1.2.5" - } - }, "ms": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", @@ -3973,16 +4208,6 @@ "y18n": "^4.0.0", "yargs-parser": "^13.1.2" } - }, - "yargs-parser": { - "version": "13.1.2", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", - "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", - "dev": true, - "requires": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - } } } }, @@ -3998,13 +4223,13 @@ "optional": true }, "mongodb": { - "version": "3.5.5", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.5.5.tgz", - "integrity": "sha512-GCjDxR3UOltDq00Zcpzql6dQo1sVry60OXJY3TDmFc2SWFY6c8Gn1Ardidc5jDirvJrx2GC3knGOImKphbSL3A==", + "version": "3.5.9", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.5.9.tgz", + "integrity": "sha512-vXHBY1CsGYcEPoVWhwgxIBeWqP3dSu9RuRDsoLRPTITrcrgm1f0Ubu1xqF9ozMwv53agmEiZm0YGo+7WL3Nbug==", "dev": true, "requires": { "bl": "^2.2.0", - "bson": "^1.1.1", + "bson": "^1.1.4", "denque": "^1.4.1", "require_optional": "^1.0.1", "safe-buffer": "^5.1.2", @@ -4086,9 +4311,9 @@ "dev": true }, "nise": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/nise/-/nise-4.0.3.tgz", - "integrity": "sha512-EGlhjm7/4KvmmE6B/UFsKh7eHykRl9VH+au8dduHLCyWUO/hr7+N+WtTvDUwc9zHuM1IaIJs/0lQ6Ag1jDkQSg==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/nise/-/nise-4.0.4.tgz", + "integrity": "sha512-bTTRUNlemx6deJa+ZyoCUTRvH3liK5+N6VQZ4NIw90AgDXY6iPnsqplNFf6STcj+ePk0H/xqxnP75Lr0J0Fq3A==", "dev": true, "requires": { "@sinonjs/commons": "^1.7.0", @@ -4186,9 +4411,9 @@ "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" }, "object-inspect": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", - "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.8.0.tgz", + "integrity": "sha512-jLdtEOB112fORuypAyl/50VRVIBIdVQOSUUGQHzJ4xBSbit81zRarz7GThkEFZy1RceYrWYcPcBFPQwHyAc1gA==", "dev": true }, "object-keys": { @@ -4324,9 +4549,9 @@ } }, "parse-duration": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.2.tgz", - "integrity": "sha512-0qfMZyjOUFBeEIvJ5EayfXJqaEXxQ+Oj2b7tWJM3hvEXvXsYCk05EDVI23oYnEw2NaFYUWdABEVPBvBMh8L/pA==" + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.3.tgz", + "integrity": "sha512-hMOZHfUmjxO5hMKn7Eft+ckP2M4nV4yzauLXiw3PndpkASnx5r8pDAMcOAiqxoemqWjMWmz4fOHQM6n6WwETXw==" }, "parse-json": { "version": "2.2.0", @@ -4441,9 +4666,9 @@ "dev": true }, "prettier-eslint": { - "version": 
"9.0.1", - "resolved": "https://registry.npmjs.org/prettier-eslint/-/prettier-eslint-9.0.1.tgz", - "integrity": "sha512-KZT65QTosSAqBBqmrC+RpXbsMRe7Os2YSR9cAfFbDlyPAopzA/S5bioiZ3rpziNQNSJaOxmtXSx07EQ+o2Dlug==", + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/prettier-eslint/-/prettier-eslint-9.0.2.tgz", + "integrity": "sha512-u6EQqxUhaGfra9gy9shcR7MT7r/2twwEfRGy1tfzyaJvLQwSg34M9IU5HuF7FsLW2QUgr5VIUc56EPWibw1pdw==", "dev": true, "requires": { "@typescript-eslint/parser": "^1.10.2", @@ -5031,9 +5256,9 @@ } }, "protobufjs": { - "version": "6.8.8", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", - "integrity": "sha512-AAmHtD5pXgZfi7GMpllpO3q1Xw1OYldr+dMUlAnffGTAhqkg72WdmSY71uKBF/JuyiKs8psYbtKrhi0ASCD8qw==", + "version": "6.9.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.9.0.tgz", + "integrity": "sha512-LlGVfEWDXoI/STstRDdZZKb/qusoAWUnmLg9R8OLSO473mBLWHowx8clbX5/+mKDEI+v7GzjoK9tRPZMMcoTrg==", "requires": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -5045,8 +5270,8 @@ "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.0", - "@types/node": "^10.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", "long": "^4.0.0" } }, @@ -5460,9 +5685,9 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "sandboxed-module": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz", - "integrity": "sha512-wXiA6ULoGjCDwjn6evQF/Qi+oe77P+aCxizUktLBBKdqNbTxwec4GySJcS+O7iZFhme2ex04m+14KgknKKqFsw==", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.4.tgz", + "integrity": "sha512-AwEPOdO8mg/wJjr876yCHP2DHqVN0MaggEXhp6IIf3bcI5cYoQl9QrrCHSrvToHjvdEiS5x4TVZRgjD2bEmNTA==", "dev": true, "requires": { "require-like": "0.1.2", @@ -5572,13 +5797,13 @@ "integrity": "sha512-meQNNykwecVxdu1RlYMKpQx4+wefIYpmxi6gexo/KAbwquJrBUrBmKYJrE8KFkVQAAVWEnwNdu21PgrD77J3xA==" }, "sinon": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.1.tgz", - "integrity": "sha512-iTTyiQo5T94jrOx7X7QLBZyucUJ2WvL9J13+96HMfm2CGoJYbIPqRfl6wgNcqmzk0DI28jeGx5bUTXizkrqBmg==", + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.2.tgz", + "integrity": "sha512-0uF8Q/QHkizNUmbK3LRFqx5cpTttEVXudywY9Uwzy8bTfZUhljZ7ARzSxnRHWYWtVTeh4Cw+tTb3iU21FQVO9A==", "dev": true, "requires": { - "@sinonjs/commons": "^1.7.0", - "@sinonjs/fake-timers": "^6.0.0", + "@sinonjs/commons": "^1.7.2", + "@sinonjs/fake-timers": "^6.0.1", "@sinonjs/formatio": "^5.0.1", "@sinonjs/samsam": "^5.0.3", "diff": "^4.0.2", @@ -5664,9 +5889,9 @@ } }, "spdx-correct": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", - "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", "dev": true, "requires": { "spdx-expression-parse": "^3.0.0", @@ -5674,15 +5899,15 @@ } }, "spdx-exceptions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", - "integrity": 
"sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", "dev": true }, "spdx-expression-parse": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", - "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", "dev": true, "requires": { "spdx-exceptions": "^2.1.0", @@ -5803,41 +6028,19 @@ } }, "string.prototype.trimend": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.0.tgz", - "integrity": "sha512-EEJnGqa/xNfIg05SxiPSqRS7S9qwDhYts1TSLR1BQfYUfPe1stofgGKvwERK9+9yf+PpfBMlpBaCHucXGPQfUA==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.1.tgz", + "integrity": "sha512-LRPxFUaTtpqYsTeNKaFOw3R4bxIzWOnbQ837QfBylo8jIxtcbK/A/sMV7Q+OAV/vWo+7s25pOE10KYSjaSO06g==", "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5" } }, - "string.prototype.trimleft": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", - "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", - "dev": true, - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.5", - "string.prototype.trimstart": "^1.0.0" - } - }, - "string.prototype.trimright": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", - "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", - "dev": true, - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.5", - "string.prototype.trimend": "^1.0.0" - } - }, "string.prototype.trimstart": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.0.tgz", - "integrity": "sha512-iCP8g01NFYiiBOnwG1Xc3WZLyoo+RuBymwIlWncShXDDJYWN6DbnM3odslBJdgCdRlq94B5s63NWAZlcn2CS4w==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.1.tgz", + "integrity": "sha512-XxZn+QpvrBI1FOcg6dIpxUPgWCPuNXvMD72aaRaUQv1eD4e/Qy8i/hFTe0BUmD60p/QA6bh1avmuPTfNjqVWRw==", "dev": true, "requires": { "define-properties": "^1.1.3", @@ -6071,6 +6274,18 @@ } } }, + "tsconfig-paths": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz", + "integrity": "sha512-dRcuzokWhajtZWkQsDVKbWyY+jgcLC5sqJhg2PSgf4ZkH2aHPvaOY8YWGhmjb68b5qqTfasSsDO9k7RUiEmZAw==", + "dev": true, + "requires": { + "@types/json5": "^0.0.29", + "json5": "^1.0.1", + "minimist": "^1.2.0", + "strip-bom": "^3.0.0" + } + }, "tslib": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.1.tgz", @@ -6133,9 +6348,9 @@ } }, "typescript": { - "version": "3.8.2", - "resolved": 
"https://registry.npmjs.org/typescript/-/typescript-3.8.2.tgz", - "integrity": "sha512-EgOVgL/4xfVrCMbhYKUQTdF37SQn4Iw73H5BgCrF1Abdun7Kwy/QZsE/ssAy0y4LxBbvua3PIbFsbRczWWnDdQ==", + "version": "3.9.6", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.6.tgz", + "integrity": "sha512-Pspx3oKAPJtjNwE92YS05HQoY7z2SFyOpHo9MqJor3BXAGNaPUs83CuVp9VISFkSjyRfiTpmKuAYGJB7S7hOxw==", "dev": true }, "underscore": { @@ -6190,11 +6405,6 @@ "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==" }, - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" - }, "v8-compile-cache": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", @@ -6241,9 +6451,9 @@ }, "dependencies": { "acorn": { - "version": "5.7.3", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.3.tgz", - "integrity": "sha512-T/zvzYRfbVojPWahDsE5evJdHb3oJoQfFbsrKM7w5Zcs++Tr257tia3BmMP8XYVjp1S9RZXQMh7gao96BlqZOw==", + "version": "5.7.4", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.4.tgz", + "integrity": "sha512-1D++VG7BhrtvQpNbBzovKNc1FLGGEE/oGe7b9xJm/RFHMBeUaUGpluV9RLjZa47YFdPcDAenEYuq9pQPcMdLJg==", "dev": true }, "acorn-jsx": { @@ -6562,9 +6772,9 @@ } }, "yn": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", - "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yn/-/yn-4.0.0.tgz", + "integrity": "sha512-huWiiCS4TxKc4SfgmTwW1K7JmXPPAmuXWYy4j9qjQo4+27Kni8mGhAAi1cloRWmBe2EqcLgt3IGqQoRL/MtPgg==" } } } diff --git a/services/filestore/package.json b/services/filestore/package.json index f8f468cdde..8efec070aa 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -13,7 +13,7 @@ "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "start": "node $NODE_APP_OPTIONS app.js", "nodemon": "nodemon --config nodemon.json", - "lint": "node_modules/.bin/eslint .", + "lint": "node_modules/.bin/eslint app test *.js", "format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different", "format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write", "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", @@ -22,45 +22,46 @@ "dependencies": { "@google-cloud/storage": "^4.7.0", "@overleaf/o-error": "^3.0.0", - "aws-sdk": "^2.648.0", + "@overleaf/object-persistor": "git+https://github.com/overleaf/object-persistor.git", + "aws-sdk": "^2.710.0", "body-parser": "^1.19.0", "express": "^4.17.1", "fast-crc32c": "^2.0.0", "glob": "^7.1.6", "lodash.once": "^4.1.1", - "logger-sharelatex": "2.1.1", - "metrics-sharelatex": "^2.6.2", + "logger-sharelatex": "^2.1.1", + "metrics-sharelatex": "^2.7.0", "node-uuid": "~1.4.8", "range-parser": "^1.2.1", "request": "^2.88.2", "request-promise-native": "^1.0.8", "settings-sharelatex": "^1.1.0", - "stream-buffers": "~0.2.5", + "stream-buffers": "~0.2.6", "tiny-async-pool": "^1.1.0" }, "devDependencies": { "babel-eslint": "^10.1.0", - "bunyan": "^1.8.12", + "bunyan": "^1.8.14", "chai": "4.2.0", "chai-as-promised": "^7.1.1", "disrequire": "^1.1.0", "eslint": "^6.8.0", - "eslint-config-prettier": 
"^6.10.0", - "eslint-config-standard": "^14.1.0", + "eslint-config-prettier": "^6.11.0", + "eslint-config-standard": "^14.1.1", "eslint-plugin-chai-expect": "^2.1.0", - "eslint-plugin-chai-friendly": "^0.5.0", - "eslint-plugin-import": "^2.20.1", + "eslint-plugin-chai-friendly": "^0.6.0", + "eslint-plugin-import": "^2.22.0", "eslint-plugin-mocha": "^6.3.0", - "eslint-plugin-node": "^11.0.0", - "eslint-plugin-prettier": "^3.1.2", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-prettier": "^3.1.4", "eslint-plugin-promise": "^4.2.1", "eslint-plugin-standard": "^4.0.1", - "mocha": "7.1.1", - "mongodb": "^3.5.5", - "prettier-eslint": "^9.0.1", + "mocha": "7.2.0", + "mongodb": "^3.5.9", + "prettier-eslint": "^9.0.2", "prettier-eslint-cli": "^5.0.0", - "sandboxed-module": "2.0.3", - "sinon": "9.0.1", + "sandboxed-module": "2.0.4", + "sinon": "9.0.2", "sinon-chai": "^3.5.0", "streamifier": "^0.1.1", "timekeeper": "^2.2.0" diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 6bc4f32719..076198a38b 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -114,6 +114,7 @@ class FilestoreApp { disrequire(Path.resolve(__dirname, '../../../app/js', file)) }) disrequire(Path.resolve(__dirname, '../../../app')) + disrequire('@overleaf/object-persistor') return require('../../../app') } diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 8382a48de5..6f7503410f 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -590,7 +590,7 @@ describe('Filestore', function() { describe('when copyOnMiss is disabled', function() { beforeEach(function() { - Settings.filestore.fallback.copyOnMiss = false + app.persistor.settings.copyOnMiss = false }) it('should fetch the file', async function() { @@ -611,7 +611,7 @@ describe('Filestore', function() { describe('when copyOnMiss is enabled', function() { beforeEach(function() { - Settings.filestore.fallback.copyOnMiss = true + app.persistor.settings.copyOnMiss = true }) it('should fetch the file', async function() { @@ -656,7 +656,7 @@ describe('Filestore', function() { describe('when copyOnMiss is false', function() { beforeEach(async function() { - Settings.filestore.fallback.copyOnMiss = false + app.persistor.settings.copyOnMiss = false const response = await rp(opts) expect(response.statusCode).to.equal(200) @@ -702,7 +702,7 @@ describe('Filestore', function() { describe('when copyOnMiss is true', function() { beforeEach(async function() { - Settings.filestore.fallback.copyOnMiss = true + app.persistor.settings.copyOnMiss = true const response = await rp(opts) expect(response.statusCode).to.equal(200) diff --git a/services/filestore/test/acceptance/js/TestHelper.js b/services/filestore/test/acceptance/js/TestHelper.js index df57303de1..935a8b50e7 100644 --- a/services/filestore/test/acceptance/js/TestHelper.js +++ b/services/filestore/test/acceptance/js/TestHelper.js @@ -34,11 +34,11 @@ function streamToString(stream) { async function uploadStringToPersistor(persistor, bucket, key, content) { const fileStream = streamifier.createReadStream(content) - await persistor.promises.sendStream(bucket, key, fileStream) + await persistor.sendStream(bucket, key, fileStream) } async function getStringFromPersistor(persistor, bucket, key) { - const stream = await 
persistor.promises.getFileStream(bucket, key, {}) + const stream = await persistor.getObjectStream(bucket, key, {}) return streamToString(stream) } diff --git a/services/filestore/test/unit/js/FSPersistorTests.js b/services/filestore/test/unit/js/FSPersistorTests.js deleted file mode 100644 index c8255987ba..0000000000 --- a/services/filestore/test/unit/js/FSPersistorTests.js +++ /dev/null @@ -1,353 +0,0 @@ -const sinon = require('sinon') -const chai = require('chai') -const { expect } = chai -const SandboxedModule = require('sandboxed-module') -const Errors = require('../../../app/js/Errors') -const StreamModule = require('stream') - -chai.use(require('sinon-chai')) -chai.use(require('chai-as-promised')) - -const modulePath = '../../../app/js/FSPersistor.js' - -describe('FSPersistorTests', function() { - const stat = { size: 4, isFile: sinon.stub().returns(true) } - const fd = 1234 - const writeStream = 'writeStream' - const remoteStream = 'remoteStream' - const tempFile = '/tmp/potato.txt' - const location = '/foo' - const error = new Error('guru meditation error') - const md5 = 'ffffffff' - - const files = ['animals/wombat.tex', 'vegetables/potato.tex'] - const globs = [`${location}/${files[0]}`, `${location}/${files[1]}`] - const filteredFilenames = ['animals_wombat.tex', 'vegetables_potato.tex'] - let fs, stream, LocalFileWriter, FSPersistor, glob, readStream, crypto, Hash - - beforeEach(function() { - readStream = { - name: 'readStream', - on: sinon.stub().yields(), - pipe: sinon.stub() - } - fs = { - createReadStream: sinon.stub().returns(readStream), - createWriteStream: sinon.stub().returns(writeStream), - unlink: sinon.stub().yields(), - open: sinon.stub().yields(null, fd), - stat: sinon.stub().yields(null, stat) - } - glob = sinon.stub().yields(null, globs) - stream = { - pipeline: sinon.stub().yields(), - Transform: StreamModule.Transform - } - LocalFileWriter = { - promises: { - writeStream: sinon.stub().resolves(tempFile), - deleteFile: sinon.stub().resolves() - } - } - Hash = { - end: sinon.stub(), - read: sinon.stub().returns(md5), - digest: sinon.stub().returns(md5), - setEncoding: sinon.stub() - } - crypto = { - createHash: sinon.stub().returns(Hash) - } - FSPersistor = SandboxedModule.require(modulePath, { - requires: { - './LocalFileWriter': LocalFileWriter, - './Errors': Errors, - fs, - glob, - stream, - crypto, - // imported by PersistorHelper but otherwise unused here - 'logger-sharelatex': {}, - 'metrics-sharelatex': {} - }, - globals: { console } - }) - }) - - describe('sendFile', function() { - const localFilesystemPath = '/path/to/local/file' - it('should copy the file', async function() { - await FSPersistor.promises.sendFile( - location, - files[0], - localFilesystemPath - ) - expect(fs.createReadStream).to.have.been.calledWith(localFilesystemPath) - expect(fs.createWriteStream).to.have.been.calledWith( - `${location}/${filteredFilenames[0]}` - ) - expect(stream.pipeline).to.have.been.calledWith(readStream, writeStream) - }) - - it('should return an error if the file cannot be stored', async function() { - stream.pipeline.yields(error) - await expect( - FSPersistor.promises.sendFile(location, files[0], localFilesystemPath) - ).to.eventually.be.rejected.and.have.property('cause', error) - }) - }) - - describe('sendStream', function() { - it('should send the stream to LocalFileWriter', async function() { - await FSPersistor.promises.sendStream(location, files[0], remoteStream) - expect(LocalFileWriter.promises.writeStream).to.have.been.calledWith( - remoteStream 
- ) - }) - - it('should delete the temporary file', async function() { - await FSPersistor.promises.sendStream(location, files[0], remoteStream) - expect(LocalFileWriter.promises.deleteFile).to.have.been.calledWith( - tempFile - ) - }) - - it('should return the error from LocalFileWriter', async function() { - LocalFileWriter.promises.writeStream.rejects(error) - await expect( - FSPersistor.promises.sendStream(location, files[0], remoteStream) - ).to.eventually.be.rejectedWith(error) - }) - - it('should send the temporary file to the filestore', async function() { - await FSPersistor.promises.sendStream(location, files[0], remoteStream) - expect(fs.createReadStream).to.have.been.calledWith(tempFile) - }) - - describe('when the md5 hash does not match', function() { - it('should return a write error', async function() { - await expect( - FSPersistor.promises.sendStream( - location, - files[0], - remoteStream, - '00000000' - ) - ) - .to.eventually.be.rejected.and.be.an.instanceOf(Errors.WriteError) - .and.have.property('message', 'md5 hash mismatch') - }) - - it('deletes the copied file', async function() { - try { - await FSPersistor.promises.sendStream( - location, - files[0], - remoteStream, - '00000000' - ) - } catch (_) {} - expect(LocalFileWriter.promises.deleteFile).to.have.been.calledWith( - `${location}/${filteredFilenames[0]}` - ) - }) - }) - }) - - describe('getFileStream', function() { - it('should use correct file location', async function() { - await FSPersistor.promises.getFileStream(location, files[0], {}) - expect(fs.open).to.have.been.calledWith( - `${location}/${filteredFilenames[0]}` - ) - }) - - it('should pass the options to createReadStream', async function() { - await FSPersistor.promises.getFileStream(location, files[0], { - start: 0, - end: 8 - }) - expect(fs.createReadStream).to.have.been.calledWith(null, { - start: 0, - end: 8, - fd - }) - }) - - it('should give a NotFoundError if the file does not exist', async function() { - const err = new Error() - err.code = 'ENOENT' - fs.open.yields(err) - - await expect(FSPersistor.promises.getFileStream(location, files[0], {})) - .to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) - .and.have.property('cause', err) - }) - - it('should wrap any other error', async function() { - fs.open.yields(error) - await expect(FSPersistor.promises.getFileStream(location, files[0], {})) - .to.eventually.be.rejectedWith('failed to open file for streaming') - .and.be.an.instanceOf(Errors.ReadError) - .and.have.property('cause', error) - }) - }) - - describe('getFileSize', function() { - const badFilename = 'neenaw.tex' - const size = 65536 - const noentError = new Error('not found') - noentError.code = 'ENOENT' - - beforeEach(function() { - fs.stat - .yields(error) - .withArgs(`${location}/${filteredFilenames[0]}`) - .yields(null, { size }) - .withArgs(`${location}/${badFilename}`) - .yields(noentError) - }) - - it('should return the file size', async function() { - expect( - await FSPersistor.promises.getFileSize(location, files[0]) - ).to.equal(size) - }) - - it('should throw a NotFoundError if the file does not exist', async function() { - await expect( - FSPersistor.promises.getFileSize(location, badFilename) - ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) - }) - - it('should wrap any other error', async function() { - await expect(FSPersistor.promises.getFileSize(location, 'raccoon')) - .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) - .and.have.property('cause', error) - 
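// The md5-mismatch cases in the sendStream describe above pin down a
// verify-then-clean-up contract: when a caller supplies a source hash and the
// written copy does not match, the persistor raises a WriteError with the
// message 'md5 hash mismatch' and deletes the bad copy. A minimal sketch of
// that contract, assuming the { message, info } error constructor shape used by
// the updated tests, and reading the file back for brevity rather than hashing
// the stream as it is written (this is illustrative, not the FSPersistor code):
const fs = require('fs')
const crypto = require('crypto')
const { Errors } = require('@overleaf/object-persistor')

async function verifyMd5OrCleanUp(fsPath, sourceMd5) {
  const writtenMd5 = crypto
    .createHash('md5')
    .update(await fs.promises.readFile(fsPath))
    .digest('hex')
  if (writtenMd5 !== sourceMd5) {
    // remove the mismatched copy, as the 'deletes the copied file' case expects
    await fs.promises.unlink(fsPath)
    throw new Errors.WriteError({
      message: 'md5 hash mismatch',
      info: { sourceMd5, writtenMd5 }
    })
  }
}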
}) - }) - - describe('copyFile', function() { - it('Should open the source for reading', async function() { - await FSPersistor.promises.copyFile(location, files[0], files[1]) - expect(fs.createReadStream).to.have.been.calledWith( - `${location}/${filteredFilenames[0]}` - ) - }) - - it('Should open the target for writing', async function() { - await FSPersistor.promises.copyFile(location, files[0], files[1]) - expect(fs.createWriteStream).to.have.been.calledWith( - `${location}/${filteredFilenames[1]}` - ) - }) - - it('Should pipe the source to the target', async function() { - await FSPersistor.promises.copyFile(location, files[0], files[1]) - expect(stream.pipeline).to.have.been.calledWith(readStream, writeStream) - }) - }) - - describe('deleteFile', function() { - it('Should call unlink with correct options', async function() { - await FSPersistor.promises.deleteFile(location, files[0]) - expect(fs.unlink).to.have.been.calledWith( - `${location}/${filteredFilenames[0]}` - ) - }) - - it('Should propagate the error', async function() { - fs.unlink.yields(error) - await expect( - FSPersistor.promises.deleteFile(location, files[0]) - ).to.eventually.be.rejected.and.have.property('cause', error) - }) - }) - - describe('deleteDirectory', function() { - it('Should call glob with correct options', async function() { - await FSPersistor.promises.deleteDirectory(location, files[0]) - expect(glob).to.have.been.calledWith( - `${location}/${filteredFilenames[0]}*` - ) - }) - - it('Should call unlink on the returned files', async function() { - await FSPersistor.promises.deleteDirectory(location, files[0]) - for (const filename of globs) { - expect(fs.unlink).to.have.been.calledWith(filename) - } - }) - - it('Should propagate the error', async function() { - glob.yields(error) - await expect( - FSPersistor.promises.deleteDirectory(location, files[0]) - ).to.eventually.be.rejected.and.have.property('cause', error) - }) - }) - - describe('checkIfFileExists', function() { - const badFilename = 'pototo' - const noentError = new Error('not found') - noentError.code = 'ENOENT' - - beforeEach(function() { - fs.stat - .yields(error) - .withArgs(`${location}/${filteredFilenames[0]}`) - .yields(null, {}) - .withArgs(`${location}/${badFilename}`) - .yields(noentError) - }) - - it('Should call stat with correct options', async function() { - await FSPersistor.promises.checkIfFileExists(location, files[0]) - expect(fs.stat).to.have.been.calledWith( - `${location}/${filteredFilenames[0]}` - ) - }) - - it('Should return true for existing files', async function() { - expect( - await FSPersistor.promises.checkIfFileExists(location, files[0]) - ).to.equal(true) - }) - - it('Should return false for non-existing files', async function() { - expect( - await FSPersistor.promises.checkIfFileExists(location, badFilename) - ).to.equal(false) - }) - - it('should wrap the error if there is a problem', async function() { - await expect(FSPersistor.promises.checkIfFileExists(location, 'llama')) - .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) - .and.have.property('cause', error) - }) - }) - - describe('directorySize', function() { - it('should wrap the error', async function() { - glob.yields(error) - await expect(FSPersistor.promises.directorySize(location, files[0])) - .to.eventually.be.rejected.and.be.an.instanceOf(Errors.ReadError) - .and.include({ cause: error }) - .and.have.property('info') - .which.includes({ location, name: files[0] }) - }) - - it('should filter the directory name', async function() { 
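// The fixtures at the top of this spec encode the key-flattening convention the
// FS persistor tests rely on: a key like 'animals/wombat.tex' is stored on disk
// as 'animals_wombat.tex', and directory-level operations glob on that
// flattened prefix. A one-line illustration of the mapping, inferred from the
// fixture data rather than taken from the persistor itself:
const flattenKey = key => key.replace(/\//g, '_')
// flattenKey('animals/wombat.tex') === 'animals_wombat.tex'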
- await FSPersistor.promises.directorySize(location, files[0]) - expect(glob).to.have.been.calledWith( - `${location}/${filteredFilenames[0]}_*` - ) - }) - - it('should sum directory files size', async function() { - expect( - await FSPersistor.promises.directorySize(location, files[0]) - ).to.equal(stat.size * files.length) - }) - }) -}) diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js index cd1d19ef02..fe446bd478 100644 --- a/services/filestore/test/unit/js/FileControllerTests.js +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -32,8 +32,8 @@ describe('FileController', function() { beforeEach(function() { PersistorManager = { sendStream: sinon.stub().yields(), - copyFile: sinon.stub().yields(), - deleteFile: sinon.stub().yields() + copyObject: sinon.stub().resolves(), + deleteObject: sinon.stub().yields() } FileHandler = { @@ -216,7 +216,9 @@ describe('FileController', function() { }) it('should return a 404 is the file is not found', function(done) { - FileHandler.getFileSize.yields(new Errors.NotFoundError()) + FileHandler.getFileSize.yields( + new Errors.NotFoundError({ message: 'not found', info: {} }) + ) res.sendStatus = code => { expect(code).to.equal(404) @@ -262,7 +264,7 @@ describe('FileController', function() { it('should send bucket name and both keys to PersistorManager', function(done) { res.sendStatus = code => { code.should.equal(200) - expect(PersistorManager.copyFile).to.have.been.calledWith( + expect(PersistorManager.copyObject).to.have.been.calledWith( bucket, oldKey, key @@ -273,7 +275,9 @@ describe('FileController', function() { }) it('should send a 404 if the original file was not found', function(done) { - PersistorManager.copyFile.yields(new Errors.NotFoundError()) + PersistorManager.copyObject.rejects( + new Errors.NotFoundError({ message: 'not found', info: {} }) + ) res.sendStatus = code => { code.should.equal(404) done() @@ -281,10 +285,12 @@ describe('FileController', function() { FileController.copyFile(req, res, next) }) - it('should send an error if there was an error', function() { - PersistorManager.copyFile.yields(error) - FileController.copyFile(req, res, next) - expect(next).to.have.been.calledWith(error) + it('should send an error if there was an error', function(done) { + PersistorManager.copyObject.rejects(error) + FileController.copyFile(req, res, err => { + expect(err).to.equal(error) + done() + }) }) }) diff --git a/services/filestore/test/unit/js/FileConverterTests.js b/services/filestore/test/unit/js/FileConverterTests.js index 72d6413417..671544098e 100644 --- a/services/filestore/test/unit/js/FileConverterTests.js +++ b/services/filestore/test/unit/js/FileConverterTests.js @@ -2,6 +2,7 @@ const sinon = require('sinon') const chai = require('chai') const { expect } = chai const SandboxedModule = require('sandboxed-module') +const { Errors } = require('@overleaf/object-persistor') const modulePath = '../../../app/js/FileConverter.js' @@ -22,6 +23,8 @@ describe('FileConverter', function() { promises: sinon.stub().resolves(destPath) } + const ObjectPersistor = { Errors } + FileConverter = SandboxedModule.require(modulePath, { requires: { './SafeExec': SafeExec, @@ -29,7 +32,8 @@ describe('FileConverter', function() { inc: sinon.stub(), Timer: sinon.stub().returns({ done: sinon.stub() }) }, - 'settings-sharelatex': Settings + 'settings-sharelatex': Settings, + '@overleaf/object-persistor': ObjectPersistor } }) }) diff --git 
a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index 22fe8d1503..ecce74d870 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -4,6 +4,7 @@ const { expect } = chai const modulePath = '../../../app/js/FileHandler.js' const SandboxedModule = require('sandboxed-module') const { ObjectId } = require('mongodb') +const { Errors } = require('@overleaf/object-persistor') chai.use(require('sinon-chai')) chai.use(require('chai-as-promised')) @@ -32,17 +33,15 @@ describe('FileHandler', function() { beforeEach(function() { PersistorManager = { - promises: { - getFileStream: sinon.stub().resolves(sourceStream), - getRedirectUrl: sinon.stub().resolves(redirectUrl), - checkIfFileExists: sinon.stub().resolves(), - deleteFile: sinon.stub().resolves(), - deleteDirectory: sinon.stub().resolves(), - sendStream: sinon.stub().resolves(), - insertFile: sinon.stub().resolves(), - sendFile: sinon.stub().resolves(), - directorySize: sinon.stub().resolves() - } + getObjectStream: sinon.stub().resolves(sourceStream), + getRedirectUrl: sinon.stub().resolves(redirectUrl), + checkIfObjectExists: sinon.stub().resolves(), + deleteObject: sinon.stub().resolves(), + deleteDirectory: sinon.stub().resolves(), + sendStream: sinon.stub().resolves(), + insertFile: sinon.stub().resolves(), + sendFile: sinon.stub().resolves(), + directorySize: sinon.stub().resolves() } LocalFileWriter = { // the callback style is used for detached cleanup calls @@ -73,6 +72,8 @@ describe('FileHandler', function() { createReadStream: sinon.stub().returns(readStream) } + const ObjectPersistor = { Errors } + FileHandler = SandboxedModule.require(modulePath, { requires: { './PersistorManager': PersistorManager, @@ -81,6 +82,7 @@ describe('FileHandler', function() { './KeyBuilder': KeyBuilder, './ImageOptimiser': ImageOptimiser, 'settings-sharelatex': Settings, + '@overleaf/object-persistor': ObjectPersistor, fs: fs }, globals: { console } @@ -93,7 +95,7 @@ describe('FileHandler', function() { it('should send file to the filestore', function(done) { FileHandler.insertFile(bucket, key, stream, err => { expect(err).not.to.exist - expect(PersistorManager.promises.sendStream).to.have.been.calledWith( + expect(PersistorManager.sendStream).to.have.been.calledWith( bucket, key, stream @@ -105,8 +107,7 @@ describe('FileHandler', function() { it('should not make a delete request for the convertedKey folder', function(done) { FileHandler.insertFile(bucket, key, stream, err => { expect(err).not.to.exist - expect(PersistorManager.promises.deleteDirectory).not.to.have.been - .called + expect(PersistorManager.deleteDirectory).not.to.have.been.called done() }) }) @@ -137,9 +138,10 @@ describe('FileHandler', function() { it('should delete the convertedKey folder', function(done) { FileHandler.insertFile(bucket, key, stream, err => { expect(err).not.to.exist - expect( - PersistorManager.promises.deleteDirectory - ).to.have.been.calledWith(bucket, convertedFolderKey) + expect(PersistorManager.deleteDirectory).to.have.been.calledWith( + bucket, + convertedFolderKey + ) done() }) }) @@ -150,7 +152,7 @@ describe('FileHandler', function() { it('should tell the filestore manager to delete the file', function(done) { FileHandler.deleteFile(bucket, key, err => { expect(err).not.to.exist - expect(PersistorManager.promises.deleteFile).to.have.been.calledWith( + expect(PersistorManager.deleteObject).to.have.been.calledWith( bucket, key ) @@ 
-161,8 +163,7 @@ describe('FileHandler', function() { it('should not tell the filestore manager to delete the cached folder', function(done) { FileHandler.deleteFile(bucket, key, err => { expect(err).not.to.exist - expect(PersistorManager.promises.deleteDirectory).not.to.have.been - .called + expect(PersistorManager.deleteDirectory).not.to.have.been.called done() }) }) @@ -193,9 +194,10 @@ describe('FileHandler', function() { it('should delete the convertedKey folder', function(done) { FileHandler.deleteFile(bucket, key, err => { expect(err).not.to.exist - expect( - PersistorManager.promises.deleteDirectory - ).to.have.been.calledWith(bucket, convertedFolderKey) + expect(PersistorManager.deleteDirectory).to.have.been.calledWith( + bucket, + convertedFolderKey + ) done() }) }) @@ -206,9 +208,10 @@ describe('FileHandler', function() { it('should tell the filestore manager to delete the folder', function(done) { FileHandler.deleteProject(bucket, projectKey, err => { expect(err).not.to.exist - expect( - PersistorManager.promises.deleteDirectory - ).to.have.been.calledWith(bucket, projectKey) + expect(PersistorManager.deleteDirectory).to.have.been.calledWith( + bucket, + projectKey + ) done() }) }) @@ -234,7 +237,7 @@ describe('FileHandler', function() { const options = { start: 0, end: 8 } FileHandler.getFile(bucket, key, options, err => { expect(err).not.to.exist - expect(PersistorManager.promises.getFileStream).to.have.been.calledWith( + expect(PersistorManager.getObjectStream).to.have.been.calledWith( bucket, key, options @@ -265,17 +268,16 @@ describe('FileHandler', function() { it('should return the the converted stream', function() { expect(result.err).not.to.exist expect(result.stream).to.equal(readStream) - expect( - PersistorManager.promises.getFileStream - ).to.have.been.calledWith(bucket, key) + expect(PersistorManager.getObjectStream).to.have.been.calledWith( + bucket, + key + ) }) }) describe('when the file is cached', function() { beforeEach(function(done) { - PersistorManager.promises.checkIfFileExists = sinon - .stub() - .resolves(true) + PersistorManager.checkIfObjectExists = sinon.stub().resolves(true) FileHandler.getFile(bucket, key, { format: 'png' }, (err, stream) => { result = { err, stream } done() @@ -293,9 +295,10 @@ describe('FileHandler', function() { it('should return the cached stream', function() { expect(result.err).not.to.exist expect(result.stream).to.equal(sourceStream) - expect( - PersistorManager.promises.getFileStream - ).to.have.been.calledWith(bucket, convertedKey) + expect(PersistorManager.getObjectStream).to.have.been.calledWith( + bucket, + convertedKey + ) }) }) }) @@ -341,9 +344,10 @@ describe('FileHandler', function() { it('should call the persistor to get a redirect url', function(done) { FileHandler.getRedirectUrl(bucket, key, () => { - expect( - PersistorManager.promises.getRedirectUrl - ).to.have.been.calledWith(bucket, key) + expect(PersistorManager.getRedirectUrl).to.have.been.calledWith( + bucket, + key + ) done() }) }) @@ -383,7 +387,7 @@ describe('FileHandler', function() { it('should call the filestore manager to get directory size', function(done) { FileHandler.getDirectorySize(bucket, key, err => { expect(err).not.to.exist - expect(PersistorManager.promises.directorySize).to.have.been.calledWith( + expect(PersistorManager.directorySize).to.have.been.calledWith( bucket, key ) diff --git a/services/filestore/test/unit/js/GcsPersistorTests.js b/services/filestore/test/unit/js/GcsPersistorTests.js deleted file mode 100644 index 
027a63298b..0000000000 --- a/services/filestore/test/unit/js/GcsPersistorTests.js +++ /dev/null @@ -1,683 +0,0 @@ -const sinon = require('sinon') -const chai = require('chai') -const { expect } = chai -const modulePath = '../../../app/js/GcsPersistor.js' -const SandboxedModule = require('sandboxed-module') -const { ObjectId } = require('mongodb') -const asyncPool = require('tiny-async-pool') - -const Errors = require('../../../app/js/Errors') - -describe('GcsPersistorTests', function() { - const filename = '/wombat/potato.tex' - const bucket = 'womBucket' - const key = 'monKey' - const destKey = 'donKey' - const genericError = new Error('guru meditation error') - const filesSize = 33 - const md5 = 'ffffffff00000000ffffffff00000000' - const WriteStream = 'writeStream' - const redirectUrl = 'https://wombat.potato/giraffe' - - let Metrics, - Logger, - Transform, - Storage, - Fs, - GcsNotFoundError, - ReadStream, - Stream, - GcsBucket, - GcsFile, - GcsPersistor, - FileNotFoundError, - Hash, - settings, - crypto, - files - - beforeEach(function() { - settings = { - filestore: { - backend: 'gcs', - stores: { - user_files: 'user_files' - }, - gcs: { - directoryKeyRegex: /^[0-9a-fA-F]{24}\/[0-9a-fA-F]{24}/ - } - } - } - - files = [ - { - metadata: { size: 11, md5Hash: '/////wAAAAD/////AAAAAA==' }, - delete: sinon.stub() - }, - { - metadata: { size: 22, md5Hash: '/////wAAAAD/////AAAAAA==' }, - delete: sinon.stub() - } - ] - - ReadStream = { - pipe: sinon.stub().returns('readStream'), - on: sinon.stub(), - removeListener: sinon.stub() - } - ReadStream.on.withArgs('end').yields() - ReadStream.on.withArgs('pipe').yields({ - unpipe: sinon.stub(), - resume: sinon.stub(), - on: sinon.stub() - }) - - Transform = class { - on(event, callback) { - if (event === 'readable') { - callback() - } - } - - once() {} - removeListener() {} - } - - Stream = { - pipeline: sinon.stub().yields(), - Transform: Transform - } - - Metrics = { - count: sinon.stub() - } - - GcsFile = { - delete: sinon.stub().resolves(), - createReadStream: sinon.stub().returns(ReadStream), - getMetadata: sinon.stub().resolves([files[0].metadata]), - createWriteStream: sinon.stub().returns(WriteStream), - copy: sinon.stub().resolves(), - exists: sinon.stub().resolves([true]), - getSignedUrl: sinon.stub().resolves([redirectUrl]) - } - - GcsBucket = { - file: sinon.stub().returns(GcsFile), - getFiles: sinon.stub().resolves([files]) - } - - Storage = class { - constructor() { - this.interceptors = [] - } - } - Storage.prototype.bucket = sinon.stub().returns(GcsBucket) - - GcsNotFoundError = new Error('File not found') - GcsNotFoundError.code = 404 - - Fs = { - createReadStream: sinon.stub().returns(ReadStream) - } - - FileNotFoundError = new Error('File not found') - FileNotFoundError.code = 'ENOENT' - - Hash = { - end: sinon.stub(), - read: sinon.stub().returns(md5), - digest: sinon.stub().returns(md5), - setEncoding: sinon.stub() - } - crypto = { - createHash: sinon.stub().returns(Hash) - } - - Logger = { - warn: sinon.stub() - } - - GcsPersistor = SandboxedModule.require(modulePath, { - requires: { - '@google-cloud/storage': { Storage }, - 'settings-sharelatex': settings, - 'logger-sharelatex': Logger, - 'tiny-async-pool': asyncPool, - './Errors': Errors, - fs: Fs, - stream: Stream, - 'metrics-sharelatex': Metrics, - crypto - }, - globals: { console, Buffer } - }) - }) - - describe('getFileStream', function() { - describe('when called with valid parameters', function() { - let stream - - beforeEach(async function() { - stream = await 
GcsPersistor.promises.getFileStream(bucket, key) - }) - - it('returns a metered stream', function() { - expect(stream).to.be.instanceOf(Transform) - }) - - it('fetches the right key from the right bucket', function() { - expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) - expect(GcsBucket.file).to.have.been.calledWith(key) - expect(GcsFile.createReadStream).to.have.been.called - }) - - it('pipes the stream through the meter', function() { - expect(ReadStream.pipe).to.have.been.calledWith( - sinon.match.instanceOf(Transform) - ) - }) - }) - - describe('when called with a byte range', function() { - let stream - - beforeEach(async function() { - stream = await GcsPersistor.promises.getFileStream(bucket, key, { - start: 5, - end: 10 - }) - }) - - it('returns a metered stream', function() { - expect(stream).to.be.instanceOf(Transform) - }) - - it('passes the byte range on to GCS', function() { - expect(GcsFile.createReadStream).to.have.been.calledWith({ - start: 5, - end: 10 - }) - }) - }) - - describe("when the file doesn't exist", function() { - let error, stream - - beforeEach(async function() { - Transform.prototype.on = sinon.stub() - ReadStream.on.withArgs('error').yields(GcsNotFoundError) - try { - stream = await GcsPersistor.promises.getFileStream(bucket, key) - } catch (e) { - error = e - } - }) - - it('does not return a stream', function() { - expect(stream).not.to.exist - }) - - it('throws a NotFoundError', function() { - expect(error).to.be.an.instanceOf(Errors.NotFoundError) - }) - - it('wraps the error', function() { - expect(error.cause).to.exist - }) - - it('stores the bucket and key in the error', function() { - expect(error.info).to.include({ bucketName: bucket, key: key }) - }) - }) - - describe('when Gcs encounters an unkown error', function() { - let error, stream - - beforeEach(async function() { - Transform.prototype.on = sinon.stub() - ReadStream.on.withArgs('error').yields(genericError) - try { - stream = await GcsPersistor.promises.getFileStream(bucket, key) - } catch (err) { - error = err - } - }) - - it('does not return a stream', function() { - expect(stream).not.to.exist - }) - - it('throws a ReadError', function() { - expect(error).to.be.an.instanceOf(Errors.ReadError) - }) - - it('wraps the error', function() { - expect(error.cause).to.exist - }) - - it('stores the bucket and key in the error', function() { - expect(error.info).to.include({ bucketName: bucket, key: key }) - }) - }) - }) - - describe('getFile', function() { - let signedUrl - - beforeEach(async function() { - signedUrl = await GcsPersistor.promises.getRedirectUrl(bucket, key) - }) - - it('should request a signed URL', function() { - expect(GcsFile.getSignedUrl).to.have.been.called - }) - - it('should return the url', function() { - expect(signedUrl).to.equal(redirectUrl) - }) - }) - - describe('getFileSize', function() { - describe('when called with valid parameters', function() { - let size - - beforeEach(async function() { - size = await GcsPersistor.promises.getFileSize(bucket, key) - }) - - it('should return the object size', function() { - expect(size).to.equal(files[0].metadata.size) - }) - - it('should pass the bucket and key to GCS', function() { - expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) - expect(GcsBucket.file).to.have.been.calledWith(key) - expect(GcsFile.getMetadata).to.have.been.called - }) - }) - - describe('when the object is not found', function() { - let error - - beforeEach(async function() { - GcsFile.getMetadata = 
sinon.stub().rejects(GcsNotFoundError) - try { - await GcsPersistor.promises.getFileSize(bucket, key) - } catch (err) { - error = err - } - }) - - it('should return a NotFoundError', function() { - expect(error).to.be.an.instanceOf(Errors.NotFoundError) - }) - - it('should wrap the error', function() { - expect(error.cause).to.equal(GcsNotFoundError) - }) - }) - - describe('when GCS returns an error', function() { - let error - - beforeEach(async function() { - GcsFile.getMetadata = sinon.stub().rejects(genericError) - try { - await GcsPersistor.promises.getFileSize(bucket, key) - } catch (err) { - error = err - } - }) - - it('should return a ReadError', function() { - expect(error).to.be.an.instanceOf(Errors.ReadError) - }) - - it('should wrap the error', function() { - expect(error.cause).to.equal(genericError) - }) - }) - }) - - describe('sendStream', function() { - describe('with valid parameters', function() { - beforeEach(async function() { - return GcsPersistor.promises.sendStream(bucket, key, ReadStream) - }) - - it('should upload the stream', function() { - expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) - expect(GcsBucket.file).to.have.been.calledWith(key) - expect(GcsFile.createWriteStream).to.have.been.called - }) - - it('should not try to create a resumable upload', function() { - expect(GcsFile.createWriteStream).to.have.been.calledWith({ - resumable: false - }) - }) - - it('should meter the stream and pass it to GCS', function() { - expect(Stream.pipeline).to.have.been.calledWith( - ReadStream, - sinon.match.instanceOf(Transform), - WriteStream - ) - }) - - it('calculates the md5 hash of the file', function() { - expect(Hash.digest).to.have.been.called - }) - }) - - describe('when a hash is supplied', function() { - beforeEach(async function() { - return GcsPersistor.promises.sendStream( - bucket, - key, - ReadStream, - 'aaaaaaaabbbbbbbbaaaaaaaabbbbbbbb' - ) - }) - - it('should not calculate the md5 hash of the file', function() { - expect(Hash.digest).not.to.have.been.called - }) - - it('sends the hash in base64', function() { - expect(GcsFile.createWriteStream).to.have.been.calledWith({ - validation: 'md5', - metadata: { - md5Hash: 'qqqqqru7u7uqqqqqu7u7uw==' - }, - resumable: false - }) - }) - - it('does not fetch the md5 hash of the uploaded file', function() { - expect(GcsFile.getMetadata).not.to.have.been.called - }) - }) - - describe('when the upload fails', function() { - let error - beforeEach(async function() { - Stream.pipeline - .withArgs( - ReadStream, - sinon.match.instanceOf(Transform), - WriteStream, - sinon.match.any - ) - .yields(genericError) - try { - await GcsPersistor.promises.sendStream(bucket, key, ReadStream) - } catch (err) { - error = err - } - }) - - it('throws a WriteError', function() { - expect(error).to.be.an.instanceOf(Errors.WriteError) - }) - - it('wraps the error', function() { - expect(error.cause).to.equal(genericError) - }) - }) - }) - - describe('sendFile', function() { - describe('with valid parameters', function() { - beforeEach(async function() { - return GcsPersistor.promises.sendFile(bucket, key, filename) - }) - - it('should create a read stream for the file', function() { - expect(Fs.createReadStream).to.have.been.calledWith(filename) - }) - - it('should create a write stream', function() { - expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) - expect(GcsBucket.file).to.have.been.calledWith(key) - expect(GcsFile.createWriteStream).to.have.been.called - }) - - it('should upload the stream via the 
meter', function() { - expect(Stream.pipeline).to.have.been.calledWith( - ReadStream, - sinon.match.instanceOf(Transform), - WriteStream - ) - }) - }) - }) - - describe('copyFile', function() { - const destinationFile = 'destFile' - - beforeEach(function() { - GcsBucket.file.withArgs(destKey).returns(destinationFile) - }) - - describe('with valid parameters', function() { - beforeEach(async function() { - return GcsPersistor.promises.copyFile(bucket, key, destKey) - }) - - it('should copy the object', function() { - expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) - expect(GcsBucket.file).to.have.been.calledWith(key) - expect(GcsFile.copy).to.have.been.calledWith(destinationFile) - }) - }) - - describe('when the file does not exist', function() { - let error - - beforeEach(async function() { - GcsFile.copy = sinon.stub().rejects(GcsNotFoundError) - try { - await GcsPersistor.promises.copyFile(bucket, key, destKey) - } catch (err) { - error = err - } - }) - - it('should throw a NotFoundError', function() { - expect(error).to.be.an.instanceOf(Errors.NotFoundError) - }) - }) - }) - - describe('deleteFile', function() { - describe('with valid parameters', function() { - beforeEach(async function() { - return GcsPersistor.promises.deleteFile(bucket, key) - }) - - it('should delete the object', function() { - expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) - expect(GcsBucket.file).to.have.been.calledWith(key) - expect(GcsFile.delete).to.have.been.called - }) - }) - - describe('when the file does not exist', function() { - let error - - beforeEach(async function() { - GcsFile.delete = sinon.stub().rejects(GcsNotFoundError) - try { - await GcsPersistor.promises.deleteFile(bucket, key) - } catch (err) { - error = err - } - }) - - it('should not throw an error', function() { - expect(error).not.to.exist - }) - }) - }) - - describe('deleteDirectory', function() { - const directoryName = `${ObjectId()}/${ObjectId()}` - describe('with valid parameters', function() { - beforeEach(async function() { - return GcsPersistor.promises.deleteDirectory(bucket, directoryName) - }) - - it('should list the objects in the directory', function() { - expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) - expect(GcsBucket.getFiles).to.have.been.calledWith({ - directory: directoryName - }) - }) - - it('should delete the files', function() { - expect(GcsFile.delete).to.have.been.calledTwice - }) - }) - - describe('when there is an error listing the objects', function() { - let error - - beforeEach(async function() { - GcsBucket.getFiles = sinon.stub().rejects(genericError) - try { - await GcsPersistor.promises.deleteDirectory(bucket, directoryName) - } catch (err) { - error = err - } - }) - - it('should generate a WriteError', function() { - expect(error).to.be.an.instanceOf(Errors.WriteError) - }) - - it('should wrap the error', function() { - expect(error.cause).to.equal(genericError) - }) - }) - }) - - describe('directorySize', function() { - describe('with valid parameters', function() { - let size - - beforeEach(async function() { - size = await GcsPersistor.promises.directorySize(bucket, key) - }) - - it('should list the objects in the directory', function() { - expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) - expect(GcsBucket.getFiles).to.have.been.calledWith({ directory: key }) - }) - - it('should return the directory size', function() { - expect(size).to.equal(filesSize) - }) - }) - - describe('when there are no files', function() { - let size - - 
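// The cases in this directorySize describe pin down its contract: list the
// objects under the directory prefix and sum their metadata sizes, returning 0
// for an empty listing. A condensed sketch of that shape, assuming a
// @google-cloud/storage Storage instance is passed in (illustrative only, not
// the GcsPersistor source):
async function directorySizeSketch(storage, bucketName, directory) {
  const [files] = await storage.bucket(bucketName).getFiles({ directory })
  // metadata.size can arrive as a string, hence the explicit Number()
  return files.reduce((total, file) => total + Number(file.metadata.size), 0)
}
// with the fixtures above (sizes 11 and 22) this yields 33, i.e. `filesSize`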
beforeEach(async function() { - GcsBucket.getFiles.resolves([[]]) - size = await GcsPersistor.promises.directorySize(bucket, key) - }) - - it('should list the objects in the directory', function() { - expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) - expect(GcsBucket.getFiles).to.have.been.calledWith({ directory: key }) - }) - - it('should return zero', function() { - expect(size).to.equal(0) - }) - }) - - describe('when there is an error listing the objects', function() { - let error - - beforeEach(async function() { - GcsBucket.getFiles.rejects(genericError) - try { - await GcsPersistor.promises.directorySize(bucket, key) - } catch (err) { - error = err - } - }) - - it('should generate a ReadError', function() { - expect(error).to.be.an.instanceOf(Errors.ReadError) - }) - - it('should wrap the error', function() { - expect(error.cause).to.equal(genericError) - }) - }) - }) - - describe('checkIfFileExists', function() { - describe('when the file exists', function() { - let exists - - beforeEach(async function() { - exists = await GcsPersistor.promises.checkIfFileExists(bucket, key) - }) - - it('should ask the file if it exists', function() { - expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) - expect(GcsBucket.file).to.have.been.calledWith(key) - expect(GcsFile.exists).to.have.been.called - }) - - it('should return that the file exists', function() { - expect(exists).to.equal(true) - }) - }) - - describe('when the file does not exist', function() { - let exists - - beforeEach(async function() { - GcsFile.exists = sinon.stub().resolves([false]) - exists = await GcsPersistor.promises.checkIfFileExists(bucket, key) - }) - - it('should get the object header', function() { - expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) - expect(GcsBucket.file).to.have.been.calledWith(key) - expect(GcsFile.exists).to.have.been.called - }) - - it('should return that the file does not exist', function() { - expect(exists).to.equal(false) - }) - }) - - describe('when there is an error', function() { - let error - - beforeEach(async function() { - GcsFile.exists = sinon.stub().rejects(genericError) - try { - await GcsPersistor.promises.checkIfFileExists(bucket, key) - } catch (err) { - error = err - } - }) - - it('should generate a ReadError', function() { - expect(error).to.be.an.instanceOf(Errors.ReadError) - }) - - it('should wrap the error', function() { - expect(error.cause).to.equal(genericError) - }) - }) - }) -}) diff --git a/services/filestore/test/unit/js/LocalFileWriterTests.js b/services/filestore/test/unit/js/LocalFileWriterTests.js index 5f5158f28a..0316b5d800 100644 --- a/services/filestore/test/unit/js/LocalFileWriterTests.js +++ b/services/filestore/test/unit/js/LocalFileWriterTests.js @@ -3,6 +3,7 @@ const chai = require('chai') const { expect } = chai const modulePath = '../../../app/js/LocalFileWriter.js' const SandboxedModule = require('sandboxed-module') +const { Errors } = require('@overleaf/object-persistor') chai.use(require('sinon-chai')) describe('LocalFileWriter', function() { @@ -22,6 +23,8 @@ describe('LocalFileWriter', function() { pipeline: sinon.stub().yields() } + const ObjectPersistor = { Errors } + LocalFileWriter = SandboxedModule.require(modulePath, { requires: { fs, @@ -30,7 +33,8 @@ describe('LocalFileWriter', function() { 'metrics-sharelatex': { inc: sinon.stub(), Timer: sinon.stub().returns({ done: sinon.stub() }) - } + }, + '@overleaf/object-persistor': ObjectPersistor } }) }) diff --git 
a/services/filestore/test/unit/js/MigrationPersistorTests.js b/services/filestore/test/unit/js/MigrationPersistorTests.js deleted file mode 100644 index db8401c78c..0000000000 --- a/services/filestore/test/unit/js/MigrationPersistorTests.js +++ /dev/null @@ -1,519 +0,0 @@ -const sinon = require('sinon') -const chai = require('chai') -const { expect } = chai -const modulePath = '../../../app/js/MigrationPersistor.js' -const SandboxedModule = require('sandboxed-module') - -const Errors = require('../../../app/js/Errors') - -// Not all methods are tested here, but a method with each type of wrapping has -// tests. Specifically, the following wrapping methods are tested here: -// getFileStream: _wrapFallbackMethod -// sendStream: forward-to-primary -// deleteFile: _wrapMethodOnBothPersistors -// copyFile: copyFileWithFallback - -describe('MigrationPersistorTests', function() { - const bucket = 'womBucket' - const fallbackBucket = 'bucKangaroo' - const key = 'monKey' - const destKey = 'donKey' - const genericError = new Error('guru meditation error') - const notFoundError = new Errors.NotFoundError('not found') - const size = 33 - const md5 = 'ffffffff' - - let Metrics, - Settings, - Logger, - Stream, - MigrationPersistor, - fileStream, - newPersistor - - beforeEach(function() { - fileStream = { - name: 'fileStream', - on: sinon - .stub() - .withArgs('end') - .yields(), - pipe: sinon.stub() - } - - newPersistor = function(hasFile) { - return { - promises: { - sendFile: sinon.stub().resolves(), - sendStream: sinon.stub().resolves(), - getFileStream: hasFile - ? sinon.stub().resolves(fileStream) - : sinon.stub().rejects(notFoundError), - deleteDirectory: sinon.stub().resolves(), - getFileSize: hasFile - ? sinon.stub().resolves(size) - : sinon.stub().rejects(notFoundError), - deleteFile: sinon.stub().resolves(), - copyFile: hasFile - ? sinon.stub().resolves() - : sinon.stub().rejects(notFoundError), - checkIfFileExists: sinon.stub().resolves(hasFile), - directorySize: hasFile - ? sinon.stub().resolves(size) - : sinon.stub().rejects(notFoundError), - getFileMd5Hash: hasFile - ? 
sinon.stub().resolves(md5) - : sinon.stub().rejects(notFoundError) - } - } - } - - Settings = { - filestore: { - fallback: { - buckets: { - [bucket]: fallbackBucket - } - } - } - } - - Metrics = { - inc: sinon.stub() - } - - Stream = { - pipeline: sinon.stub().yields(), - PassThrough: sinon.stub() - } - - Logger = { - warn: sinon.stub() - } - - MigrationPersistor = SandboxedModule.require(modulePath, { - requires: { - 'settings-sharelatex': Settings, - stream: Stream, - './Errors': Errors, - 'metrics-sharelatex': Metrics, - 'logger-sharelatex': Logger - }, - globals: { console } - }) - }) - - describe('getFileStream', function() { - const options = { wombat: 'potato' } - describe('when the primary persistor has the file', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor, response - beforeEach(async function() { - primaryPersistor = newPersistor(true) - fallbackPersistor = newPersistor(false) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - response = await migrationPersistor.promises.getFileStream( - bucket, - key, - options - ) - }) - - it('should return the file stream', function() { - expect(response).to.equal(fileStream) - }) - - it('should fetch the file from the primary persistor, with the correct options', function() { - expect( - primaryPersistor.promises.getFileStream - ).to.have.been.calledWithExactly(bucket, key, options) - }) - - it('should not query the fallback persistor', function() { - expect(fallbackPersistor.promises.getFileStream).not.to.have.been.called - }) - }) - - describe('when the fallback persistor has the file', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor, response - beforeEach(async function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(true) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - response = await migrationPersistor.promises.getFileStream( - bucket, - key, - options - ) - }) - - it('should return the file stream', function() { - expect(response).to.be.an.instanceOf(Stream.PassThrough) - }) - - it('should fetch the file from the primary persistor with the correct options', function() { - expect( - primaryPersistor.promises.getFileStream - ).to.have.been.calledWithExactly(bucket, key, options) - }) - - it('should fetch the file from the fallback persistor with the fallback bucket with the correct options', function() { - expect( - fallbackPersistor.promises.getFileStream - ).to.have.been.calledWithExactly(fallbackBucket, key, options) - }) - - it('should create one read stream', function() { - expect(fallbackPersistor.promises.getFileStream).to.have.been.calledOnce - }) - - it('should not send the file to the primary', function() { - expect(primaryPersistor.promises.sendStream).not.to.have.been.called - }) - }) - - describe('when the file should be copied to the primary', function() { - let primaryPersistor, - fallbackPersistor, - migrationPersistor, - returnedStream - beforeEach(async function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(true) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - Settings.filestore.fallback.copyOnMiss = true - returnedStream = await migrationPersistor.promises.getFileStream( - bucket, - key, - options - ) - }) - - it('should create one read stream', function() { - expect(fallbackPersistor.promises.getFileStream).to.have.been.calledOnce - }) - - it('should get the md5 hash from the 
source', function() { - expect( - fallbackPersistor.promises.getFileMd5Hash - ).to.have.been.calledWith(fallbackBucket, key) - }) - - it('should send a stream to the primary', function() { - expect( - primaryPersistor.promises.sendStream - ).to.have.been.calledWithExactly( - bucket, - key, - sinon.match.instanceOf(Stream.PassThrough), - md5 - ) - }) - - it('should send a stream to the client', function() { - expect(returnedStream).to.be.an.instanceOf(Stream.PassThrough) - }) - }) - - describe('when neither persistor has the file', function() { - it('rejects with a NotFoundError', async function() { - const migrationPersistor = MigrationPersistor( - newPersistor(false), - newPersistor(false) - ) - return expect( - migrationPersistor.promises.getFileStream(bucket, key) - ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) - }) - }) - - describe('when the primary persistor throws an unexpected error', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor, error - beforeEach(async function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(true) - primaryPersistor.promises.getFileStream = sinon - .stub() - .rejects(genericError) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - try { - await migrationPersistor.promises.getFileStream(bucket, key, options) - } catch (err) { - error = err - } - }) - - it('rejects with the error', function() { - expect(error).to.equal(genericError) - }) - - it('does not call the fallback', function() { - expect(fallbackPersistor.promises.getFileStream).not.to.have.been.called - }) - }) - - describe('when the fallback persistor throws an unexpected error', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor, error - beforeEach(async function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(false) - fallbackPersistor.promises.getFileStream = sinon - .stub() - .rejects(genericError) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - try { - await migrationPersistor.promises.getFileStream(bucket, key, options) - } catch (err) { - error = err - } - }) - - it('rejects with the error', function() { - expect(error).to.equal(genericError) - }) - - it('should have called the fallback', function() { - expect( - fallbackPersistor.promises.getFileStream - ).to.have.been.calledWith(fallbackBucket, key) - }) - }) - }) - - describe('sendStream', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor - beforeEach(function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(false) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - }) - - describe('when it works', function() { - beforeEach(async function() { - return migrationPersistor.promises.sendStream(bucket, key, fileStream) - }) - - it('should send the file to the primary persistor', function() { - expect( - primaryPersistor.promises.sendStream - ).to.have.been.calledWithExactly(bucket, key, fileStream) - }) - - it('should not send the file to the fallback persistor', function() { - expect(fallbackPersistor.promises.sendStream).not.to.have.been.called - }) - }) - - describe('when the primary persistor throws an error', function() { - it('returns the error', async function() { - primaryPersistor.promises.sendStream.rejects(notFoundError) - return expect( - migrationPersistor.promises.sendStream(bucket, key, fileStream) - 
).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) - }) - }) - }) - - describe('deleteFile', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor - beforeEach(function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(false) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - }) - - describe('when it works', function() { - beforeEach(async function() { - return migrationPersistor.promises.deleteFile(bucket, key) - }) - - it('should delete the file from the primary', function() { - expect( - primaryPersistor.promises.deleteFile - ).to.have.been.calledWithExactly(bucket, key) - }) - - it('should delete the file from the fallback', function() { - expect( - fallbackPersistor.promises.deleteFile - ).to.have.been.calledWithExactly(fallbackBucket, key) - }) - }) - - describe('when the primary persistor throws an error', function() { - let error - beforeEach(async function() { - primaryPersistor.promises.deleteFile.rejects(genericError) - try { - await migrationPersistor.promises.deleteFile(bucket, key) - } catch (err) { - error = err - } - }) - - it('should return the error', function() { - expect(error).to.equal(genericError) - }) - - it('should delete the file from the primary', function() { - expect( - primaryPersistor.promises.deleteFile - ).to.have.been.calledWithExactly(bucket, key) - }) - - it('should delete the file from the fallback', function() { - expect( - fallbackPersistor.promises.deleteFile - ).to.have.been.calledWithExactly(fallbackBucket, key) - }) - }) - - describe('when the fallback persistor throws an error', function() { - let error - beforeEach(async function() { - fallbackPersistor.promises.deleteFile.rejects(genericError) - try { - await migrationPersistor.promises.deleteFile(bucket, key) - } catch (err) { - error = err - } - }) - - it('should return the error', function() { - expect(error).to.equal(genericError) - }) - - it('should delete the file from the primary', function() { - expect( - primaryPersistor.promises.deleteFile - ).to.have.been.calledWithExactly(bucket, key) - }) - - it('should delete the file from the fallback', function() { - expect( - fallbackPersistor.promises.deleteFile - ).to.have.been.calledWithExactly(fallbackBucket, key) - }) - }) - }) - - describe('copyFile', function() { - describe('when the file exists on the primary', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor - beforeEach(async function() { - primaryPersistor = newPersistor(true) - fallbackPersistor = newPersistor(false) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - return migrationPersistor.promises.copyFile(bucket, key, destKey) - }) - - it('should call copyFile to copy the file', function() { - expect( - primaryPersistor.promises.copyFile - ).to.have.been.calledWithExactly(bucket, key, destKey) - }) - - it('should not try to read from the fallback', function() { - expect(fallbackPersistor.promises.getFileStream).not.to.have.been.called - }) - }) - - describe('when the file does not exist on the primary', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor - beforeEach(async function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(true) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - return migrationPersistor.promises.copyFile(bucket, key, destKey) - }) - - it('should call copyFile to copy the file', function() { - 
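// The copyFile cases in this and the following describe blocks spell out the
// copy-with-fallback behaviour: attempt the copy on the primary, and if the
// object is missing there, fetch its md5 and a read stream from the fallback
// bucket, then send that stream to the primary under the destination key. A
// condensed sketch of that flow, assuming persistors with the promise API
// stubbed by newPersistor() above and the Errors exported by
// @overleaf/object-persistor (the real MigrationPersistor also pipes through a
// PassThrough, omitted here):
const { Errors } = require('@overleaf/object-persistor')

async function copyWithFallbackSketch(
  primary,
  fallback,
  bucket,
  fallbackBucket,
  key,
  destKey
) {
  try {
    return await primary.promises.copyFile(bucket, key, destKey)
  } catch (err) {
    if (!(err instanceof Errors.NotFoundError)) {
      throw err
    }
    const md5 = await fallback.promises.getFileMd5Hash(fallbackBucket, key)
    const sourceStream = await fallback.promises.getFileStream(
      fallbackBucket,
      key,
      {}
    )
    return primary.promises.sendStream(bucket, destKey, sourceStream, md5)
  }
}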
expect( - primaryPersistor.promises.copyFile - ).to.have.been.calledWithExactly(bucket, key, destKey) - }) - - it('should fetch the file from the fallback', function() { - expect( - fallbackPersistor.promises.getFileStream - ).not.to.have.been.calledWithExactly(fallbackBucket, key) - }) - - it('should get the md5 hash from the source', function() { - expect( - fallbackPersistor.promises.getFileMd5Hash - ).to.have.been.calledWith(fallbackBucket, key) - }) - - it('should send the file to the primary', function() { - expect( - primaryPersistor.promises.sendStream - ).to.have.been.calledWithExactly( - bucket, - destKey, - sinon.match.instanceOf(Stream.PassThrough), - md5 - ) - }) - }) - - describe('when the file does not exist on the fallback', function() { - let primaryPersistor, fallbackPersistor, migrationPersistor, error - beforeEach(async function() { - primaryPersistor = newPersistor(false) - fallbackPersistor = newPersistor(false) - migrationPersistor = MigrationPersistor( - primaryPersistor, - fallbackPersistor - ) - try { - await migrationPersistor.promises.copyFile(bucket, key, destKey) - } catch (err) { - error = err - } - }) - - it('should call copyFile to copy the file', function() { - expect( - primaryPersistor.promises.copyFile - ).to.have.been.calledWithExactly(bucket, key, destKey) - }) - - it('should fetch the file from the fallback', function() { - expect( - fallbackPersistor.promises.getFileStream - ).not.to.have.been.calledWithExactly(fallbackBucket, key) - }) - - it('should return a not-found error', function() { - expect(error).to.be.an.instanceOf(Errors.NotFoundError) - }) - }) - }) -}) diff --git a/services/filestore/test/unit/js/PersistorManagerTests.js b/services/filestore/test/unit/js/PersistorManagerTests.js deleted file mode 100644 index cdc9de0f92..0000000000 --- a/services/filestore/test/unit/js/PersistorManagerTests.js +++ /dev/null @@ -1,78 +0,0 @@ -const sinon = require('sinon') -const chai = require('chai') -const { expect } = chai -const SandboxedModule = require('sandboxed-module') - -const modulePath = '../../../app/js/PersistorManager.js' - -describe('PersistorManager', function() { - let PersistorManager, FSPersistor, S3Persistor, settings, requires - - beforeEach(function() { - FSPersistor = { - wrappedMethod: sinon.stub().returns('FSPersistor') - } - S3Persistor = { - wrappedMethod: sinon.stub().returns('S3Persistor') - } - - settings = { - filestore: {} - } - - requires = { - './S3Persistor': S3Persistor, - './FSPersistor': FSPersistor, - 'settings-sharelatex': settings, - 'logger-sharelatex': { - log() {}, - err() {} - } - } - }) - - it('should implement the S3 wrapped method when S3 is configured', function() { - settings.filestore.backend = 's3' - PersistorManager = SandboxedModule.require(modulePath, { requires }) - - expect(PersistorManager).to.respondTo('wrappedMethod') - expect(PersistorManager.wrappedMethod()).to.equal('S3Persistor') - }) - - it("should implement the S3 wrapped method when 'aws-sdk' is configured", function() { - settings.filestore.backend = 'aws-sdk' - PersistorManager = SandboxedModule.require(modulePath, { requires }) - - expect(PersistorManager).to.respondTo('wrappedMethod') - expect(PersistorManager.wrappedMethod()).to.equal('S3Persistor') - }) - - it('should implement the FS wrapped method when FS is configured', function() { - settings.filestore.backend = 'fs' - PersistorManager = SandboxedModule.require(modulePath, { requires }) - - expect(PersistorManager).to.respondTo('wrappedMethod') - 
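// The five cases in this spec spell out the backend-selection rule the old
// PersistorManager enforced: 's3' and 'aws-sdk' both map to the S3 persistor,
// 'fs' maps to the filesystem persistor, a missing backend is a configuration
// error, and anything else is rejected by name. A minimal sketch of that
// dispatch (illustrative only, not the module's actual source):
function selectPersistor(settings, { S3Persistor, FSPersistor }) {
  const backend = settings.filestore.backend
  if (!backend) {
    throw new Error('no backend specified - config incomplete')
  }
  switch (backend) {
    case 's3':
    case 'aws-sdk':
      return S3Persistor
    case 'fs':
      return FSPersistor
    default:
      throw new Error(`unknown filestore backend: ${backend}`)
  }
}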
expect(PersistorManager.wrappedMethod()).to.equal('FSPersistor') - }) - - it('should throw an error when the backend is not configured', function() { - try { - SandboxedModule.require(modulePath, { requires }) - } catch (err) { - expect(err.message).to.equal('no backend specified - config incomplete') - return - } - expect('should have caught an error').not.to.exist - }) - - it('should throw an error when the backend is unknown', function() { - settings.filestore.backend = 'magic' - try { - SandboxedModule.require(modulePath, { requires }) - } catch (err) { - expect(err.message).to.equal('unknown filestore backend: magic') - return - } - expect('should have caught an error').not.to.exist - }) -}) diff --git a/services/filestore/test/unit/js/S3PersistorTests.js b/services/filestore/test/unit/js/S3PersistorTests.js deleted file mode 100644 index c236de25ef..0000000000 --- a/services/filestore/test/unit/js/S3PersistorTests.js +++ /dev/null @@ -1,865 +0,0 @@ -const sinon = require('sinon') -const chai = require('chai') -const { expect } = chai -const modulePath = '../../../app/js/S3Persistor.js' -const SandboxedModule = require('sandboxed-module') - -const Errors = require('../../../app/js/Errors') - -describe('S3PersistorTests', function() { - const defaultS3Key = 'frog' - const defaultS3Secret = 'prince' - const defaultS3Credentials = { - credentials: { - accessKeyId: defaultS3Key, - secretAccessKey: defaultS3Secret - } - } - const filename = '/wombat/potato.tex' - const bucket = 'womBucket' - const key = 'monKey' - const destKey = 'donKey' - const objectSize = 5555 - const genericError = new Error('guru meditation error') - const files = [ - { Key: 'llama', Size: 11 }, - { Key: 'hippo', Size: 22 } - ] - const filesSize = 33 - const md5 = 'ffffffff00000000ffffffff00000000' - - let Metrics, - Logger, - Transform, - S3, - Fs, - ReadStream, - Stream, - S3Persistor, - S3Client, - S3ReadStream, - S3NotFoundError, - S3AccessDeniedError, - FileNotFoundError, - EmptyPromise, - settings, - Hash, - crypto - - beforeEach(function() { - settings = { - filestore: { - backend: 's3', - s3: { - secret: defaultS3Secret, - key: defaultS3Key, - partSize: 100 * 1024 * 1024 - }, - stores: { - user_files: 'sl_user_files' - } - } - } - - Transform = class { - on(event, callback) { - if (event === 'readable') { - callback() - } - } - - once() {} - removeListener() {} - } - - Stream = { - pipeline: sinon.stub().yields(), - Transform: Transform - } - - EmptyPromise = { - promise: sinon.stub().resolves() - } - - Metrics = { - count: sinon.stub() - } - - ReadStream = { - pipe: sinon.stub().returns('readStream'), - on: sinon.stub(), - removeListener: sinon.stub() - } - ReadStream.on.withArgs('end').yields() - ReadStream.on.withArgs('pipe').yields({ - unpipe: sinon.stub(), - resume: sinon.stub() - }) - - FileNotFoundError = new Error('File not found') - FileNotFoundError.code = 'ENOENT' - - Fs = { - createReadStream: sinon.stub().returns(ReadStream) - } - - S3NotFoundError = new Error('not found') - S3NotFoundError.code = 'NoSuchKey' - - S3AccessDeniedError = new Error('access denied') - S3AccessDeniedError.code = 'AccessDenied' - - S3ReadStream = { - on: sinon.stub(), - pipe: sinon.stub(), - removeListener: sinon.stub() - } - S3ReadStream.on.withArgs('end').yields() - S3ReadStream.on.withArgs('pipe').yields({ - unpipe: sinon.stub(), - resume: sinon.stub() - }) - S3Client = { - getObject: sinon.stub().returns({ - createReadStream: sinon.stub().returns(S3ReadStream) - }), - headObject: sinon.stub().returns({ - promise: 
sinon.stub().resolves({ - ContentLength: objectSize, - ETag: md5 - }) - }), - listObjects: sinon.stub().returns({ - promise: sinon.stub().resolves({ - Contents: files - }) - }), - upload: sinon - .stub() - .returns({ promise: sinon.stub().resolves({ ETag: `"${md5}"` }) }), - copyObject: sinon.stub().returns(EmptyPromise), - deleteObject: sinon.stub().returns(EmptyPromise), - deleteObjects: sinon.stub().returns(EmptyPromise) - } - S3 = sinon.stub().returns(S3Client) - - Hash = { - end: sinon.stub(), - read: sinon.stub().returns(md5), - digest: sinon.stub().returns(md5), - setEncoding: sinon.stub() - } - crypto = { - createHash: sinon.stub().returns(Hash) - } - - Logger = { - warn: sinon.stub() - } - - S3Persistor = SandboxedModule.require(modulePath, { - requires: { - 'aws-sdk/clients/s3': S3, - 'settings-sharelatex': settings, - 'logger-sharelatex': Logger, - './Errors': Errors, - fs: Fs, - stream: Stream, - 'metrics-sharelatex': Metrics, - crypto - }, - globals: { console, Buffer } - }) - }) - - describe('getFileStream', function() { - describe('when called with valid parameters', function() { - let stream - - beforeEach(async function() { - stream = await S3Persistor.promises.getFileStream(bucket, key) - }) - - it('returns a metered stream', function() { - expect(stream).to.be.instanceOf(Transform) - }) - - it('sets the AWS client up with credentials from settings', function() { - expect(S3).to.have.been.calledWith(defaultS3Credentials) - }) - - it('fetches the right key from the right bucket', function() { - expect(S3Client.getObject).to.have.been.calledWith({ - Bucket: bucket, - Key: key - }) - }) - - it('pipes the stream through the meter', async function() { - expect(S3ReadStream.pipe).to.have.been.calledWith( - sinon.match.instanceOf(Transform) - ) - }) - }) - - describe('when called with a byte range', function() { - let stream - - beforeEach(async function() { - stream = await S3Persistor.promises.getFileStream(bucket, key, { - start: 5, - end: 10 - }) - }) - - it('returns a metered stream', function() { - expect(stream).to.be.instanceOf(Stream.Transform) - }) - - it('passes the byte range on to S3', function() { - expect(S3Client.getObject).to.have.been.calledWith({ - Bucket: bucket, - Key: key, - Range: 'bytes=5-10' - }) - }) - }) - - describe('when there are alternative credentials', function() { - let stream - const alternativeSecret = 'giraffe' - const alternativeKey = 'hippo' - const alternativeS3Credentials = { - credentials: { - accessKeyId: alternativeKey, - secretAccessKey: alternativeSecret - } - } - - beforeEach(async function() { - settings.filestore.s3BucketCreds = {} - settings.filestore.s3BucketCreds[bucket] = { - auth_key: alternativeKey, - auth_secret: alternativeSecret - } - - stream = await S3Persistor.promises.getFileStream(bucket, key) - }) - - it('returns a metered stream', function() { - expect(stream).to.be.instanceOf(Stream.Transform) - }) - - it('sets the AWS client up with the alternative credentials', function() { - expect(S3).to.have.been.calledWith(alternativeS3Credentials) - }) - - it('fetches the right key from the right bucket', function() { - expect(S3Client.getObject).to.have.been.calledWith({ - Bucket: bucket, - Key: key - }) - }) - - it('caches the credentials', async function() { - stream = await S3Persistor.promises.getFileStream(bucket, key) - - expect(S3).to.have.been.calledOnceWith(alternativeS3Credentials) - }) - - it('uses the default credentials for an unknown bucket', async function() { - stream = await 
S3Persistor.promises.getFileStream('anotherBucket', key) - - expect(S3).to.have.been.calledTwice - expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials) - expect(S3.secondCall).to.have.been.calledWith(defaultS3Credentials) - }) - - it('caches the default credentials', async function() { - stream = await S3Persistor.promises.getFileStream('anotherBucket', key) - stream = await S3Persistor.promises.getFileStream('anotherBucket', key) - - expect(S3).to.have.been.calledTwice - expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials) - expect(S3.secondCall).to.have.been.calledWith(defaultS3Credentials) - }) - - it('throws an error if there are no credentials for the bucket', async function() { - delete settings.filestore.s3.key - delete settings.filestore.s3.secret - - await expect( - S3Persistor.promises.getFileStream('anotherBucket', key) - ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.SettingsError) - }) - }) - - describe("when the file doesn't exist", function() { - let error, stream - - beforeEach(async function() { - Transform.prototype.on = sinon.stub() - S3ReadStream.on.withArgs('error').yields(S3NotFoundError) - try { - stream = await S3Persistor.promises.getFileStream(bucket, key) - } catch (err) { - error = err - } - }) - - it('does not return a stream', function() { - expect(stream).not.to.exist - }) - - it('throws a NotFoundError', function() { - expect(error).to.be.an.instanceOf(Errors.NotFoundError) - }) - - it('wraps the error', function() { - expect(error.cause).to.exist - }) - - it('stores the bucket and key in the error', function() { - expect(error.info).to.include({ bucketName: bucket, key: key }) - }) - }) - - describe('when access to the file is denied', function() { - let error, stream - - beforeEach(async function() { - Transform.prototype.on = sinon.stub() - S3ReadStream.on.withArgs('error').yields(S3AccessDeniedError) - try { - stream = await S3Persistor.promises.getFileStream(bucket, key) - } catch (err) { - error = err - } - }) - - it('does not return a stream', function() { - expect(stream).not.to.exist - }) - - it('throws a NotFoundError', function() { - expect(error).to.be.an.instanceOf(Errors.NotFoundError) - }) - - it('wraps the error', function() { - expect(error.cause).to.exist - }) - - it('stores the bucket and key in the error', function() { - expect(error.info).to.include({ bucketName: bucket, key: key }) - }) - }) - - describe('when S3 encounters an unkown error', function() { - let error, stream - - beforeEach(async function() { - Transform.prototype.on = sinon.stub() - S3ReadStream.on.withArgs('error').yields(genericError) - try { - stream = await S3Persistor.promises.getFileStream(bucket, key) - } catch (err) { - error = err - } - }) - - it('does not return a stream', function() { - expect(stream).not.to.exist - }) - - it('throws a ReadError', function() { - expect(error).to.be.an.instanceOf(Errors.ReadError) - }) - - it('wraps the error', function() { - expect(error.cause).to.exist - }) - - it('stores the bucket and key in the error', function() { - expect(error.info).to.include({ bucketName: bucket, key: key }) - }) - }) - }) - - describe('getFileSize', function() { - describe('when called with valid parameters', function() { - let size - - beforeEach(async function() { - size = await S3Persistor.promises.getFileSize(bucket, key) - }) - - it('should return the object size', function() { - expect(size).to.equal(objectSize) - }) - - it('should pass the bucket and key to S3', function() { - 
expect(S3Client.headObject).to.have.been.calledWith({ - Bucket: bucket, - Key: key - }) - }) - }) - - describe('when the object is not found', function() { - let error - - beforeEach(async function() { - S3Client.headObject = sinon.stub().returns({ - promise: sinon.stub().rejects(S3NotFoundError) - }) - try { - await S3Persistor.promises.getFileSize(bucket, key) - } catch (err) { - error = err - } - }) - - it('should return a NotFoundError', function() { - expect(error).to.be.an.instanceOf(Errors.NotFoundError) - }) - - it('should wrap the error', function() { - expect(error.cause).to.equal(S3NotFoundError) - }) - }) - - describe('when S3 returns an error', function() { - let error - - beforeEach(async function() { - S3Client.headObject = sinon.stub().returns({ - promise: sinon.stub().rejects(genericError) - }) - try { - await S3Persistor.promises.getFileSize(bucket, key) - } catch (err) { - error = err - } - }) - - it('should return a ReadError', function() { - expect(error).to.be.an.instanceOf(Errors.ReadError) - }) - - it('should wrap the error', function() { - expect(error.cause).to.equal(genericError) - }) - }) - }) - - describe('sendStream', function() { - describe('with valid parameters', function() { - beforeEach(async function() { - return S3Persistor.promises.sendStream(bucket, key, ReadStream) - }) - - it('should upload the stream', function() { - expect(S3Client.upload).to.have.been.calledWith({ - Bucket: bucket, - Key: key, - Body: sinon.match.instanceOf(Stream.Transform) - }) - }) - - it('should upload files in a single part', function() { - expect(S3Client.upload).to.have.been.calledWith(sinon.match.any, { - partSize: 100 * 1024 * 1024 - }) - }) - - it('should meter the stream', function() { - expect(Stream.pipeline).to.have.been.calledWith( - ReadStream, - sinon.match.instanceOf(Stream.Transform) - ) - }) - - it('calculates the md5 hash of the file', function() { - expect(Hash.digest).to.have.been.called - }) - }) - - describe('when a hash is supploed', function() { - beforeEach(async function() { - return S3Persistor.promises.sendStream( - bucket, - key, - ReadStream, - 'aaaaaaaabbbbbbbbaaaaaaaabbbbbbbb' - ) - }) - - it('should not calculate the md5 hash of the file', function() { - expect(Hash.digest).not.to.have.been.called - }) - - it('sends the hash in base64', function() { - expect(S3Client.upload).to.have.been.calledWith({ - Bucket: bucket, - Key: key, - Body: sinon.match.instanceOf(Transform), - ContentMD5: 'qqqqqru7u7uqqqqqu7u7uw==' - }) - }) - - it('does not fetch the md5 hash of the uploaded file', function() { - expect(S3Client.headObject).not.to.have.been.called - }) - }) - - describe('when the upload fails', function() { - let error - beforeEach(async function() { - S3Client.upload = sinon.stub().returns({ - promise: sinon.stub().rejects(genericError) - }) - try { - await S3Persistor.promises.sendStream(bucket, key, ReadStream) - } catch (err) { - error = err - } - }) - - it('throws a WriteError', function() { - expect(error).to.be.an.instanceOf(Errors.WriteError) - }) - }) - - describe("when the etag isn't a valid md5 hash", function() { - beforeEach(async function() { - S3Client.upload = sinon.stub().returns({ - promise: sinon.stub().resolves({ - ETag: 'somethingthatisntanmd5', - Bucket: bucket, - Key: key - }) - }) - - await S3Persistor.promises.sendStream(bucket, key, ReadStream) - }) - - it('should re-fetch the file to verify it', function() { - expect(S3Client.getObject).to.have.been.calledWith({ - Bucket: bucket, - Key: key - }) - }) - - it('should 
meter the download', function() { - expect(S3ReadStream.pipe).to.have.been.calledWith( - sinon.match.instanceOf(Stream.Transform) - ) - }) - - it('should calculate the md5 hash from the file', function() { - expect(Hash.digest).to.have.been.called - }) - }) - }) - - describe('sendFile', function() { - describe('with valid parameters', function() { - beforeEach(async function() { - return S3Persistor.promises.sendFile(bucket, key, filename) - }) - - it('should create a read stream for the file', function() { - expect(Fs.createReadStream).to.have.been.calledWith(filename) - }) - - it('should upload the stream', function() { - expect(S3Client.upload).to.have.been.calledWith({ - Bucket: bucket, - Key: key, - Body: sinon.match.instanceOf(Transform) - }) - }) - }) - }) - - describe('copyFile', function() { - describe('with valid parameters', function() { - beforeEach(async function() { - return S3Persistor.promises.copyFile(bucket, key, destKey) - }) - - it('should copy the object', function() { - expect(S3Client.copyObject).to.have.been.calledWith({ - Bucket: bucket, - Key: destKey, - CopySource: `${bucket}/${key}` - }) - }) - }) - - describe('when the file does not exist', function() { - let error - - beforeEach(async function() { - S3Client.copyObject = sinon.stub().returns({ - promise: sinon.stub().rejects(S3NotFoundError) - }) - try { - await S3Persistor.promises.copyFile(bucket, key, destKey) - } catch (err) { - error = err - } - }) - - it('should throw a NotFoundError', function() { - expect(error).to.be.an.instanceOf(Errors.NotFoundError) - }) - }) - }) - - describe('deleteFile', function() { - describe('with valid parameters', function() { - beforeEach(async function() { - return S3Persistor.promises.deleteFile(bucket, key) - }) - - it('should delete the object', function() { - expect(S3Client.deleteObject).to.have.been.calledWith({ - Bucket: bucket, - Key: key - }) - }) - }) - }) - - describe('deleteDirectory', function() { - describe('with valid parameters', function() { - beforeEach(async function() { - return S3Persistor.promises.deleteDirectory(bucket, key) - }) - - it('should list the objects in the directory', function() { - expect(S3Client.listObjects).to.have.been.calledWith({ - Bucket: bucket, - Prefix: key - }) - }) - - it('should delete the objects using their keys', function() { - expect(S3Client.deleteObjects).to.have.been.calledWith({ - Bucket: bucket, - Delete: { - Objects: [{ Key: 'llama' }, { Key: 'hippo' }], - Quiet: true - } - }) - }) - }) - - describe('when there are no files', function() { - beforeEach(async function() { - S3Client.listObjects = sinon - .stub() - .returns({ promise: sinon.stub().resolves({ Contents: [] }) }) - return S3Persistor.promises.deleteDirectory(bucket, key) - }) - - it('should list the objects in the directory', function() { - expect(S3Client.listObjects).to.have.been.calledWith({ - Bucket: bucket, - Prefix: key - }) - }) - - it('should not try to delete any objects', function() { - expect(S3Client.deleteObjects).not.to.have.been.called - }) - }) - - describe('when there is an error listing the objects', function() { - let error - - beforeEach(async function() { - S3Client.listObjects = sinon - .stub() - .returns({ promise: sinon.stub().rejects(genericError) }) - try { - await S3Persistor.promises.deleteDirectory(bucket, key) - } catch (err) { - error = err - } - }) - - it('should generate a ReadError', function() { - expect(error).to.be.an.instanceOf(Errors.ReadError) - }) - - it('should wrap the error', function() { - 
expect(error.cause).to.equal(genericError) - }) - - it('should not try to delete any objects', function() { - expect(S3Client.deleteObjects).not.to.have.been.called - }) - }) - - describe('when there is an error deleting the objects', function() { - let error - - beforeEach(async function() { - S3Client.deleteObjects = sinon - .stub() - .returns({ promise: sinon.stub().rejects(genericError) }) - try { - await S3Persistor.promises.deleteDirectory(bucket, key) - } catch (err) { - error = err - } - }) - - it('should generate a WriteError', function() { - expect(error).to.be.an.instanceOf(Errors.WriteError) - }) - - it('should wrap the error', function() { - expect(error.cause).to.equal(genericError) - }) - }) - }) - - describe('directorySize', function() { - describe('with valid parameters', function() { - let size - - beforeEach(async function() { - size = await S3Persistor.promises.directorySize(bucket, key) - }) - - it('should list the objects in the directory', function() { - expect(S3Client.listObjects).to.have.been.calledWith({ - Bucket: bucket, - Prefix: key - }) - }) - - it('should return the directory size', function() { - expect(size).to.equal(filesSize) - }) - }) - - describe('when there are no files', function() { - let size - - beforeEach(async function() { - S3Client.listObjects = sinon - .stub() - .returns({ promise: sinon.stub().resolves({ Contents: [] }) }) - size = await S3Persistor.promises.directorySize(bucket, key) - }) - - it('should list the objects in the directory', function() { - expect(S3Client.listObjects).to.have.been.calledWith({ - Bucket: bucket, - Prefix: key - }) - }) - - it('should return zero', function() { - expect(size).to.equal(0) - }) - }) - - describe('when there is an error listing the objects', function() { - let error - - beforeEach(async function() { - S3Client.listObjects = sinon - .stub() - .returns({ promise: sinon.stub().rejects(genericError) }) - try { - await S3Persistor.promises.directorySize(bucket, key) - } catch (err) { - error = err - } - }) - - it('should generate a ReadError', function() { - expect(error).to.be.an.instanceOf(Errors.ReadError) - }) - - it('should wrap the error', function() { - expect(error.cause).to.equal(genericError) - }) - }) - }) - - describe('checkIfFileExists', function() { - describe('when the file exists', function() { - let exists - - beforeEach(async function() { - exists = await S3Persistor.promises.checkIfFileExists(bucket, key) - }) - - it('should get the object header', function() { - expect(S3Client.headObject).to.have.been.calledWith({ - Bucket: bucket, - Key: key - }) - }) - - it('should return that the file exists', function() { - expect(exists).to.equal(true) - }) - }) - - describe('when the file does not exist', function() { - let exists - - beforeEach(async function() { - S3Client.headObject = sinon - .stub() - .returns({ promise: sinon.stub().rejects(S3NotFoundError) }) - exists = await S3Persistor.promises.checkIfFileExists(bucket, key) - }) - - it('should get the object header', function() { - expect(S3Client.headObject).to.have.been.calledWith({ - Bucket: bucket, - Key: key - }) - }) - - it('should return that the file does not exist', function() { - expect(exists).to.equal(false) - }) - }) - - describe('when there is an error', function() { - let error - - beforeEach(async function() { - S3Client.headObject = sinon - .stub() - .returns({ promise: sinon.stub().rejects(genericError) }) - try { - await S3Persistor.promises.checkIfFileExists(bucket, key) - } catch (err) { - error = err - } - }) - - 
it('should generate a ReadError', function() { - expect(error).to.be.an.instanceOf(Errors.ReadError) - }) - - it('should wrap the upstream ReadError', function() { - expect(error.cause).to.be.an.instanceOf(Errors.ReadError) - }) - - it('should eventually wrap the error', function() { - expect(error.cause.cause).to.equal(genericError) - }) - }) - }) -}) diff --git a/services/filestore/test/unit/js/SafeExecTests.js b/services/filestore/test/unit/js/SafeExecTests.js index 6b89c53c01..4d31f6f57f 100644 --- a/services/filestore/test/unit/js/SafeExecTests.js +++ b/services/filestore/test/unit/js/SafeExecTests.js @@ -2,6 +2,7 @@ const chai = require('chai') const should = chai.should() const { expect } = chai const modulePath = '../../../app/js/SafeExec' +const { Errors } = require('@overleaf/object-persistor') const SandboxedModule = require('sandboxed-module') describe('SafeExec', function() { @@ -11,10 +12,13 @@ describe('SafeExec', function() { settings = { enableConversions: true } options = { timeout: 10 * 1000, killSignal: 'SIGTERM' } + const ObjectPersistor = { Errors } + safeExec = SandboxedModule.require(modulePath, { globals: { process }, requires: { - 'settings-sharelatex': settings + 'settings-sharelatex': settings, + '@overleaf/object-persistor': ObjectPersistor } }) }) From 14834abf830d8bdbfea3e30c0c9306a14766292c Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 8 Jul 2020 10:14:02 +0100 Subject: [PATCH 518/555] Move config settings to correct place for new persistor module --- services/filestore/config/settings.defaults.coffee | 5 ++--- services/filestore/test/unit/js/SettingsTests.js | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.coffee index 272230f918..a53a4cf840 100644 --- a/services/filestore/config/settings.defaults.coffee +++ b/services/filestore/config/settings.defaults.coffee @@ -43,6 +43,7 @@ settings = unlockBeforeDelete: process.env['GCS_UNLOCK_BEFORE_DELETE'] == "true" # unlock an event-based hold before deleting. default false deletedBucketSuffix: process.env['GCS_DELETED_BUCKET_SUFFIX'] # if present, copy file to another bucket on delete. default null deleteConcurrency: parseInt(process.env['GCS_DELETE_CONCURRENCY']) || 50 + signedUrlExpiryInMs: parseInt(process.env['LINK_EXPIRY_TIMEOUT'] || 60000) s3: if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? @@ -51,6 +52,7 @@ settings = endpoint: process.env['AWS_S3_ENDPOINT'] pathStyle: process.env['AWS_S3_PATH_STYLE'] partSize: process.env['AWS_S3_PARTSIZE'] or (100 * 1024 * 1024) + bucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS']? # GCS should be configured by the service account on the kubernetes pod. See GOOGLE_APPLICATION_CREDENTIALS, # which will be picked up automatically. @@ -60,8 +62,6 @@ settings = template_files: process.env['TEMPLATE_FILES_BUCKET_NAME'] public_files: process.env['PUBLIC_FILES_BUCKET_NAME'] - s3BucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS']? - fallback: if process.env['FALLBACK_BACKEND']? 
backend: process.env['FALLBACK_BACKEND'] @@ -71,7 +71,6 @@ settings = copyOnMiss: process.env['COPY_ON_MISS'] == 'true' allowRedirects: if process.env['ALLOW_REDIRECTS'] == 'true' then true else false - signedUrlExpiryInMs: parseInt(process.env['LINK_EXPIRY_TIMEOUT'] || 60000) path: uploadFolder: Path.resolve(__dirname + "/../uploads") diff --git a/services/filestore/test/unit/js/SettingsTests.js b/services/filestore/test/unit/js/SettingsTests.js index 84c3361eab..230ea45d15 100644 --- a/services/filestore/test/unit/js/SettingsTests.js +++ b/services/filestore/test/unit/js/SettingsTests.js @@ -15,7 +15,7 @@ describe('Settings', function() { const settings = SandboxedModule.require('settings-sharelatex', { globals: { console, process } }) - expect(settings.filestore.s3BucketCreds).to.deep.equal(s3Settings) + expect(settings.filestore.s3.bucketCreds).to.deep.equal(s3Settings) }) }) }) From a64bbc928588415cbf1bfb249ddea580770d69c4 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 21 Jul 2020 15:58:01 +0100 Subject: [PATCH 519/555] Load logger module first --- services/filestore/app.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index 2d5b27ee6b..c242c8267b 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -1,8 +1,8 @@ -const Metrics = require('metrics-sharelatex') const logger = require('logger-sharelatex') +const Metrics = require('metrics-sharelatex') -Metrics.initialize('filestore') logger.initialize('filestore') +Metrics.initialize('filestore') const settings = require('settings-sharelatex') const express = require('express') From 89fc640ec4a33bd339d07290409e1c894947d258 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 22 Jul 2020 16:57:41 +0100 Subject: [PATCH 520/555] Use METRICS_APP_NAME env var to initialise metrics, if present --- services/filestore/app.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index c242c8267b..661b131de6 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -1,8 +1,8 @@ const logger = require('logger-sharelatex') -const Metrics = require('metrics-sharelatex') +logger.initialize(process.env.METRICS_APP_NAME || 'filestore') -logger.initialize('filestore') -Metrics.initialize('filestore') +const Metrics = require('metrics-sharelatex') +Metrics.initialize(process.env.METRICS_APP_NAME || 'filestore') const settings = require('settings-sharelatex') const express = require('express') From 5329cf24b00d232bfabdcf2746e0b7b77635d47d Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Wed, 22 Jul 2020 16:51:59 +0100 Subject: [PATCH 521/555] Remove unneeded @google-cloud/storage module from main deps --- services/filestore/package-lock.json | 313 +++++++++++++++++++++++++-- services/filestore/package.json | 2 +- 2 files changed, 294 insertions(+), 21 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index a7a0daeb90..ac7f83b015 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -692,13 +692,14 @@ "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==" }, "@google-cloud/storage": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-4.7.0.tgz", - "integrity": "sha512-f0guAlbeg7Z0m3gKjCfBCu7FG9qS3M3oL5OQQxlvGoPtK7/qg3+W+KQV73O2/sbuS54n0Kh2mvT5K2FWzF5vVQ==", + 
"version": "5.1.2", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-5.1.2.tgz", + "integrity": "sha512-j2blsBVv6Tt5Z7ff6kOSIg5zVQPdlcTQh/4zMb9h7xMj4ekwndQA60le8c1KEa+Y6SR3EM6ER2AvKYK53P7vdQ==", + "dev": true, "requires": { - "@google-cloud/common": "^2.1.1", - "@google-cloud/paginator": "^2.0.0", - "@google-cloud/promisify": "^1.0.0", + "@google-cloud/common": "^3.0.0", + "@google-cloud/paginator": "^3.0.0", + "@google-cloud/promisify": "^2.0.0", "arrify": "^2.0.0", "compressible": "^2.0.12", "concat-stream": "^2.0.0", @@ -706,24 +707,84 @@ "duplexify": "^3.5.0", "extend": "^3.0.2", "gaxios": "^3.0.0", - "gcs-resumable-upload": "^2.2.4", + "gcs-resumable-upload": "^3.0.0", "hash-stream-validation": "^0.2.2", "mime": "^2.2.0", "mime-types": "^2.0.8", "onetime": "^5.1.0", - "p-limit": "^2.2.0", + "p-limit": "^3.0.1", "pumpify": "^2.0.0", "readable-stream": "^3.4.0", "snakeize": "^0.1.0", "stream-events": "^1.0.1", - "through2": "^3.0.0", + "through2": "^4.0.0", "xdg-basedir": "^4.0.0" }, "dependencies": { - "gaxios": { + "@google-cloud/common": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.3.2.tgz", + "integrity": "sha512-W7JRLBEJWYtZQQuGQX06U6GBOSLrSrlvZxv6kGNwJtFrusu6AVgZltQ9Pajuz9Dh9aSXy9aTnBcyxn2/O0EGUw==", + "dev": true, + "requires": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^6.0.0", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + }, + "dependencies": { + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "dev": true, + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + } + } + }, + "@google-cloud/paginator": { "version": "3.0.2", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.2.tgz", - "integrity": "sha512-cLOetrsKOBLPwjzVyFzirYaGjrhtYjbKUHp6fQpsio2HH8Mil35JTFQLgkV5D3CCXV7Gnd5V69/m4C9rMBi9bA==", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.2.tgz", + "integrity": "sha512-kXK+Dbz4pNvv8bKU80Aw5HsIdgOe0WuMTd8/fI6tkANUxzvJOVJQQRsWVqcHSWK2RXHPTA9WBniUCwY6gAJDXw==", + "dev": true, + "requires": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + } + }, + "@google-cloud/projectify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==", + "dev": true + }, + "@google-cloud/promisify": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.2.tgz", + "integrity": "sha512-EvuabjzzZ9E2+OaYf+7P9OAiiwbTxKYL0oGLnREQd+Su2NTQBpomkdlkBowFvyWsaV0d1sSGxrKpSNcrhPqbxg==", + "dev": true + }, + "bignumber.js": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", + "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==", + "dev": true + }, + "gaxios": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.4.tgz", + "integrity": 
"sha512-97NmFuMETFQh6gqPUxkqjxRMjmY8aRKRMphIkgO/b90AbCt5wAVuXsp8oWjIXlLN2pIK/fsXD8edcM7ULkFMLg==", + "dev": true, "requires": { "abort-controller": "^3.0.0", "extend": "^3.0.2", @@ -732,20 +793,131 @@ "node-fetch": "^2.3.0" } }, + "gcp-metadata": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", + "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==", + "dev": true, + "requires": { + "gaxios": "^3.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.5.tgz", + "integrity": "sha512-Wj31lfTm2yR4g3WfOOB1Am1tt478Xq9OvzTPQJi17tn/I9R5IcsxjANBsE93nYmxYxtwDedhOdIb8l3vSPG49Q==", + "dev": true, + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^3.0.0", + "gcp-metadata": "^4.1.0", + "gtoken": "^5.0.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz", + "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==", + "dev": true, + "requires": { + "node-forge": "^0.9.0" + } + }, + "gtoken": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.2.tgz", + "integrity": "sha512-lull70rHCTvRTmAt+R/6W5bTtx4MjHku7AwJwK5fGqhOmygcZud0nrZcX+QUNfBJwCzqy7S5i1Bc4NYnr5PMMA==", + "dev": true, + "requires": { + "gaxios": "^3.0.0", + "google-p12-pem": "^3.0.0", + "jws": "^4.0.0", + "mime": "^2.2.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "dev": true, + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, "mime": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", - "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==", + "dev": true + }, + "p-limit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.0.2.tgz", + "integrity": "sha512-iwqZSOoWIW+Ew4kAGUlN16J4M7OB3ysMLSZtnhmqx7njIHFPlxWBX8xo3lVTyFVq6mI/lL9qt2IsN1sHwaxJkg==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } }, "readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, "requires": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } + }, + "teeny-request": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.0.tgz", + "integrity": 
"sha512-kWD3sdGmIix6w7c8ZdVKxWq+3YwVPGWz+Mq0wRZXayEKY/YHb63b8uphfBzcFDmyq8frD9+UTc3wLyOhltRbtg==", + "dev": true, + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.2.0", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + } + }, + "through2": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", + "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", + "dev": true, + "requires": { + "readable-stream": "3" + } + }, + "uuid": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.2.0.tgz", + "integrity": "sha512-CYpGiFTUrmI6OBMkAdjSDM0k5h8SkkiTP4WAjQgDgNB1S3Ou9VBEvr6q0Kv2H1mMk7IWfxYGpMH5sd5AvcIV2Q==", + "dev": true + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true } } }, @@ -3071,16 +3243,117 @@ } }, "gcs-resumable-upload": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-2.3.3.tgz", - "integrity": "sha512-sf896I5CC/1AxeaGfSFg3vKMjUq/r+A3bscmVzZm10CElyRanN0XwPu/MxeIO4LSP+9uF6yKzXvNsaTsMXUG6Q==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-3.1.1.tgz", + "integrity": "sha512-RS1osvAicj9+MjCc6jAcVL1Pt3tg7NK2C2gXM5nqD1Gs0klF2kj5nnAFSBy97JrtslMIQzpb7iSuxaG8rFWd2A==", + "dev": true, "requires": { "abort-controller": "^3.0.0", "configstore": "^5.0.0", - "gaxios": "^2.0.0", - "google-auth-library": "^5.0.0", + "extend": "^3.0.2", + "gaxios": "^3.0.0", + "google-auth-library": "^6.0.0", "pumpify": "^2.0.0", "stream-events": "^1.0.4" + }, + "dependencies": { + "bignumber.js": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", + "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==", + "dev": true + }, + "gaxios": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.4.tgz", + "integrity": "sha512-97NmFuMETFQh6gqPUxkqjxRMjmY8aRKRMphIkgO/b90AbCt5wAVuXsp8oWjIXlLN2pIK/fsXD8edcM7ULkFMLg==", + "dev": true, + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", + "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==", + "dev": true, + "requires": { + "gaxios": "^3.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.5.tgz", + "integrity": "sha512-Wj31lfTm2yR4g3WfOOB1Am1tt478Xq9OvzTPQJi17tn/I9R5IcsxjANBsE93nYmxYxtwDedhOdIb8l3vSPG49Q==", + "dev": true, + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^3.0.0", + "gcp-metadata": "^4.1.0", + "gtoken": "^5.0.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz", + "integrity": 
"sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==", + "dev": true, + "requires": { + "node-forge": "^0.9.0" + } + }, + "gtoken": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.2.tgz", + "integrity": "sha512-lull70rHCTvRTmAt+R/6W5bTtx4MjHku7AwJwK5fGqhOmygcZud0nrZcX+QUNfBJwCzqy7S5i1Bc4NYnr5PMMA==", + "dev": true, + "requires": { + "gaxios": "^3.0.0", + "google-p12-pem": "^3.0.0", + "jws": "^4.0.0", + "mime": "^2.2.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "dev": true, + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "mime": { + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==", + "dev": true + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + } } }, "get-caller-file": { diff --git a/services/filestore/package.json b/services/filestore/package.json index 8efec070aa..2c23e2adbb 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -20,7 +20,6 @@ "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" }, "dependencies": { - "@google-cloud/storage": "^4.7.0", "@overleaf/o-error": "^3.0.0", "@overleaf/object-persistor": "git+https://github.com/overleaf/object-persistor.git", "aws-sdk": "^2.710.0", @@ -40,6 +39,7 @@ "tiny-async-pool": "^1.1.0" }, "devDependencies": { + "@google-cloud/storage": "^5.1.2", "babel-eslint": "^10.1.0", "bunyan": "^1.8.14", "chai": "4.2.0", From 4bb6509b9c327c651e755b6e4051e60511c14349 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 23 Jul 2020 10:45:27 +0100 Subject: [PATCH 522/555] Update object-persistor and make aws a dev dependency --- services/filestore/package-lock.json | 264 ++++----------------------- services/filestore/package.json | 2 +- 2 files changed, 35 insertions(+), 231 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index ac7f83b015..5a1357d227 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -695,7 +695,6 @@ "version": "5.1.2", "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-5.1.2.tgz", "integrity": "sha512-j2blsBVv6Tt5Z7ff6kOSIg5zVQPdlcTQh/4zMb9h7xMj4ekwndQA60le8c1KEa+Y6SR3EM6ER2AvKYK53P7vdQ==", - "dev": true, "requires": { "@google-cloud/common": "^3.0.0", "@google-cloud/paginator": "^3.0.0", @@ -725,7 +724,6 @@ "version": "3.3.2", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.3.2.tgz", "integrity": "sha512-W7JRLBEJWYtZQQuGQX06U6GBOSLrSrlvZxv6kGNwJtFrusu6AVgZltQ9Pajuz9Dh9aSXy9aTnBcyxn2/O0EGUw==", - "dev": true, "requires": { "@google-cloud/projectify": "^2.0.0", "@google-cloud/promisify": "^2.0.0", @@ -742,7 +740,6 @@ "version": 
"4.1.1", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "dev": true, "requires": { "end-of-stream": "^1.4.1", "inherits": "^2.0.3", @@ -756,7 +753,6 @@ "version": "3.0.2", "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.2.tgz", "integrity": "sha512-kXK+Dbz4pNvv8bKU80Aw5HsIdgOe0WuMTd8/fI6tkANUxzvJOVJQQRsWVqcHSWK2RXHPTA9WBniUCwY6gAJDXw==", - "dev": true, "requires": { "arrify": "^2.0.0", "extend": "^3.0.2" @@ -765,26 +761,22 @@ "@google-cloud/projectify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", - "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==", - "dev": true + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" }, "@google-cloud/promisify": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.2.tgz", - "integrity": "sha512-EvuabjzzZ9E2+OaYf+7P9OAiiwbTxKYL0oGLnREQd+Su2NTQBpomkdlkBowFvyWsaV0d1sSGxrKpSNcrhPqbxg==", - "dev": true + "integrity": "sha512-EvuabjzzZ9E2+OaYf+7P9OAiiwbTxKYL0oGLnREQd+Su2NTQBpomkdlkBowFvyWsaV0d1sSGxrKpSNcrhPqbxg==" }, "bignumber.js": { "version": "9.0.0", "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", - "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==", - "dev": true + "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==" }, "gaxios": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.4.tgz", "integrity": "sha512-97NmFuMETFQh6gqPUxkqjxRMjmY8aRKRMphIkgO/b90AbCt5wAVuXsp8oWjIXlLN2pIK/fsXD8edcM7ULkFMLg==", - "dev": true, "requires": { "abort-controller": "^3.0.0", "extend": "^3.0.2", @@ -797,7 +789,6 @@ "version": "4.1.4", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==", - "dev": true, "requires": { "gaxios": "^3.0.0", "json-bigint": "^1.0.0" @@ -807,7 +798,6 @@ "version": "6.0.5", "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.5.tgz", "integrity": "sha512-Wj31lfTm2yR4g3WfOOB1Am1tt478Xq9OvzTPQJi17tn/I9R5IcsxjANBsE93nYmxYxtwDedhOdIb8l3vSPG49Q==", - "dev": true, "requires": { "arrify": "^2.0.0", "base64-js": "^1.3.0", @@ -824,7 +814,6 @@ "version": "3.0.2", "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz", "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==", - "dev": true, "requires": { "node-forge": "^0.9.0" } @@ -833,7 +822,6 @@ "version": "5.0.2", "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.2.tgz", "integrity": "sha512-lull70rHCTvRTmAt+R/6W5bTtx4MjHku7AwJwK5fGqhOmygcZud0nrZcX+QUNfBJwCzqy7S5i1Bc4NYnr5PMMA==", - "dev": true, "requires": { "gaxios": "^3.0.0", "google-p12-pem": "^3.0.0", @@ -845,7 +833,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", - "dev": true, "requires": { "bignumber.js": "^9.0.0" } @@ -854,7 +841,6 @@ "version": 
"6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, "requires": { "yallist": "^4.0.0" } @@ -862,14 +848,12 @@ "mime": { "version": "2.4.6", "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", - "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==", - "dev": true + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" }, "p-limit": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.0.2.tgz", "integrity": "sha512-iwqZSOoWIW+Ew4kAGUlN16J4M7OB3ysMLSZtnhmqx7njIHFPlxWBX8xo3lVTyFVq6mI/lL9qt2IsN1sHwaxJkg==", - "dev": true, "requires": { "p-try": "^2.0.0" } @@ -878,7 +862,6 @@ "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dev": true, "requires": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -889,7 +872,6 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.0.tgz", "integrity": "sha512-kWD3sdGmIix6w7c8ZdVKxWq+3YwVPGWz+Mq0wRZXayEKY/YHb63b8uphfBzcFDmyq8frD9+UTc3wLyOhltRbtg==", - "dev": true, "requires": { "http-proxy-agent": "^4.0.0", "https-proxy-agent": "^5.0.0", @@ -902,7 +884,6 @@ "version": "4.0.2", "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", - "dev": true, "requires": { "readable-stream": "3" } @@ -910,14 +891,12 @@ "uuid": { "version": "8.2.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.2.0.tgz", - "integrity": "sha512-CYpGiFTUrmI6OBMkAdjSDM0k5h8SkkiTP4WAjQgDgNB1S3Ou9VBEvr6q0Kv2H1mMk7IWfxYGpMH5sd5AvcIV2Q==", - "dev": true + "integrity": "sha512-CYpGiFTUrmI6OBMkAdjSDM0k5h8SkkiTP4WAjQgDgNB1S3Ou9VBEvr6q0Kv2H1mMk7IWfxYGpMH5sd5AvcIV2Q==" }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" } } }, @@ -1168,12 +1147,12 @@ "integrity": "sha512-LsM2s6Iy9G97ktPo0ys4VxtI/m3ahc1ZHwjo5XnhXtjeIkkkVAehsrcRRoV/yWepPjymB0oZonhcfojpjYR/tg==" }, "@overleaf/object-persistor": { - "version": "git+https://github.com/overleaf/object-persistor.git#923c26a04dfeb9e79caaa5133394678253b5d006", + "version": "git+https://github.com/overleaf/object-persistor.git#8b8bc4b8d1e8b8aa3ca9245691d6ddd69d663d06", "from": "git+https://github.com/overleaf/object-persistor.git", "requires": { - "@google-cloud/storage": "^5.1.1", + "@google-cloud/storage": "^5.1.2", "@overleaf/o-error": "^3.0.0", - "aws-sdk": "^2.710.0", + "aws-sdk": "^2.718.0", "fast-crc32c": "^2.0.0", "glob": "^7.1.6", "logger-sharelatex": "^2.1.1", @@ -1182,192 +1161,26 @@ "tiny-async-pool": "^1.1.0" }, "dependencies": { - "@google-cloud/common": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.3.1.tgz", - "integrity": "sha512-bJamcNvZ2j5xS01uFBT1GqfHIKrtwpyUhIU/Xn3uwMZkK/t6JA3mlID0wuZlo7XjbjFSRT2iLBEmDWv9T2hP8g==", + "aws-sdk": { + "version": 
"2.718.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.718.0.tgz", + "integrity": "sha512-YMWR1RJ3VuSbUOGeOfDw2QqRzwX51oa9TCm2G6SW+JywJUy0FTxi/Nj0VjVEQvKC0GqGu5QCgUTaarF7S0nQdw==", "requires": { - "@google-cloud/projectify": "^2.0.0", - "@google-cloud/promisify": "^2.0.0", - "arrify": "^2.0.1", - "duplexify": "^4.1.1", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^6.0.0", - "retry-request": "^4.1.1", - "teeny-request": "^7.0.0" - }, - "dependencies": { - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - } - } - }, - "@google-cloud/paginator": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.1.tgz", - "integrity": "sha512-ykqRmHRg6rcIZTE+JjUMNBKOQ8uvmbVrhY//lTxZgf5QBPbZW3PoN7VK+D43yCaRJJjRmmWsaG5YdxLR6h0n0A==", - "requires": { - "arrify": "^2.0.0", - "extend": "^3.0.2" - } - }, - "@google-cloud/projectify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.0.tgz", - "integrity": "sha512-7wZ+m4N3Imtb5afOPfqNFyj9cKrlfVQ+t5YRxLS7tUpn8Pn/i7QuVubZRTXllaWjO4T5t/gm/r2x7oy5ajjvFQ==" - }, - "@google-cloud/promisify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.1.tgz", - "integrity": "sha512-82EQzwrNauw1fkbUSr3f+50Bcq7g4h0XvLOk8C5e9ABkXYHei7ZPi9tiMMD7Vh3SfcdH97d1ibJ3KBWp2o1J+w==" - }, - "@google-cloud/storage": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-5.1.1.tgz", - "integrity": "sha512-w/64V+eJl+vpYUXT15sBcO8pX0KTmb9Ni2ZNuQQ8HmyhAbEA3//G8JFaLPCXGBWO2/b0OQZytUT6q2wII9a9aQ==", - "requires": { - "@google-cloud/common": "^3.0.0", - "@google-cloud/paginator": "^3.0.0", - "@google-cloud/promisify": "^2.0.0", - "arrify": "^2.0.0", - "compressible": "^2.0.12", - "concat-stream": "^2.0.0", - "date-and-time": "^0.13.0", - "duplexify": "^3.5.0", - "extend": "^3.0.2", - "gaxios": "^3.0.0", - "gcs-resumable-upload": "^3.0.0", - "hash-stream-validation": "^0.2.2", - "mime": "^2.2.0", - "mime-types": "^2.0.8", - "onetime": "^5.1.0", - "p-limit": "^3.0.1", - "pumpify": "^2.0.0", - "readable-stream": "^3.4.0", - "snakeize": "^0.1.0", - "stream-events": "^1.0.1", - "through2": "^3.0.0", - "xdg-basedir": "^4.0.0" - } - }, - "gaxios": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.3.tgz", - "integrity": "sha512-PkzQludeIFhd535/yucALT/Wxyj/y2zLyrMwPcJmnLHDugmV49NvAi/vb+VUq/eWztATZCNcb8ue+ywPG+oLuw==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^5.0.0", - "is-stream": "^2.0.0", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.0.tgz", - "integrity": "sha512-r57SV28+olVsflPlKyVig3Muo/VDlcsObMtvDGOEtEJXj+DDE8bEl0coIkXh//hbkSDTvo+f5lbihZOndYXQQQ==", - "requires": { - "gaxios": "^3.0.0", - "json-bigint": "^0.3.0" - } - }, - "gcs-resumable-upload": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-3.1.0.tgz", - "integrity": 
"sha512-gB8xH6EjYCv9lfBEL4FK5+AMgTY0feYoNHAYOV5nCuOrDPhy5MOiyJE8WosgxhbKBPS361H7fkwv6CTufEh9bg==", - "requires": { - "abort-controller": "^3.0.0", - "configstore": "^5.0.0", - "extend": "^3.0.2", - "gaxios": "^3.0.0", - "google-auth-library": "^6.0.0", - "pumpify": "^2.0.0", - "stream-events": "^1.0.4" - } - }, - "google-auth-library": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.3.tgz", - "integrity": "sha512-2Np6ojPmaJGXHSMsBhtTQEKfSMdLc8hefoihv7N2cwEr8E5bq39fhoat6TcXHwa0XoGO5Guh9sp3nxHFPmibMw==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^3.0.0", - "gcp-metadata": "^4.1.0", - "gtoken": "^5.0.0", - "jws": "^4.0.0", - "lru-cache": "^5.0.0" - } - }, - "google-p12-pem": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.1.tgz", - "integrity": "sha512-VlQgtozgNVVVcYTXS36eQz4PXPt9gIPqLOhHN0QiV6W6h4qSCNVKPtKC5INtJsaHHF2r7+nOIa26MJeJMTaZEQ==", - "requires": { - "node-forge": "^0.9.0" - } - }, - "gtoken": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.1.tgz", - "integrity": "sha512-33w4FNDkUcyIOq/TqyC+drnKdI4PdXmWp9lZzssyEQKuvu9ZFN3KttaSnDKo52U3E51oujVGop93mKxmqO8HHg==", - "requires": { - "gaxios": "^3.0.0", - "google-p12-pem": "^3.0.0", - "jws": "^4.0.0", - "mime": "^2.2.0" - } - }, - "mime": { - "version": "2.4.6", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", - "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" - }, - "p-limit": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.0.1.tgz", - "integrity": "sha512-mw/p92EyOzl2MhauKodw54Rx5ZK4624rNfgNaBguFZkHzyUG9WsDzFF5/yQVEJinbJDdP4jEfMN+uBquiGnaLg==", - "requires": { - "p-try": "^2.0.0" - } - }, - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, - "teeny-request": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.0.tgz", - "integrity": "sha512-kWD3sdGmIix6w7c8ZdVKxWq+3YwVPGWz+Mq0wRZXayEKY/YHb63b8uphfBzcFDmyq8frD9+UTc3wLyOhltRbtg==", - "requires": { - "http-proxy-agent": "^4.0.0", - "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.2.0", - "stream-events": "^1.0.5", - "uuid": "^8.0.0" + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.15.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "uuid": "3.3.2", + "xml2js": "0.4.19" } }, "uuid": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.2.0.tgz", - "integrity": "sha512-CYpGiFTUrmI6OBMkAdjSDM0k5h8SkkiTP4WAjQgDgNB1S3Ou9VBEvr6q0Kv2H1mMk7IWfxYGpMH5sd5AvcIV2Q==" + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" } } }, @@ -1817,9 +1630,10 @@ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "aws-sdk": { - "version": "2.710.0", - "resolved": 
"https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.710.0.tgz", - "integrity": "sha512-GQTPH0DzJMpvvtZ3VO+grkKVdL/nqjWsIfcVf1c3oedvEjW24wSXQEs6KWAGbpG2jbHsYKH7kZ4XXuq428LVAw==", + "version": "2.718.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.718.0.tgz", + "integrity": "sha512-YMWR1RJ3VuSbUOGeOfDw2QqRzwX51oa9TCm2G6SW+JywJUy0FTxi/Nj0VjVEQvKC0GqGu5QCgUTaarF7S0nQdw==", + "dev": true, "requires": { "buffer": "4.9.2", "events": "1.1.1", @@ -1835,7 +1649,8 @@ "uuid": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", + "dev": true } } }, @@ -3246,7 +3061,6 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-3.1.1.tgz", "integrity": "sha512-RS1osvAicj9+MjCc6jAcVL1Pt3tg7NK2C2gXM5nqD1Gs0klF2kj5nnAFSBy97JrtslMIQzpb7iSuxaG8rFWd2A==", - "dev": true, "requires": { "abort-controller": "^3.0.0", "configstore": "^5.0.0", @@ -3260,14 +3074,12 @@ "bignumber.js": { "version": "9.0.0", "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", - "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==", - "dev": true + "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==" }, "gaxios": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.4.tgz", "integrity": "sha512-97NmFuMETFQh6gqPUxkqjxRMjmY8aRKRMphIkgO/b90AbCt5wAVuXsp8oWjIXlLN2pIK/fsXD8edcM7ULkFMLg==", - "dev": true, "requires": { "abort-controller": "^3.0.0", "extend": "^3.0.2", @@ -3280,7 +3092,6 @@ "version": "4.1.4", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==", - "dev": true, "requires": { "gaxios": "^3.0.0", "json-bigint": "^1.0.0" @@ -3290,7 +3101,6 @@ "version": "6.0.5", "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.5.tgz", "integrity": "sha512-Wj31lfTm2yR4g3WfOOB1Am1tt478Xq9OvzTPQJi17tn/I9R5IcsxjANBsE93nYmxYxtwDedhOdIb8l3vSPG49Q==", - "dev": true, "requires": { "arrify": "^2.0.0", "base64-js": "^1.3.0", @@ -3307,7 +3117,6 @@ "version": "3.0.2", "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz", "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==", - "dev": true, "requires": { "node-forge": "^0.9.0" } @@ -3316,7 +3125,6 @@ "version": "5.0.2", "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.2.tgz", "integrity": "sha512-lull70rHCTvRTmAt+R/6W5bTtx4MjHku7AwJwK5fGqhOmygcZud0nrZcX+QUNfBJwCzqy7S5i1Bc4NYnr5PMMA==", - "dev": true, "requires": { "gaxios": "^3.0.0", "google-p12-pem": "^3.0.0", @@ -3328,7 +3136,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", - "dev": true, "requires": { "bignumber.js": "^9.0.0" } @@ -3337,7 +3144,6 @@ "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": 
"sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, "requires": { "yallist": "^4.0.0" } @@ -3345,14 +3151,12 @@ "mime": { "version": "2.4.6", "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", - "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==", - "dev": true + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" } } }, diff --git a/services/filestore/package.json b/services/filestore/package.json index 2c23e2adbb..11183e1df8 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -22,7 +22,6 @@ "dependencies": { "@overleaf/o-error": "^3.0.0", "@overleaf/object-persistor": "git+https://github.com/overleaf/object-persistor.git", - "aws-sdk": "^2.710.0", "body-parser": "^1.19.0", "express": "^4.17.1", "fast-crc32c": "^2.0.0", @@ -40,6 +39,7 @@ }, "devDependencies": { "@google-cloud/storage": "^5.1.2", + "aws-sdk": "^2.718.0", "babel-eslint": "^10.1.0", "bunyan": "^1.8.14", "chai": "4.2.0", From cb86ee8c10c43609df898adb1e090b2621b5c6a9 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 23 Jul 2020 13:35:52 +0100 Subject: [PATCH 523/555] Initialise metrics before logger --- services/filestore/app.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index 661b131de6..83113f6b42 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -1,9 +1,9 @@ -const logger = require('logger-sharelatex') -logger.initialize(process.env.METRICS_APP_NAME || 'filestore') - const Metrics = require('metrics-sharelatex') Metrics.initialize(process.env.METRICS_APP_NAME || 'filestore') +const logger = require('logger-sharelatex') +logger.initialize(process.env.METRICS_APP_NAME || 'filestore') + const settings = require('settings-sharelatex') const express = require('express') const bodyParser = require('body-parser') From f0551307d25f2dc6fb6437677a772f081a3544fb Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 10 Aug 2020 17:01:12 +0100 Subject: [PATCH 524/555] [misc] bump the dev-env to 3.3.2 --- services/filestore/.github/dependabot.yml | 17 ++ services/filestore/.gitignore | 3 + services/filestore/Dockerfile | 2 - services/filestore/Jenkinsfile | 131 ----------- services/filestore/Makefile | 6 +- services/filestore/app.js | 8 +- services/filestore/app/js/FileController.js | 18 +- services/filestore/app/js/FileHandler.js | 6 +- .../filestore/app/js/HealthCheckController.js | 2 +- services/filestore/app/js/RequestLogger.js | 7 +- services/filestore/app/js/SafeExec.js | 12 +- services/filestore/buildscript.txt | 4 +- services/filestore/docker-compose.ci.yml | 23 +- services/filestore/docker-compose.yml | 29 ++- services/filestore/nodemon.json | 1 - services/filestore/package-lock.json | 18 +- services/filestore/package.json | 15 +- .../test/acceptance/js/FilestoreApp.js | 4 +- .../test/acceptance/js/FilestoreTests.js | 208 +++++++++--------- .../test/acceptance/js/TestHelper.js | 2 +- .../test/unit/js/FileControllerTests.js | 100 ++++----- 
.../test/unit/js/FileConverterTests.js | 26 +-- .../test/unit/js/FileHandlerTests.js | 126 +++++------ .../test/unit/js/ImageOptimiserTests.js | 24 +- .../filestore/test/unit/js/KeybuilderTests.js | 12 +- .../test/unit/js/LocalFileWriterTests.js | 36 +-- .../filestore/test/unit/js/SafeExecTests.js | 34 +-- .../filestore/test/unit/js/SettingsTests.js | 6 +- 28 files changed, 383 insertions(+), 497 deletions(-) create mode 100644 services/filestore/.github/dependabot.yml delete mode 100644 services/filestore/Jenkinsfile diff --git a/services/filestore/.github/dependabot.yml b/services/filestore/.github/dependabot.yml new file mode 100644 index 0000000000..c6f98d843d --- /dev/null +++ b/services/filestore/.github/dependabot.yml @@ -0,0 +1,17 @@ +version: 2 +updates: + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "daily" + + pull-request-branch-name: + # Separate sections of the branch name with a hyphen + # Docker images use the branch name and do not support slashes in tags + # https://github.com/overleaf/google-ops/issues/822 + # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#pull-request-branch-nameseparator + separator: "-" + + # Block informal upgrades -- security upgrades use a separate queue. + # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit + open-pull-requests-limit: 0 diff --git a/services/filestore/.gitignore b/services/filestore/.gitignore index 3c0b90c20d..a2f4b5afb2 100644 --- a/services/filestore/.gitignore +++ b/services/filestore/.gitignore @@ -49,3 +49,6 @@ template_files/* /log.json hash_folder + +# managed by dev-environment$ bin/update_build_scripts +.npmrc diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index 0004f0efc7..60264147af 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -17,8 +17,6 @@ RUN npm ci --quiet COPY . /app - - FROM base COPY --from=app /app /app diff --git a/services/filestore/Jenkinsfile b/services/filestore/Jenkinsfile deleted file mode 100644 index fe20b5e06e..0000000000 --- a/services/filestore/Jenkinsfile +++ /dev/null @@ -1,131 +0,0 @@ -String cron_string = BRANCH_NAME == "master" ? 
"@daily" : "" - -pipeline { - agent any - - environment { - GIT_PROJECT = "filestore" - JENKINS_WORKFLOW = "filestore-sharelatex" - TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline" - GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT" - } - - triggers { - pollSCM('* * * * *') - cron(cron_string) - } - - stages { - - stage('Install') { - steps { - withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { - sh "curl $GIT_API_URL \ - --data '{ \ - \"state\" : \"pending\", \ - \"target_url\": \"$TARGET_URL\", \ - \"description\": \"Your build is underway\", \ - \"context\": \"ci/jenkins\" }' \ - -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" - } - } - } - - stage('Build') { - steps { - sh 'make build' - } - } - - stage('Linting') { - steps { - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make format' - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make lint' - } - } - - stage('Unit Tests') { - steps { - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit' - } - } - - stage('Acceptance Tests') { - steps { - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance' - } - } - - stage('Package and docker push') { - steps { - sh 'echo ${BUILD_NUMBER} > build_number.txt' - sh 'touch build.tar.gz' // Avoid tar warning about files changing during read - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar' - - withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) { - sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}' - } - sh 'DOCKER_REPO=gcr.io/overleaf-ops make publish' - sh 'docker logout https://gcr.io/overleaf-ops' - - } - } - - stage('Publish to s3') { - steps { - sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt' - withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { - s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz") - } - withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") { - // The deployment process uses this file to figure out the latest build - s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest") - } - } - } - } - - post { - always { - sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean' - sh 'make clean' - } - - success { - withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { - sh "curl $GIT_API_URL \ - --data '{ \ - \"state\" : \"success\", \ - \"target_url\": \"$TARGET_URL\", \ - \"description\": \"Your build succeeded!\", \ - \"context\": \"ci/jenkins\" }' \ - -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" - } - } - - failure { - mail(from: "${EMAIL_ALERT_FROM}", - to: "${EMAIL_ALERT_TO}", - subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}", - body: "Build: ${BUILD_URL}") - withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) { - sh "curl $GIT_API_URL \ - --data '{ \ - \"state\" : \"failure\", \ - \"target_url\": \"$TARGET_URL\", \ - \"description\": \"Your build failed\", \ - \"context\": \"ci/jenkins\" }' \ - -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD" - } - } 
- } - - // The options directive is for configuration that applies to the whole job. - options { - // we'd like to make sure remove old builds, so we don't fill up our storage! - buildDiscarder(logRotator(numToKeepStr:'50')) - - // And we'd really like to be sure that this build doesn't hang forever, so let's time it out after: - timeout(time: 30, unit: 'MINUTES') - } -} diff --git a/services/filestore/Makefile b/services/filestore/Makefile index 87c33e1bbd..f713b7a0a2 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -25,13 +25,13 @@ clean: docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) format: - $(DOCKER_COMPOSE) run --rm test_unit npm run format + $(DOCKER_COMPOSE) run --rm test_unit npm run --silent format format_fix: - $(DOCKER_COMPOSE) run --rm test_unit npm run format:fix + $(DOCKER_COMPOSE) run --rm test_unit npm run --silent format:fix lint: - $(DOCKER_COMPOSE) run --rm test_unit npm run lint + $(DOCKER_COMPOSE) run --rm test_unit npm run --silent lint test: format lint test_unit test_acceptance diff --git a/services/filestore/app.js b/services/filestore/app.js index 83113f6b42..7a2a866ea2 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -28,7 +28,7 @@ if (Metrics.event_loop) { Metrics.event_loop.monitor(logger) } -app.use(function(req, res, next) { +app.use(function (req, res, next) { Metrics.inc('http-request') next() }) @@ -127,7 +127,7 @@ app.get( fileController.getFile ) -app.get('/status', function(req, res) { +app.get('/status', function (req, res) { res.send('filestore sharelatex up') }) @@ -140,7 +140,7 @@ const host = '0.0.0.0' if (!module.parent) { // Called directly - app.listen(port, host, error => { + app.listen(port, host, (error) => { if (error) { logger.error('Error starting Filestore', error) throw error @@ -153,7 +153,7 @@ process .on('unhandledRejection', (reason, p) => { logger.err(reason, 'Unhandled Rejection at Promise', p) }) - .on('uncaughtException', err => { + .on('uncaughtException', (err) => { logger.err(err, 'Uncaught Exception thrown') process.exit(1) }) diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index 5d1f0aa5b1..80b67d7e9c 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -46,7 +46,7 @@ function getFile(req, res, next) { } } - FileHandler.getRedirectUrl(bucket, key, options, function(err, redirectUrl) { + FileHandler.getRedirectUrl(bucket, key, options, function (err, redirectUrl) { if (err) { metrics.inc('file_redirect_error') } @@ -56,7 +56,7 @@ function getFile(req, res, next) { return res.redirect(redirectUrl) } - FileHandler.getFile(bucket, key, options, function(err, fileStream) { + FileHandler.getFile(bucket, key, options, function (err, fileStream) { if (err) { if (err instanceof Errors.NotFoundError) { res.sendStatus(404) @@ -70,7 +70,7 @@ function getFile(req, res, next) { return res.sendStatus(200).end() } - pipeline(fileStream, res, err => { + pipeline(fileStream, res, (err) => { if (err && err.code === 'ERR_STREAM_PREMATURE_CLOSE') { res.end() } else if (err) { @@ -94,7 +94,7 @@ function getFileHead(req, res, next) { req.requestLogger.setMessage('getting file size') req.requestLogger.addFields({ key, bucket }) - FileHandler.getFileSize(bucket, key, function(err, fileSize) { + FileHandler.getFileSize(bucket, key, function (err, fileSize) { if (err) { if (err instanceof Errors.NotFoundError) { res.sendStatus(404) @@ -115,7 +115,7 @@ 
function insertFile(req, res, next) { req.requestLogger.setMessage('inserting file') req.requestLogger.addFields({ key, bucket }) - FileHandler.insertFile(bucket, key, req, function(err) { + FileHandler.insertFile(bucket, key, req, function (err) { if (err) { next(err) } else { @@ -140,7 +140,7 @@ function copyFile(req, res, next) { PersistorManager.copyObject(bucket, `${oldProjectId}/${oldFileId}`, key) .then(() => res.sendStatus(200)) - .catch(err => { + .catch((err) => { if (err) { if (err instanceof Errors.NotFoundError) { res.sendStatus(404) @@ -158,7 +158,7 @@ function deleteFile(req, res, next) { req.requestLogger.addFields({ key, bucket }) req.requestLogger.setMessage('deleting file') - FileHandler.deleteFile(bucket, key, function(err) { + FileHandler.deleteFile(bucket, key, function (err) { if (err) { next(err) } else { @@ -174,7 +174,7 @@ function deleteProject(req, res, next) { req.requestLogger.setMessage('deleting project') req.requestLogger.addFields({ key, bucket }) - FileHandler.deleteProject(bucket, key, function(err) { + FileHandler.deleteProject(bucket, key, function (err) { if (err) { if (err instanceof Errors.InvalidParametersError) { return res.sendStatus(400) @@ -193,7 +193,7 @@ function directorySize(req, res, next) { req.requestLogger.setMessage('getting project size') req.requestLogger.addFields({ projectId, bucket }) - FileHandler.getDirectorySize(bucket, projectId, function(err, size) { + FileHandler.getDirectorySize(bucket, projectId, function (err, size) { if (err) { return next(err) } diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index 6ac25a3c9e..7f476e3630 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -143,8 +143,8 @@ async function _getConvertedFileAndCache(bucket, key, convertedKey, opts) { // S3 provides eventual consistency for read-after-write."" // https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel const readStream = fs.createReadStream(convertedFsPath) - readStream.on('end', function() { - LocalFileWriter.deleteFile(convertedFsPath, function() {}) + readStream.on('end', function () { + LocalFileWriter.deleteFile(convertedFsPath, function () {}) }) return readStream } @@ -185,7 +185,7 @@ async function _convertFile(bucket, originalKey, opts) { err ) } - LocalFileWriter.deleteFile(originalFsPath, function() {}) + LocalFileWriter.deleteFile(originalFsPath, function () {}) return destPath } diff --git a/services/filestore/app/js/HealthCheckController.js b/services/filestore/app/js/HealthCheckController.js index 4ee7534307..c71b567fe0 100644 --- a/services/filestore/app/js/HealthCheckController.js +++ b/services/filestore/app/js/HealthCheckController.js @@ -62,7 +62,7 @@ module.exports = { check(req, res, next) { Promise.all([checkCanGetFiles(), checkFileConvert()]) .then(() => res.sendStatus(200)) - .catch(err => { + .catch((err) => { next(err) }) } diff --git a/services/filestore/app/js/RequestLogger.js b/services/filestore/app/js/RequestLogger.js index f68e4b4304..ae706c2829 100644 --- a/services/filestore/app/js/RequestLogger.js +++ b/services/filestore/app/js/RequestLogger.js @@ -26,7 +26,7 @@ class RequestLogger { // override the 'end' method to log and record metrics const end = res.end - res.end = function() { + res.end = function () { // apply the standard request 'end' method before logging and metrics end.apply(this, arguments) @@ -38,10 +38,7 @@ class RequestLogger { metrics.timing('http_request', 
responseTime, null, { method: req.method, status_code: res.statusCode, - path: routePath - .replace(/\//g, '_') - .replace(/:/g, '') - .slice(1) + path: routePath.replace(/\//g, '_').replace(/:/g, '').slice(1) }) } diff --git a/services/filestore/app/js/SafeExec.js b/services/filestore/app/js/SafeExec.js index 5ee8e8830b..6bdae0ea34 100644 --- a/services/filestore/app/js/SafeExec.js +++ b/services/filestore/app/js/SafeExec.js @@ -28,7 +28,7 @@ function safeExec(command, options, callback) { let killTimer - const cleanup = lodashOnce(function(err) { + const cleanup = lodashOnce(function (err) { if (killTimer) { clearTimeout(killTimer) } @@ -36,7 +36,7 @@ function safeExec(command, options, callback) { }) if (options.timeout) { - killTimer = setTimeout(function() { + killTimer = setTimeout(function () { try { // use negative process id to kill process group process.kill(-child.pid, options.killSignal || 'SIGTERM') @@ -52,7 +52,7 @@ function safeExec(command, options, callback) { }, options.timeout) } - child.on('close', function(code, signal) { + child.on('close', function (code, signal) { if (code || signal) { return cleanup( new FailedCommandError(command, code || signal, stdout, stderr) @@ -62,13 +62,13 @@ function safeExec(command, options, callback) { cleanup() }) - child.on('error', err => { + child.on('error', (err) => { cleanup(err) }) - child.stdout.on('data', chunk => { + child.stdout.on('data', (chunk) => { stdout += chunk }) - child.stderr.on('data', chunk => { + child.stderr.on('data', (chunk) => { stderr += chunk }) } diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index 5938593a5d..c3dbf05e1d 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -1,11 +1,9 @@ filestore ---acceptance-creds= --data-dirs=uploads,user_files,template_files --dependencies=s3,gcs --docker-repos=gcr.io/overleaf-ops --env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_BUCKET_NAME=fake_user_files,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake_template_files,AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake_public_files --env-pass-through= ---language=es --node-version=12.18.0 --public-repo=True ---script-version=2.2.0 +--script-version=3.3.2 diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index e6d4ddcc76..5f2d01e9a6 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -11,6 +11,7 @@ services: command: npm run test:unit:_run environment: NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" test_acceptance: @@ -21,23 +22,22 @@ services: REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres + AWS_S3_ENDPOINT: http://s3:9090 + AWS_S3_PATH_STYLE: 'true' + AWS_ACCESS_KEY_ID: fake + AWS_SECRET_ACCESS_KEY: fake + GCS_API_ENDPOINT: gcs:9090 + GCS_API_SCHEME: http + GCS_PROJECT_ID: fake + STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1 MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" ENABLE_CONVERSIONS: "true" USE_PROM_METRICS: "true" AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files - AWS_S3_ENDPOINT: http://s3:9090 - AWS_ACCESS_KEY_ID: fake - AWS_SECRET_ACCESS_KEY: fake - AWS_S3_PATH_STYLE: 'true' - GCS_API_ENDPOINT: gcs:9090 - GCS_API_SCHEME: http - GCS_USER_FILES_BUCKET_NAME: fake_userfiles - GCS_TEMPLATE_FILES_BUCKET_NAME: fake_templatefiles - GCS_PUBLIC_FILES_BUCKET_NAME: 
fake_publicfiles - STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1 depends_on: s3: condition: service_healthy @@ -59,8 +59,7 @@ services: context: test/acceptance/deps dockerfile: Dockerfile.s3mock environment: - - initialBuckets=fake_user_files,fake_template_files,fake_public_files - + - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket gcs: build: context: test/acceptance/deps diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index a0093ddb8e..32424d97aa 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -15,7 +15,8 @@ services: environment: MOCHA_GREP: ${MOCHA_GREP} NODE_ENV: test - command: npm run test:unit + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit user: node test_acceptance: @@ -30,39 +31,37 @@ services: REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres - MOCHA_GREP: ${MOCHA_GREP} - LOG_LEVEL: ERROR - NODE_ENV: test - ENABLE_CONVERSIONS: "true" - USE_PROM_METRICS: "true" - AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files - AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files - AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files AWS_S3_ENDPOINT: http://s3:9090 AWS_S3_PATH_STYLE: 'true' AWS_ACCESS_KEY_ID: fake AWS_SECRET_ACCESS_KEY: fake GCS_API_ENDPOINT: gcs:9090 GCS_API_SCHEME: http - GCS_USER_FILES_BUCKET_NAME: fake_userfiles - GCS_TEMPLATE_FILES_BUCKET_NAME: fake_templatefiles - GCS_PUBLIC_FILES_BUCKET_NAME: fake_publicfiles + GCS_PROJECT_ID: fake STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1 + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ERROR + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + ENABLE_CONVERSIONS: "true" + USE_PROM_METRICS: "true" + AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files + AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files + AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files user: node depends_on: s3: condition: service_healthy gcs: condition: service_healthy - command: npm run test:acceptance + command: npm run --silent test:acceptance s3: build: context: test/acceptance/deps dockerfile: Dockerfile.s3mock environment: - - initialBuckets=fake_user_files,fake_template_files,fake_public_files - + - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket gcs: build: context: test/acceptance/deps diff --git a/services/filestore/nodemon.json b/services/filestore/nodemon.json index 5826281b84..e3e8817d90 100644 --- a/services/filestore/nodemon.json +++ b/services/filestore/nodemon.json @@ -8,7 +8,6 @@ "execMap": { "js": "npm run start" }, - "watch": [ "app/js/", "app.js", diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 5a1357d227..70fc188732 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -2521,9 +2521,9 @@ "dev": true }, "eslint-plugin-chai-friendly": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.6.0.tgz", - "integrity": "sha512-Uvvv1gkbRGp/qfN15B0kQyQWg+oFA8buDSqrwmW3egNSk/FpqH2MjQqKOuKwmEL6w4QIQrIjDp+gg6kGGmD3oQ==", + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.5.0.tgz", + "integrity": "sha512-Pxe6z8C9fP0pn2X2nGFU/b3GBOCM/5FVus1hsMwJsXP3R7RiXFl7g0ksJbsc0GxiLyidTW4mEFk77qsNn7Tk7g==", "dev": true }, "eslint-plugin-es": { @@ -4737,9 +4737,9 @@ "dev": true }, "prettier": { - "version": "1.19.1", - "resolved": 
"https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", - "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.0.5.tgz", + "integrity": "sha512-7PtVymN48hGcO4fGjybyBSIWDsLU4H4XlvOHfq91pz9kkGlonzwTfYkaIEwiRg/dAJF9YlbsduBAgtYLi+8cFg==", "dev": true }, "prettier-eslint": { @@ -4938,6 +4938,12 @@ "mimic-fn": "^1.0.0" } }, + "prettier": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", + "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", + "dev": true + }, "restore-cursor": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 11183e1df8..6408114f4f 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -13,7 +13,7 @@ "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "start": "node $NODE_APP_OPTIONS app.js", "nodemon": "nodemon --config nodemon.json", - "lint": "node_modules/.bin/eslint app test *.js", + "lint": "node_modules/.bin/eslint --max-warnings 0 .", "format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different", "format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write", "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", @@ -46,18 +46,19 @@ "chai-as-promised": "^7.1.1", "disrequire": "^1.1.0", "eslint": "^6.8.0", - "eslint-config-prettier": "^6.11.0", - "eslint-config-standard": "^14.1.1", + "eslint-config-prettier": "^6.10.0", + "eslint-config-standard": "^14.1.0", "eslint-plugin-chai-expect": "^2.1.0", - "eslint-plugin-chai-friendly": "^0.6.0", - "eslint-plugin-import": "^2.22.0", + "eslint-plugin-chai-friendly": "^0.5.0", + "eslint-plugin-import": "^2.20.1", "eslint-plugin-mocha": "^6.3.0", - "eslint-plugin-node": "^11.1.0", - "eslint-plugin-prettier": "^3.1.4", + "eslint-plugin-node": "^11.0.0", + "eslint-plugin-prettier": "^3.1.2", "eslint-plugin-promise": "^4.2.1", "eslint-plugin-standard": "^4.0.1", "mocha": "7.2.0", "mongodb": "^3.5.9", + "prettier": "^2.0.0", "prettier-eslint": "^9.0.2", "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "2.0.4", diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 076198a38b..acb6b71828 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -33,7 +33,7 @@ class FilestoreApp { this.server = this.app.listen( Settings.internal.filestore.port, 'localhost', - err => { + (err) => { if (err) { return reject(err) } @@ -110,7 +110,7 @@ class FilestoreApp { // unload the app, as we may be doing this on multiple runs with // different settings, which affect startup in some cases const files = await fsReaddir(Path.resolve(__dirname, '../../../app/js')) - files.forEach(file => { + files.forEach((file) => { disrequire(Path.resolve(__dirname, '../../../app/js', file)) }) disrequire(Path.resolve(__dirname, '../../../app')) diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 6f7503410f..7c7cb90e54 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ 
b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -29,7 +29,7 @@ if (!process.env.AWS_ACCESS_KEY_ID) { throw new Error('please provide credentials for the AWS S3 test server') } -process.on('unhandledRejection', e => { +process.on('unhandledRejection', (e) => { // eslint-disable-next-line no-console console.log('** Unhandled Promise Rejection **\n', e) throw e @@ -39,7 +39,7 @@ process.on('unhandledRejection', e => { // fs will always be available - add others if they are configured const BackendSettings = require('./TestConfig') -describe('Filestore', function() { +describe('Filestore', function () { this.timeout(1000 * 10) const filestoreUrl = `http://localhost:${Settings.internal.filestore.port}` @@ -51,7 +51,7 @@ describe('Filestore', function() { const badSockets = [] for (const socket of stdout.split('\n')) { - const fields = socket.split(' ').filter(part => part !== '') + const fields = socket.split(' ').filter((part) => part !== '') if ( fields.length > 2 && parseInt(fields[1]) && @@ -79,11 +79,11 @@ describe('Filestore', function() { } // redefine the test suite for every available backend - Object.keys(BackendSettings).forEach(backend => { - describe(backend, function() { + Object.keys(BackendSettings).forEach((backend) => { + describe(backend, function () { let app, previousEgress, previousIngress, metricPrefix, projectId - before(async function() { + before(async function () { // create the app with the relevant filestore settings Settings.filestore = BackendSettings[backend] app = new FilestoreApp() @@ -91,7 +91,7 @@ describe('Filestore', function() { }) if (BackendSettings[backend].gcs) { - before(async function() { + before(async function () { const storage = new Storage(Settings.filestore.gcs.endpoint) await storage.createBucket(process.env.GCS_USER_FILES_BUCKET_NAME) await storage.createBucket(process.env.GCS_PUBLIC_FILES_BUCKET_NAME) @@ -108,12 +108,12 @@ describe('Filestore', function() { }) } - after(async function() { + after(async function () { await msleep(3000) await app.stop() }) - beforeEach(async function() { + beforeEach(async function () { // retrieve previous metrics from the app if (['s3', 'gcs'].includes(Settings.filestore.backend)) { metricPrefix = Settings.filestore.backend @@ -125,26 +125,26 @@ describe('Filestore', function() { projectId = ObjectId().toString() }) - it('should send a 200 for the status endpoint', async function() { + it('should send a 200 for the status endpoint', async function () { const response = await rp(`${filestoreUrl}/status`) expect(response.statusCode).to.equal(200) expect(response.body).to.contain('filestore') expect(response.body).to.contain('up') }) - it('should send a 200 for the health-check endpoint', async function() { + it('should send a 200 for the health-check endpoint', async function () { const response = await rp(`${filestoreUrl}/health_check`) expect(response.statusCode).to.equal(200) expect(response.body).to.equal('OK') }) - describe('with a file on the server', function() { + describe('with a file on the server', function () { let fileId, fileUrl, constantFileContent const localFileReadPath = '/tmp/filestore_acceptance_tests_file_read.txt' - beforeEach(async function() { + beforeEach(async function () { fileId = ObjectId().toString() fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` constantFileContent = [ @@ -174,14 +174,14 @@ describe('Filestore', function() { } }) - it('should return 404 for a non-existant id', async function() { + it('should return 404 for a non-existant id', async 
function () { const options = { uri: fileUrl + '___this_is_clearly_wrong___' } await expect( rp.get(options) ).to.eventually.be.rejected.and.have.property('statusCode', 404) }) - it('should return the file size on a HEAD request', async function() { + it('should return the file size on a HEAD request', async function () { const expectedLength = Buffer.byteLength(constantFileContent) const res = await rp.head(fileUrl) expect(res.statusCode).to.equal(200) @@ -190,17 +190,17 @@ describe('Filestore', function() { ) }) - it('should be able get the file back', async function() { + it('should be able get the file back', async function () { const res = await rp.get(fileUrl) expect(res.body).to.equal(constantFileContent) }) - it('should not leak a socket', async function() { + it('should not leak a socket', async function () { await rp.get(fileUrl) await expectNoSockets() }) - it('should be able to get back the first 9 bytes of the file', async function() { + it('should be able to get back the first 9 bytes of the file', async function () { const options = { uri: fileUrl, headers: { @@ -211,7 +211,7 @@ describe('Filestore', function() { expect(res.body).to.equal('hello wor') }) - it('should be able to get back bytes 4 through 10 of the file', async function() { + it('should be able to get back bytes 4 through 10 of the file', async function () { const options = { uri: fileUrl, headers: { @@ -222,7 +222,7 @@ describe('Filestore', function() { expect(res.body).to.equal('o world') }) - it('should be able to delete the file', async function() { + it('should be able to delete the file', async function () { const response = await rp.del(fileUrl) expect(response.statusCode).to.equal(204) await expect( @@ -230,7 +230,7 @@ describe('Filestore', function() { ).to.eventually.be.rejected.and.have.property('statusCode', 404) }) - it('should be able to copy files', async function() { + it('should be able to copy files', async function () { const newProjectID = ObjectId().toString() const newFileId = ObjectId().toString() const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${newFileId}` @@ -252,7 +252,7 @@ describe('Filestore', function() { expect(response.body).to.equal(constantFileContent) }) - it('should be able to overwrite the file', async function() { + it('should be able to overwrite the file', async function () { const newContent = `here is some different content, ${Math.random()}` const writeStream = request.post(fileUrl) const readStream = streamifier.createReadStream(newContent) @@ -265,7 +265,7 @@ describe('Filestore', function() { }) if (['S3Persistor', 'GcsPersistor'].includes(backend)) { - it('should record an egress metric for the upload', async function() { + it('should record an egress metric for the upload', async function () { const metric = await TestHelper.getMetric( filestoreUrl, `${metricPrefix}_egress` @@ -273,7 +273,7 @@ describe('Filestore', function() { expect(metric - previousEgress).to.equal(constantFileContent.length) }) - it('should record an ingress metric when downloading the file', async function() { + it('should record an ingress metric when downloading the file', async function () { await rp.get(fileUrl) const metric = await TestHelper.getMetric( filestoreUrl, @@ -284,7 +284,7 @@ describe('Filestore', function() { ) }) - it('should record an ingress metric for a partial download', async function() { + it('should record an ingress metric for a partial download', async function () { const options = { uri: fileUrl, headers: { @@ -301,7 +301,7 @@ describe('Filestore', 
function() { } }) - describe('with multiple files', function() { + describe('with multiple files', function () { let fileIds, fileUrls, projectUrl const localFileReadPaths = [ '/tmp/filestore_acceptance_tests_file_read_1.txt', @@ -320,14 +320,14 @@ describe('Filestore', function() { ].join('\n') ] - before(async function() { + before(async function () { return Promise.all([ fsWriteFile(localFileReadPaths[0], constantFileContents[0]), fsWriteFile(localFileReadPaths[1], constantFileContents[1]) ]) }) - beforeEach(async function() { + beforeEach(async function () { projectUrl = `${filestoreUrl}/project/${projectId}` fileIds = [ObjectId().toString(), ObjectId().toString()] fileUrls = [ @@ -354,7 +354,7 @@ describe('Filestore', function() { ]) }) - it('should get the directory size', async function() { + it('should get the directory size', async function () { const response = await rp.get( `${filestoreUrl}/project/${projectId}/size` ) @@ -363,7 +363,7 @@ describe('Filestore', function() { ) }) - it('should store the files', async function() { + it('should store the files', async function () { for (const index in fileUrls) { await expect(rp.get(fileUrls[index])).to.eventually.have.property( 'body', @@ -372,7 +372,7 @@ describe('Filestore', function() { } }) - it('should be able to delete the project', async function() { + it('should be able to delete the project', async function () { await expect(rp.delete(projectUrl)).to.eventually.have.property( 'statusCode', 204 @@ -385,17 +385,17 @@ describe('Filestore', function() { } }) - it('should not delete a partial project id', async function() { + it('should not delete a partial project id', async function () { await expect( rp.delete(`${filestoreUrl}/project/5`) ).to.eventually.be.rejected.and.have.property('statusCode', 400) }) }) - describe('with a large file', function() { + describe('with a large file', function () { let fileId, fileUrl, largeFileContent, error - beforeEach(async function() { + beforeEach(async function () { fileId = ObjectId().toString() fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` @@ -414,26 +414,26 @@ describe('Filestore', function() { } }) - it('should be able to get the file back', async function() { + it('should be able to get the file back', async function () { const response = await rp.get(fileUrl) expect(response.body).to.equal(largeFileContent) }) - it('should not throw an error', function() { + it('should not throw an error', function () { expect(error).not.to.exist }) - it('should not leak a socket', async function() { + it('should not leak a socket', async function () { await rp.get(fileUrl) await expectNoSockets() }) - it('should not leak a socket if the connection is aborted', async function() { + it('should not leak a socket if the connection is aborted', async function () { this.timeout(20000) for (let i = 0; i < 5; i++) { // test is not 100% reliable, so repeat // create a new connection and have it time out before reading any data - await new Promise(resolve => { + await new Promise((resolve) => { const streamThatHangs = new Stream.PassThrough() const stream = request({ url: fileUrl, timeout: 1000 }) stream.pipe(streamThatHangs) @@ -449,10 +449,10 @@ describe('Filestore', function() { }) if (backend === 'S3Persistor' || backend === 'FallbackGcsToS3Persistor') { - describe('with a file in a specific bucket', function() { + describe('with a file in a specific bucket', function () { let constantFileContent, fileId, fileUrl, bucketName - beforeEach(async function() { + beforeEach(async function 
() { constantFileContent = `This is a file in a different S3 bucket ${Math.random()}` fileId = ObjectId().toString() bucketName = ObjectId().toString() @@ -483,7 +483,7 @@ describe('Filestore', function() { .promise() }) - it('should get the file from the specified bucket', async function() { + it('should get the file from the specified bucket', async function () { const response = await rp.get(fileUrl) expect(response.body).to.equal(constantFileContent) }) @@ -491,10 +491,10 @@ describe('Filestore', function() { } if (backend === 'GcsPersistor') { - describe('when deleting a file in GCS', function() { + describe('when deleting a file in GCS', function () { let fileId, fileUrl, content, error, date - beforeEach(async function() { + beforeEach(async function () { date = new Date() tk.freeze(date) fileId = ObjectId() @@ -515,15 +515,15 @@ describe('Filestore', function() { } }) - afterEach(function() { + afterEach(function () { tk.reset() }) - it('should not throw an error', function() { + it('should not throw an error', function () { expect(error).not.to.exist }) - it('should copy the file to the deleted-files bucket', async function() { + it('should copy the file to the deleted-files bucket', async function () { await TestHelper.expectPersistorToHaveFile( app.persistor, `${Settings.filestore.stores.user_files}-deleted`, @@ -532,7 +532,7 @@ describe('Filestore', function() { ) }) - it('should remove the file from the original bucket', async function() { + it('should remove the file from the original bucket', async function () { await TestHelper.expectPersistorNotToHaveFile( app.persistor, Settings.filestore.stores.user_files, @@ -543,7 +543,7 @@ describe('Filestore', function() { } if (BackendSettings[backend].fallback) { - describe('with a fallback', function() { + describe('with a fallback', function () { let constantFileContent, fileId, fileKey, @@ -551,7 +551,7 @@ describe('Filestore', function() { bucket, fallbackBucket - beforeEach(function() { + beforeEach(function () { constantFileContent = `This is yet more file content ${Math.random()}` fileId = ObjectId().toString() fileKey = `${projectId}/${fileId}` @@ -561,8 +561,8 @@ describe('Filestore', function() { fallbackBucket = Settings.filestore.fallback.buckets[bucket] }) - describe('with a file in the fallback bucket', function() { - beforeEach(async function() { + describe('with a file in the fallback bucket', function () { + beforeEach(async function () { await TestHelper.uploadStringToPersistor( app.persistor.fallbackPersistor, fallbackBucket, @@ -571,7 +571,7 @@ describe('Filestore', function() { ) }) - it('should not find file in the primary', async function() { + it('should not find file in the primary', async function () { await TestHelper.expectPersistorNotToHaveFile( app.persistor.primaryPersistor, bucket, @@ -579,7 +579,7 @@ describe('Filestore', function() { ) }) - it('should find the file in the fallback', async function() { + it('should find the file in the fallback', async function () { await TestHelper.expectPersistorToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, @@ -588,17 +588,17 @@ describe('Filestore', function() { ) }) - describe('when copyOnMiss is disabled', function() { - beforeEach(function() { + describe('when copyOnMiss is disabled', function () { + beforeEach(function () { app.persistor.settings.copyOnMiss = false }) - it('should fetch the file', async function() { + it('should fetch the file', async function () { const res = await rp.get(fileUrl) expect(res.body).to.equal(constantFileContent) 
}) - it('should not copy the file to the primary', async function() { + it('should not copy the file to the primary', async function () { await rp.get(fileUrl) await TestHelper.expectPersistorNotToHaveFile( @@ -609,17 +609,17 @@ describe('Filestore', function() { }) }) - describe('when copyOnMiss is enabled', function() { - beforeEach(function() { + describe('when copyOnMiss is enabled', function () { + beforeEach(function () { app.persistor.settings.copyOnMiss = true }) - it('should fetch the file', async function() { + it('should fetch the file', async function () { const res = await rp.get(fileUrl) expect(res.body).to.equal(constantFileContent) }) - it('copies the file to the primary', async function() { + it('copies the file to the primary', async function () { await rp.get(fileUrl) // wait for the file to copy in the background await msleep(1000) @@ -633,10 +633,10 @@ describe('Filestore', function() { }) }) - describe('when copying a file', function() { + describe('when copying a file', function () { let newFileId, newFileUrl, newFileKey, opts - beforeEach(function() { + beforeEach(function () { const newProjectID = ObjectId().toString() newFileId = ObjectId().toString() newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${newFileId}` @@ -654,15 +654,15 @@ describe('Filestore', function() { } }) - describe('when copyOnMiss is false', function() { - beforeEach(async function() { + describe('when copyOnMiss is false', function () { + beforeEach(async function () { app.persistor.settings.copyOnMiss = false const response = await rp(opts) expect(response.statusCode).to.equal(200) }) - it('should leave the old file in the old bucket', async function() { + it('should leave the old file in the old bucket', async function () { await TestHelper.expectPersistorToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, @@ -671,7 +671,7 @@ describe('Filestore', function() { ) }) - it('should not create a new file in the old bucket', async function() { + it('should not create a new file in the old bucket', async function () { await TestHelper.expectPersistorNotToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, @@ -679,7 +679,7 @@ describe('Filestore', function() { ) }) - it('should create a new file in the new bucket', async function() { + it('should create a new file in the new bucket', async function () { await TestHelper.expectPersistorToHaveFile( app.persistor.primaryPersistor, bucket, @@ -688,7 +688,7 @@ describe('Filestore', function() { ) }) - it('should not copy the old file to the primary with the old key', async function() { + it('should not copy the old file to the primary with the old key', async function () { // wait for the file to copy in the background await msleep(1000) @@ -700,15 +700,15 @@ describe('Filestore', function() { }) }) - describe('when copyOnMiss is true', function() { - beforeEach(async function() { + describe('when copyOnMiss is true', function () { + beforeEach(async function () { app.persistor.settings.copyOnMiss = true const response = await rp(opts) expect(response.statusCode).to.equal(200) }) - it('should leave the old file in the old bucket', async function() { + it('should leave the old file in the old bucket', async function () { await TestHelper.expectPersistorToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, @@ -717,7 +717,7 @@ describe('Filestore', function() { ) }) - it('should not create a new file in the old bucket', async function() { + it('should not create a new file in the old bucket', async function () { await 
TestHelper.expectPersistorNotToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, @@ -725,7 +725,7 @@ describe('Filestore', function() { ) }) - it('should create a new file in the new bucket', async function() { + it('should create a new file in the new bucket', async function () { await TestHelper.expectPersistorToHaveFile( app.persistor.primaryPersistor, bucket, @@ -734,7 +734,7 @@ describe('Filestore', function() { ) }) - it('should copy the old file to the primary with the old key', async function() { + it('should copy the old file to the primary with the old key', async function () { // wait for the file to copy in the background await msleep(1000) @@ -749,8 +749,8 @@ describe('Filestore', function() { }) }) - describe('when sending a file', function() { - beforeEach(async function() { + describe('when sending a file', function () { + beforeEach(async function () { const writeStream = request.post(fileUrl) const readStream = streamifier.createReadStream( constantFileContent @@ -760,7 +760,7 @@ describe('Filestore', function() { await pipeline(readStream, writeStream, resultStream) }) - it('should store the file on the primary', async function() { + it('should store the file on the primary', async function () { await TestHelper.expectPersistorToHaveFile( app.persistor.primaryPersistor, bucket, @@ -769,7 +769,7 @@ describe('Filestore', function() { ) }) - it('should not store the file on the fallback', async function() { + it('should not store the file on the fallback', async function () { await TestHelper.expectPersistorNotToHaveFile( app.persistor.fallbackPersistor, fallbackBucket, @@ -778,9 +778,9 @@ describe('Filestore', function() { }) }) - describe('when deleting a file', function() { - describe('when the file exists on the primary', function() { - beforeEach(async function() { + describe('when deleting a file', function () { + describe('when the file exists on the primary', function () { + beforeEach(async function () { await TestHelper.uploadStringToPersistor( app.persistor.primaryPersistor, bucket, @@ -789,7 +789,7 @@ describe('Filestore', function() { ) }) - it('should delete the file', async function() { + it('should delete the file', async function () { const response = await rp.del(fileUrl) expect(response.statusCode).to.equal(204) await expect( @@ -798,8 +798,8 @@ describe('Filestore', function() { }) }) - describe('when the file exists on the fallback', function() { - beforeEach(async function() { + describe('when the file exists on the fallback', function () { + beforeEach(async function () { await TestHelper.uploadStringToPersistor( app.persistor.fallbackPersistor, fallbackBucket, @@ -808,7 +808,7 @@ describe('Filestore', function() { ) }) - it('should delete the file', async function() { + it('should delete the file', async function () { const response = await rp.del(fileUrl) expect(response.statusCode).to.equal(204) await expect( @@ -817,8 +817,8 @@ describe('Filestore', function() { }) }) - describe('when the file exists on both the primary and the fallback', function() { - beforeEach(async function() { + describe('when the file exists on both the primary and the fallback', function () { + beforeEach(async function () { await TestHelper.uploadStringToPersistor( app.persistor.primaryPersistor, bucket, @@ -833,7 +833,7 @@ describe('Filestore', function() { ) }) - it('should delete the files', async function() { + it('should delete the files', async function () { const response = await rp.del(fileUrl) expect(response.statusCode).to.equal(204) await expect( @@ 
-842,8 +842,8 @@ describe('Filestore', function() { }) }) - describe('when the file does not exist', function() { - it('should return return 204', async function() { + describe('when the file does not exist', function () { + it('should return return 204', async function () { // S3 doesn't give us a 404 when the object doesn't exist, so to stay // consistent we merrily return 204 ourselves here as well const response = await rp.del(fileUrl) @@ -854,14 +854,14 @@ describe('Filestore', function() { }) } - describe('with a pdf file', function() { + describe('with a pdf file', function () { let fileId, fileUrl, localFileSize const localFileReadPath = Path.resolve( __dirname, '../../fixtures/test.pdf' ) - beforeEach(async function() { + beforeEach(async function () { fileId = ObjectId().toString() fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` const stat = await fsStat(localFileReadPath) @@ -872,13 +872,13 @@ describe('Filestore', function() { await pipeline(readStream, writeStream, endStream) }) - it('should be able get the file back', async function() { + it('should be able get the file back', async function () { const response = await rp.get(fileUrl) expect(response.body.substring(0, 8)).to.equal('%PDF-1.5') }) if (['S3Persistor', 'GcsPersistor'].includes(backend)) { - it('should record an egress metric for the upload', async function() { + it('should record an egress metric for the upload', async function () { const metric = await TestHelper.getMetric( filestoreUrl, `${metricPrefix}_egress` @@ -887,20 +887,20 @@ describe('Filestore', function() { }) } - describe('getting the preview image', function() { + describe('getting the preview image', function () { this.timeout(1000 * 20) let previewFileUrl - beforeEach(function() { + beforeEach(function () { previewFileUrl = `${fileUrl}?style=preview` }) - it('should not time out', async function() { + it('should not time out', async function () { const response = await rp.get(previewFileUrl) expect(response.statusCode).to.equal(200) }) - it('should respond with image data', async function() { + it('should respond with image data', async function () { // note: this test relies of the imagemagick conversion working const response = await rp.get(previewFileUrl) expect(response.body.length).to.be.greaterThan(400) @@ -908,20 +908,20 @@ describe('Filestore', function() { }) }) - describe('warming the cache', function() { + describe('warming the cache', function () { this.timeout(1000 * 20) let previewFileUrl - beforeEach(function() { + beforeEach(function () { previewFileUrl = `${fileUrl}?style=preview&cacheWarm=true` }) - it('should not time out', async function() { + it('should not time out', async function () { const response = await rp.get(previewFileUrl) expect(response.statusCode).to.equal(200) }) - it("should respond with only an 'OK'", async function() { + it("should respond with only an 'OK'", async function () { // note: this test relies of the imagemagick conversion working const response = await rp.get(previewFileUrl) expect(response.body).to.equal('OK') diff --git a/services/filestore/test/acceptance/js/TestHelper.js b/services/filestore/test/acceptance/js/TestHelper.js index 935a8b50e7..d09b68a053 100644 --- a/services/filestore/test/acceptance/js/TestHelper.js +++ b/services/filestore/test/acceptance/js/TestHelper.js @@ -25,7 +25,7 @@ async function getMetric(filestoreUrl, metric) { function streamToString(stream) { const chunks = [] return new Promise((resolve, reject) => { - stream.on('data', chunk => chunks.push(chunk)) 
+ stream.on('data', (chunk) => chunks.push(chunk)) stream.on('error', reject) stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8'))) stream.resume() diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js index fe446bd478..4ae65028c7 100644 --- a/services/filestore/test/unit/js/FileControllerTests.js +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -5,7 +5,7 @@ const SandboxedModule = require('sandboxed-module') const Errors = require('../../../app/js/Errors') const modulePath = '../../../app/js/FileController.js' -describe('FileController', function() { +describe('FileController', function () { let PersistorManager, FileHandler, LocalFileWriter, @@ -29,7 +29,7 @@ describe('FileController', function() { const key = `${projectId}/${fileId}` const error = new Error('incorrect utensil') - beforeEach(function() { + beforeEach(function () { PersistorManager = { sendStream: sinon.stub().yields(), copyObject: sinon.stub().resolves(), @@ -91,76 +91,76 @@ describe('FileController', function() { next = sinon.stub() }) - describe('getFile', function() { - it('should try and get a redirect url first', function() { + describe('getFile', function () { + it('should try and get a redirect url first', function () { FileController.getFile(req, res, next) expect(FileHandler.getRedirectUrl).to.have.been.calledWith(bucket, key) }) - it('should pipe the stream', function() { + it('should pipe the stream', function () { FileController.getFile(req, res, next) expect(stream.pipeline).to.have.been.calledWith(fileStream, res) }) - it('should send a 200 if the cacheWarm param is true', function(done) { + it('should send a 200 if the cacheWarm param is true', function (done) { req.query.cacheWarm = true - res.sendStatus = statusCode => { + res.sendStatus = (statusCode) => { statusCode.should.equal(200) done() } FileController.getFile(req, res, next) }) - it('should send an error if there is a problem', function() { + it('should send an error if there is a problem', function () { FileHandler.getFile.yields(error) FileController.getFile(req, res, next) expect(next).to.have.been.calledWith(error) }) - describe('with a redirect url', function() { + describe('with a redirect url', function () { const redirectUrl = 'https://wombat.potato/giraffe' - beforeEach(function() { + beforeEach(function () { FileHandler.getRedirectUrl.yields(null, redirectUrl) res.redirect = sinon.stub() }) - it('should redirect', function() { + it('should redirect', function () { FileController.getFile(req, res, next) expect(res.redirect).to.have.been.calledWith(redirectUrl) }) - it('should not get a file stream', function() { + it('should not get a file stream', function () { FileController.getFile(req, res, next) expect(FileHandler.getFile).not.to.have.been.called }) - describe('when there is an error getting the redirect url', function() { - beforeEach(function() { + describe('when there is an error getting the redirect url', function () { + beforeEach(function () { FileHandler.getRedirectUrl.yields(new Error('wombat herding error')) }) - it('should not redirect', function() { + it('should not redirect', function () { FileController.getFile(req, res, next) expect(res.redirect).not.to.have.been.called }) - it('should not return an error', function() { + it('should not return an error', function () { FileController.getFile(req, res, next) expect(next).not.to.have.been.called }) - it('should proxy the file', function() { + it('should proxy the file', 
function () { FileController.getFile(req, res, next) expect(FileHandler.getFile).to.have.been.calledWith(bucket, key) }) }) }) - describe('with a range header', function() { + describe('with a range header', function () { let expectedOptions - beforeEach(function() { + beforeEach(function () { expectedOptions = { bucket, key, @@ -169,7 +169,7 @@ describe('FileController', function() { } }) - it('should pass range options to FileHandler', function() { + it('should pass range options to FileHandler', function () { req.headers.range = 'bytes=0-8' expectedOptions.start = 0 expectedOptions.end = 8 @@ -182,7 +182,7 @@ describe('FileController', function() { ) }) - it('should ignore an invalid range header', function() { + it('should ignore an invalid range header', function () { req.headers.range = 'potato' FileController.getFile(req, res, next) expect(FileHandler.getFile).to.have.been.calledWith( @@ -192,7 +192,7 @@ describe('FileController', function() { ) }) - it("should ignore any type other than 'bytes'", function() { + it("should ignore any type other than 'bytes'", function () { req.headers.range = 'wombats=0-8' FileController.getFile(req, res, next) expect(FileHandler.getFile).to.have.been.calledWith( @@ -204,8 +204,8 @@ describe('FileController', function() { }) }) - describe('getFileHead', function() { - it('should return the file size in a Content-Length header', function(done) { + describe('getFileHead', function () { + it('should return the file size in a Content-Length header', function (done) { res.end = () => { expect(res.status).to.have.been.calledWith(200) expect(res.set).to.have.been.calledWith('Content-Length', fileSize) @@ -215,12 +215,12 @@ describe('FileController', function() { FileController.getFileHead(req, res, next) }) - it('should return a 404 is the file is not found', function(done) { + it('should return a 404 is the file is not found', function (done) { FileHandler.getFileSize.yields( new Errors.NotFoundError({ message: 'not found', info: {} }) ) - res.sendStatus = code => { + res.sendStatus = (code) => { expect(code).to.equal(404) done() } @@ -228,7 +228,7 @@ describe('FileController', function() { FileController.getFileHead(req, res, next) }) - it('should send an error on internal errors', function() { + it('should send an error on internal errors', function () { FileHandler.getFileSize.yields(error) FileController.getFileHead(req, res, next) @@ -236,9 +236,9 @@ describe('FileController', function() { }) }) - describe('insertFile', function() { - it('should send bucket name key and res to PersistorManager', function(done) { - res.sendStatus = code => { + describe('insertFile', function () { + it('should send bucket name key and res to PersistorManager', function (done) { + res.sendStatus = (code) => { expect(FileHandler.insertFile).to.have.been.calledWith(bucket, key, req) expect(code).to.equal(200) done() @@ -247,12 +247,12 @@ describe('FileController', function() { }) }) - describe('copyFile', function() { + describe('copyFile', function () { const oldFileId = 'oldFileId' const oldProjectId = 'oldProjectid' const oldKey = `${oldProjectId}/${oldFileId}` - beforeEach(function() { + beforeEach(function () { req.body = { source: { project_id: oldProjectId, @@ -261,8 +261,8 @@ describe('FileController', function() { } }) - it('should send bucket name and both keys to PersistorManager', function(done) { - res.sendStatus = code => { + it('should send bucket name and both keys to PersistorManager', function (done) { + res.sendStatus = (code) => { 
code.should.equal(200) expect(PersistorManager.copyObject).to.have.been.calledWith( bucket, @@ -274,29 +274,29 @@ describe('FileController', function() { FileController.copyFile(req, res, next) }) - it('should send a 404 if the original file was not found', function(done) { + it('should send a 404 if the original file was not found', function (done) { PersistorManager.copyObject.rejects( new Errors.NotFoundError({ message: 'not found', info: {} }) ) - res.sendStatus = code => { + res.sendStatus = (code) => { code.should.equal(404) done() } FileController.copyFile(req, res, next) }) - it('should send an error if there was an error', function(done) { + it('should send an error if there was an error', function (done) { PersistorManager.copyObject.rejects(error) - FileController.copyFile(req, res, err => { + FileController.copyFile(req, res, (err) => { expect(err).to.equal(error) done() }) }) }) - describe('delete file', function() { - it('should tell the file handler', function(done) { - res.sendStatus = code => { + describe('delete file', function () { + it('should tell the file handler', function (done) { + res.sendStatus = (code) => { code.should.equal(204) expect(FileHandler.deleteFile).to.have.been.calledWith(bucket, key) done() @@ -304,16 +304,16 @@ describe('FileController', function() { FileController.deleteFile(req, res, next) }) - it('should send a 500 if there was an error', function() { + it('should send a 500 if there was an error', function () { FileHandler.deleteFile.yields(error) FileController.deleteFile(req, res, next) expect(next).to.have.been.calledWith(error) }) }) - describe('delete project', function() { - it('should tell the file handler', function(done) { - res.sendStatus = code => { + describe('delete project', function () { + it('should tell the file handler', function (done) { + res.sendStatus = (code) => { code.should.equal(204) expect(FileHandler.deleteProject).to.have.been.calledWith(bucket, key) done() @@ -321,24 +321,24 @@ describe('FileController', function() { FileController.deleteProject(req, res, next) }) - it('should send a 500 if there was an error', function() { + it('should send a 500 if there was an error', function () { FileHandler.deleteProject.yields(error) FileController.deleteProject(req, res, next) expect(next).to.have.been.calledWith(error) }) }) - describe('directorySize', function() { - it('should return total directory size bytes', function(done) { + describe('directorySize', function () { + it('should return total directory size bytes', function (done) { FileController.directorySize(req, { - json: result => { + json: (result) => { expect(result['total bytes']).to.equal(fileSize) done() } }) }) - it('should send a 500 if there was an error', function() { + it('should send a 500 if there was an error', function () { FileHandler.getDirectorySize.yields(error) FileController.directorySize(req, res, next) expect(next).to.have.been.calledWith(error) diff --git a/services/filestore/test/unit/js/FileConverterTests.js b/services/filestore/test/unit/js/FileConverterTests.js index 671544098e..6d97783f0c 100644 --- a/services/filestore/test/unit/js/FileConverterTests.js +++ b/services/filestore/test/unit/js/FileConverterTests.js @@ -6,7 +6,7 @@ const { Errors } = require('@overleaf/object-persistor') const modulePath = '../../../app/js/FileConverter.js' -describe('FileConverter', function() { +describe('FileConverter', function () { let SafeExec, FileConverter const sourcePath = '/data/wombat.eps' const destPath = '/tmp/dest.png' @@ -18,7 +18,7 @@ 
describe('FileConverter', function() { } } - beforeEach(function() { + beforeEach(function () { SafeExec = { promises: sinon.stub().resolves(destPath) } @@ -38,20 +38,20 @@ describe('FileConverter', function() { }) }) - describe('convert', function() { - it('should convert the source to the requested format', async function() { + describe('convert', function () { + it('should convert the source to the requested format', async function () { await FileConverter.promises.convert(sourcePath, format) const args = SafeExec.promises.args[0][0] expect(args).to.include(`${sourcePath}[0]`) expect(args).to.include(`${sourcePath}.${format}`) }) - it('should return the dest path', async function() { + it('should return the dest path', async function () { const destPath = await FileConverter.promises.convert(sourcePath, format) destPath.should.equal(`${sourcePath}.${format}`) }) - it('should wrap the error from convert', async function() { + it('should wrap the error from convert', async function () { SafeExec.promises.rejects(errorMessage) try { await FileConverter.promises.convert(sourcePath, format) @@ -62,7 +62,7 @@ describe('FileConverter', function() { } }) - it('should not accept an non approved format', async function() { + it('should not accept an non approved format', async function () { try { await FileConverter.promises.convert(sourcePath, 'potato') expect('error should have been thrown').not.to.exist @@ -71,12 +71,12 @@ describe('FileConverter', function() { } }) - it('should prefix the command with Settings.commands.convertCommandPrefix', async function() { + it('should prefix the command with Settings.commands.convertCommandPrefix', async function () { Settings.commands.convertCommandPrefix = ['nice'] await FileConverter.promises.convert(sourcePath, format) }) - it('should convert the file when called as a callback', function(done) { + it('should convert the file when called as a callback', function (done) { FileConverter.convert(sourcePath, format, (err, destPath) => { expect(err).not.to.exist destPath.should.equal(`${sourcePath}.${format}`) @@ -89,16 +89,16 @@ describe('FileConverter', function() { }) }) - describe('thumbnail', function() { - it('should call converter resize with args', async function() { + describe('thumbnail', function () { + it('should call converter resize with args', async function () { await FileConverter.promises.thumbnail(sourcePath) const args = SafeExec.promises.args[0][0] expect(args).to.include(`${sourcePath}[0]`) }) }) - describe('preview', function() { - it('should call converter resize with args', async function() { + describe('preview', function () { + it('should call converter resize with args', async function () { await FileConverter.promises.preview(sourcePath) const args = SafeExec.promises.args[0][0] expect(args).to.include(`${sourcePath}[0]`) diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index ecce74d870..90bc06832e 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -9,7 +9,7 @@ const { Errors } = require('@overleaf/object-persistor') chai.use(require('sinon-chai')) chai.use(require('chai-as-promised')) -describe('FileHandler', function() { +describe('FileHandler', function () { let PersistorManager, LocalFileWriter, FileConverter, @@ -31,7 +31,7 @@ describe('FileHandler', function() { on: sinon.stub() } - beforeEach(function() { + beforeEach(function () { PersistorManager = { getObjectStream: 
sinon.stub().resolves(sourceStream), getRedirectUrl: sinon.stub().resolves(redirectUrl), @@ -89,11 +89,11 @@ describe('FileHandler', function() { }) }) - describe('insertFile', function() { + describe('insertFile', function () { const stream = 'stream' - it('should send file to the filestore', function(done) { - FileHandler.insertFile(bucket, key, stream, err => { + it('should send file to the filestore', function (done) { + FileHandler.insertFile(bucket, key, stream, (err) => { expect(err).not.to.exist expect(PersistorManager.sendStream).to.have.been.calledWith( bucket, @@ -104,39 +104,39 @@ describe('FileHandler', function() { }) }) - it('should not make a delete request for the convertedKey folder', function(done) { - FileHandler.insertFile(bucket, key, stream, err => { + it('should not make a delete request for the convertedKey folder', function (done) { + FileHandler.insertFile(bucket, key, stream, (err) => { expect(err).not.to.exist expect(PersistorManager.deleteDirectory).not.to.have.been.called done() }) }) - it('should accept templates-api key format', function(done) { + it('should accept templates-api key format', function (done) { KeyBuilder.getConvertedFolderKey.returns( '5ecba29f1a294e007d0bccb4/v/0/pdf' ) - FileHandler.insertFile(bucket, key, stream, err => { + FileHandler.insertFile(bucket, key, stream, (err) => { expect(err).not.to.exist done() }) }) - it('should throw an error when the key is in the wrong format', function(done) { + it('should throw an error when the key is in the wrong format', function (done) { KeyBuilder.getConvertedFolderKey.returns('wombat') - FileHandler.insertFile(bucket, key, stream, err => { + FileHandler.insertFile(bucket, key, stream, (err) => { expect(err).to.exist done() }) }) - describe('when conversions are enabled', function() { - beforeEach(function() { + describe('when conversions are enabled', function () { + beforeEach(function () { Settings.enableConversions = true }) - it('should delete the convertedKey folder', function(done) { - FileHandler.insertFile(bucket, key, stream, err => { + it('should delete the convertedKey folder', function (done) { + FileHandler.insertFile(bucket, key, stream, (err) => { expect(err).not.to.exist expect(PersistorManager.deleteDirectory).to.have.been.calledWith( bucket, @@ -148,9 +148,9 @@ describe('FileHandler', function() { }) }) - describe('deleteFile', function() { - it('should tell the filestore manager to delete the file', function(done) { - FileHandler.deleteFile(bucket, key, err => { + describe('deleteFile', function () { + it('should tell the filestore manager to delete the file', function (done) { + FileHandler.deleteFile(bucket, key, (err) => { expect(err).not.to.exist expect(PersistorManager.deleteObject).to.have.been.calledWith( bucket, @@ -160,39 +160,39 @@ describe('FileHandler', function() { }) }) - it('should not tell the filestore manager to delete the cached folder', function(done) { - FileHandler.deleteFile(bucket, key, err => { + it('should not tell the filestore manager to delete the cached folder', function (done) { + FileHandler.deleteFile(bucket, key, (err) => { expect(err).not.to.exist expect(PersistorManager.deleteDirectory).not.to.have.been.called done() }) }) - it('should accept templates-api key format', function(done) { + it('should accept templates-api key format', function (done) { KeyBuilder.getConvertedFolderKey.returns( '5ecba29f1a294e007d0bccb4/v/0/pdf' ) - FileHandler.deleteFile(bucket, key, err => { + FileHandler.deleteFile(bucket, key, (err) => { 
expect(err).not.to.exist done() }) }) - it('should throw an error when the key is in the wrong format', function(done) { + it('should throw an error when the key is in the wrong format', function (done) { KeyBuilder.getConvertedFolderKey.returns('wombat') - FileHandler.deleteFile(bucket, key, err => { + FileHandler.deleteFile(bucket, key, (err) => { expect(err).to.exist done() }) }) - describe('when conversions are enabled', function() { - beforeEach(function() { + describe('when conversions are enabled', function () { + beforeEach(function () { Settings.enableConversions = true }) - it('should delete the convertedKey folder', function(done) { - FileHandler.deleteFile(bucket, key, err => { + it('should delete the convertedKey folder', function (done) { + FileHandler.deleteFile(bucket, key, (err) => { expect(err).not.to.exist expect(PersistorManager.deleteDirectory).to.have.been.calledWith( bucket, @@ -204,9 +204,9 @@ describe('FileHandler', function() { }) }) - describe('deleteProject', function() { - it('should tell the filestore manager to delete the folder', function(done) { - FileHandler.deleteProject(bucket, projectKey, err => { + describe('deleteProject', function () { + it('should tell the filestore manager to delete the folder', function (done) { + FileHandler.deleteProject(bucket, projectKey, (err) => { expect(err).not.to.exist expect(PersistorManager.deleteDirectory).to.have.been.calledWith( bucket, @@ -216,16 +216,16 @@ describe('FileHandler', function() { }) }) - it('should throw an error when the key is in the wrong format', function(done) { - FileHandler.deleteProject(bucket, 'wombat', err => { + it('should throw an error when the key is in the wrong format', function (done) { + FileHandler.deleteProject(bucket, 'wombat', (err) => { expect(err).to.exist done() }) }) }) - describe('getFile', function() { - it('should return the source stream no format or style are defined', function(done) { + describe('getFile', function () { + it('should return the source stream no format or style are defined', function (done) { FileHandler.getFile(bucket, key, null, (err, stream) => { expect(err).not.to.exist expect(stream).to.equal(sourceStream) @@ -233,9 +233,9 @@ describe('FileHandler', function() { }) }) - it('should pass options through to PersistorManager', function(done) { + it('should pass options through to PersistorManager', function (done) { const options = { start: 0, end: 8 } - FileHandler.getFile(bucket, key, options, err => { + FileHandler.getFile(bucket, key, options, (err) => { expect(err).not.to.exist expect(PersistorManager.getObjectStream).to.have.been.calledWith( bucket, @@ -246,26 +246,26 @@ describe('FileHandler', function() { }) }) - describe('when a format is defined', function() { + describe('when a format is defined', function () { let result - describe('when the file is not cached', function() { - beforeEach(function(done) { + describe('when the file is not cached', function () { + beforeEach(function (done) { FileHandler.getFile(bucket, key, { format: 'png' }, (err, stream) => { result = { err, stream } done() }) }) - it('should convert the file', function() { + it('should convert the file', function () { expect(FileConverter.promises.convert).to.have.been.called }) - it('should compress the converted file', function() { + it('should compress the converted file', function () { expect(ImageOptimiser.promises.compressPng).to.have.been.called }) - it('should return the the converted stream', function() { + it('should return the the converted stream', function () { 
expect(result.err).not.to.exist expect(result.stream).to.equal(readStream) expect(PersistorManager.getObjectStream).to.have.been.calledWith( @@ -275,8 +275,8 @@ describe('FileHandler', function() { }) }) - describe('when the file is cached', function() { - beforeEach(function(done) { + describe('when the file is cached', function () { + beforeEach(function (done) { PersistorManager.checkIfObjectExists = sinon.stub().resolves(true) FileHandler.getFile(bucket, key, { format: 'png' }, (err, stream) => { result = { err, stream } @@ -284,15 +284,15 @@ describe('FileHandler', function() { }) }) - it('should not convert the file', function() { + it('should not convert the file', function () { expect(FileConverter.promises.convert).not.to.have.been.called }) - it('should not compress the converted file again', function() { + it('should not compress the converted file again', function () { expect(ImageOptimiser.promises.compressPng).not.to.have.been.called }) - it('should return the cached stream', function() { + it('should return the cached stream', function () { expect(result.err).not.to.exist expect(result.stream).to.equal(sourceStream) expect(PersistorManager.getObjectStream).to.have.been.calledWith( @@ -303,9 +303,9 @@ describe('FileHandler', function() { }) }) - describe('when a style is defined', function() { - it('generates a thumbnail when requested', function(done) { - FileHandler.getFile(bucket, key, { style: 'thumbnail' }, err => { + describe('when a style is defined', function () { + it('generates a thumbnail when requested', function (done) { + FileHandler.getFile(bucket, key, { style: 'thumbnail' }, (err) => { expect(err).not.to.exist expect(FileConverter.promises.thumbnail).to.have.been.called expect(FileConverter.promises.preview).not.to.have.been.called @@ -313,8 +313,8 @@ describe('FileHandler', function() { }) }) - it('generates a preview when requested', function(done) { - FileHandler.getFile(bucket, key, { style: 'preview' }, err => { + it('generates a preview when requested', function (done) { + FileHandler.getFile(bucket, key, { style: 'preview' }, (err) => { expect(err).not.to.exist expect(FileConverter.promises.thumbnail).not.to.have.been.called expect(FileConverter.promises.preview).to.have.been.called @@ -324,8 +324,8 @@ describe('FileHandler', function() { }) }) - describe('getRedirectUrl', function() { - beforeEach(function() { + describe('getRedirectUrl', function () { + beforeEach(function () { Settings.filestore = { allowRedirects: true, stores: { @@ -334,7 +334,7 @@ describe('FileHandler', function() { } }) - it('should return a redirect url', function(done) { + it('should return a redirect url', function (done) { FileHandler.getRedirectUrl(bucket, key, (err, url) => { expect(err).not.to.exist expect(url).to.equal(redirectUrl) @@ -342,7 +342,7 @@ describe('FileHandler', function() { }) }) - it('should call the persistor to get a redirect url', function(done) { + it('should call the persistor to get a redirect url', function (done) { FileHandler.getRedirectUrl(bucket, key, () => { expect(PersistorManager.getRedirectUrl).to.have.been.calledWith( bucket, @@ -352,7 +352,7 @@ describe('FileHandler', function() { }) }) - it('should return null if options are supplied', function(done) { + it('should return null if options are supplied', function (done) { FileHandler.getRedirectUrl( bucket, key, @@ -365,7 +365,7 @@ describe('FileHandler', function() { ) }) - it('should return null if the bucket is not one of the defined ones', function(done) { + it('should return null if 
the bucket is not one of the defined ones', function (done) { FileHandler.getRedirectUrl('a_different_bucket', key, (err, url) => { expect(err).not.to.exist expect(url).to.be.null @@ -373,7 +373,7 @@ describe('FileHandler', function() { }) }) - it('should return null if redirects are not enabled', function(done) { + it('should return null if redirects are not enabled', function (done) { Settings.filestore.allowRedirects = false FileHandler.getRedirectUrl(bucket, key, (err, url) => { expect(err).not.to.exist @@ -383,9 +383,9 @@ describe('FileHandler', function() { }) }) - describe('getDirectorySize', function() { - it('should call the filestore manager to get directory size', function(done) { - FileHandler.getDirectorySize(bucket, key, err => { + describe('getDirectorySize', function () { + it('should call the filestore manager to get directory size', function (done) { + FileHandler.getDirectorySize(bucket, key, (err) => { expect(err).not.to.exist expect(PersistorManager.directorySize).to.have.been.calledWith( bucket, diff --git a/services/filestore/test/unit/js/ImageOptimiserTests.js b/services/filestore/test/unit/js/ImageOptimiserTests.js index 947400d0d8..7d9fcfd5c5 100644 --- a/services/filestore/test/unit/js/ImageOptimiserTests.js +++ b/services/filestore/test/unit/js/ImageOptimiserTests.js @@ -5,11 +5,11 @@ const modulePath = '../../../app/js/ImageOptimiser.js' const { FailedCommandError } = require('../../../app/js/Errors') const SandboxedModule = require('sandboxed-module') -describe('ImageOptimiser', function() { +describe('ImageOptimiser', function () { let ImageOptimiser, SafeExec, logger const sourcePath = '/wombat/potato.eps' - beforeEach(function() { + beforeEach(function () { SafeExec = { promises: sinon.stub().resolves() } @@ -27,9 +27,9 @@ describe('ImageOptimiser', function() { }) }) - describe('compressPng', function() { - it('should convert the file', function(done) { - ImageOptimiser.compressPng(sourcePath, err => { + describe('compressPng', function () { + it('should convert the file', function (done) { + ImageOptimiser.compressPng(sourcePath, (err) => { expect(err).not.to.exist expect(SafeExec.promises).to.have.been.calledWith([ 'optipng', @@ -39,32 +39,32 @@ describe('ImageOptimiser', function() { }) }) - it('should return the error', function(done) { + it('should return the error', function (done) { SafeExec.promises.rejects('wombat herding failure') - ImageOptimiser.compressPng(sourcePath, err => { + ImageOptimiser.compressPng(sourcePath, (err) => { expect(err.toString()).to.equal('wombat herding failure') done() }) }) }) - describe('when optimiser is sigkilled', function() { + describe('when optimiser is sigkilled', function () { const expectedError = new FailedCommandError('', 'SIGKILL', '', '') let error - beforeEach(function(done) { + beforeEach(function (done) { SafeExec.promises.rejects(expectedError) - ImageOptimiser.compressPng(sourcePath, err => { + ImageOptimiser.compressPng(sourcePath, (err) => { error = err done() }) }) - it('should not produce an error', function() { + it('should not produce an error', function () { expect(error).not.to.exist }) - it('should log a warning', function() { + it('should log a warning', function () { expect(logger.warn).to.have.been.calledOnce }) }) diff --git a/services/filestore/test/unit/js/KeybuilderTests.js b/services/filestore/test/unit/js/KeybuilderTests.js index 774fc2f366..d72e65df5b 100644 --- a/services/filestore/test/unit/js/KeybuilderTests.js +++ b/services/filestore/test/unit/js/KeybuilderTests.js @@ -2,30 
+2,30 @@ const SandboxedModule = require('sandboxed-module') const modulePath = '../../../app/js/KeyBuilder.js' -describe('KeybuilderTests', function() { +describe('KeybuilderTests', function () { let KeyBuilder const key = 'wombat/potato' - beforeEach(function() { + beforeEach(function () { KeyBuilder = SandboxedModule.require(modulePath, { requires: { 'settings-sharelatex': {} } }) }) - describe('cachedKey', function() { - it('should add the format to the key', function() { + describe('cachedKey', function () { + it('should add the format to the key', function () { const opts = { format: 'png' } const newKey = KeyBuilder.addCachingToKey(key, opts) newKey.should.equal(`${key}-converted-cache/format-png`) }) - it('should add the style to the key', function() { + it('should add the style to the key', function () { const opts = { style: 'thumbnail' } const newKey = KeyBuilder.addCachingToKey(key, opts) newKey.should.equal(`${key}-converted-cache/style-thumbnail`) }) - it('should add format first, then style', function() { + it('should add format first, then style', function () { const opts = { style: 'thumbnail', format: 'png' diff --git a/services/filestore/test/unit/js/LocalFileWriterTests.js b/services/filestore/test/unit/js/LocalFileWriterTests.js index 0316b5d800..5041e79288 100644 --- a/services/filestore/test/unit/js/LocalFileWriterTests.js +++ b/services/filestore/test/unit/js/LocalFileWriterTests.js @@ -6,7 +6,7 @@ const SandboxedModule = require('sandboxed-module') const { Errors } = require('@overleaf/object-persistor') chai.use(require('sinon-chai')) -describe('LocalFileWriter', function() { +describe('LocalFileWriter', function () { const writeStream = 'writeStream' const readStream = 'readStream' const settings = { path: { uploadFolder: '/uploads' } } @@ -14,7 +14,7 @@ describe('LocalFileWriter', function() { const filename = 'wombat' let stream, fs, LocalFileWriter - beforeEach(function() { + beforeEach(function () { fs = { createWriteStream: sinon.stub().returns(writeStream), unlink: sinon.stub().yields() @@ -39,8 +39,8 @@ describe('LocalFileWriter', function() { }) }) - describe('writeStream', function() { - it('writes the stream to the upload folder', function(done) { + describe('writeStream', function () { + it('writes the stream to the upload folder', function (done) { LocalFileWriter.writeStream(readStream, filename, (err, path) => { expect(err).not.to.exist expect(fs.createWriteStream).to.have.been.calledWith(fsPath) @@ -50,20 +50,20 @@ describe('LocalFileWriter', function() { }) }) - describe('when there is an error', function() { + describe('when there is an error', function () { const error = new Error('not enough ketchup') - beforeEach(function() { + beforeEach(function () { stream.pipeline.yields(error) }) - it('should wrap the error', function() { - LocalFileWriter.writeStream(readStream, filename, err => { + it('should wrap the error', function () { + LocalFileWriter.writeStream(readStream, filename, (err) => { expect(err).to.exist expect(err.cause).to.equal(error) }) }) - it('should delete the temporary file', function() { + it('should delete the temporary file', function () { LocalFileWriter.writeStream(readStream, filename, () => { expect(fs.unlink).to.have.been.calledWith(fsPath) }) @@ -71,37 +71,37 @@ describe('LocalFileWriter', function() { }) }) - describe('deleteFile', function() { - it('should unlink the file', function(done) { - LocalFileWriter.deleteFile(fsPath, err => { + describe('deleteFile', function () { + it('should unlink the file', function 
(done) { + LocalFileWriter.deleteFile(fsPath, (err) => { expect(err).not.to.exist expect(fs.unlink).to.have.been.calledWith(fsPath) done() }) }) - it('should not call unlink with an empty path', function(done) { - LocalFileWriter.deleteFile('', err => { + it('should not call unlink with an empty path', function (done) { + LocalFileWriter.deleteFile('', (err) => { expect(err).not.to.exist expect(fs.unlink).not.to.have.been.called done() }) }) - it('should not throw a error if the file does not exist', function(done) { + it('should not throw a error if the file does not exist', function (done) { const error = new Error('file not found') error.code = 'ENOENT' fs.unlink = sinon.stub().yields(error) - LocalFileWriter.deleteFile(fsPath, err => { + LocalFileWriter.deleteFile(fsPath, (err) => { expect(err).not.to.exist done() }) }) - it('should wrap the error', function(done) { + it('should wrap the error', function (done) { const error = new Error('failed to reticulate splines') fs.unlink = sinon.stub().yields(error) - LocalFileWriter.deleteFile(fsPath, err => { + LocalFileWriter.deleteFile(fsPath, (err) => { expect(err).to.exist expect(err.cause).to.equal(error) done() diff --git a/services/filestore/test/unit/js/SafeExecTests.js b/services/filestore/test/unit/js/SafeExecTests.js index 4d31f6f57f..c4b59e70e4 100644 --- a/services/filestore/test/unit/js/SafeExecTests.js +++ b/services/filestore/test/unit/js/SafeExecTests.js @@ -5,10 +5,10 @@ const modulePath = '../../../app/js/SafeExec' const { Errors } = require('@overleaf/object-persistor') const SandboxedModule = require('sandboxed-module') -describe('SafeExec', function() { +describe('SafeExec', function () { let settings, options, safeExec - beforeEach(function() { + beforeEach(function () { settings = { enableConversions: true } options = { timeout: 10 * 1000, killSignal: 'SIGTERM' } @@ -23,8 +23,8 @@ describe('SafeExec', function() { }) }) - describe('safeExec', function() { - it('should execute a valid command', function(done) { + describe('safeExec', function () { + it('should execute a valid command', function (done) { safeExec(['/bin/echo', 'hello'], options, (err, stdout, stderr) => { stdout.should.equal('hello\n') stderr.should.equal('') @@ -33,16 +33,16 @@ describe('SafeExec', function() { }) }) - it('should error when conversions are disabled', function(done) { + it('should error when conversions are disabled', function (done) { settings.enableConversions = false - safeExec(['/bin/echo', 'hello'], options, err => { + safeExec(['/bin/echo', 'hello'], options, (err) => { expect(err).to.exist done() }) }) - it('should execute a command with non-zero exit status', function(done) { - safeExec(['/usr/bin/env', 'false'], options, err => { + it('should execute a command with non-zero exit status', function (done) { + safeExec(['/usr/bin/env', 'false'], options, (err) => { expect(err).to.exist expect(err.name).to.equal('FailedCommandError') expect(err.code).to.equal(1) @@ -52,18 +52,18 @@ describe('SafeExec', function() { }) }) - it('should handle an invalid command', function(done) { - safeExec(['/bin/foobar'], options, err => { + it('should handle an invalid command', function (done) { + safeExec(['/bin/foobar'], options, (err) => { err.code.should.equal('ENOENT') done() }) }) - it('should handle a command that runs too long', function(done) { + it('should handle a command that runs too long', function (done) { safeExec( ['/bin/sleep', '10'], { timeout: 500, killSignal: 'SIGTERM' }, - err => { + (err) => { expect(err).to.exist 
expect(err.name).to.equal('FailedCommandError') expect(err.code).to.equal('SIGTERM') @@ -73,19 +73,19 @@ describe('SafeExec', function() { }) }) - describe('as a promise', function() { - beforeEach(function() { + describe('as a promise', function () { + beforeEach(function () { safeExec = safeExec.promises }) - it('should execute a valid command', async function() { + it('should execute a valid command', async function () { const { stdout, stderr } = await safeExec(['/bin/echo', 'hello'], options) stdout.should.equal('hello\n') stderr.should.equal('') }) - it('should throw a ConversionsDisabledError when appropriate', async function() { + it('should throw a ConversionsDisabledError when appropriate', async function () { settings.enableConversions = false try { await safeExec(['/bin/echo', 'hello'], options) @@ -96,7 +96,7 @@ describe('SafeExec', function() { expect('method did not throw an error').not.to.exist }) - it('should throw a FailedCommandError when appropriate', async function() { + it('should throw a FailedCommandError when appropriate', async function () { try { await safeExec(['/usr/bin/env', 'false'], options) } catch (err) { diff --git a/services/filestore/test/unit/js/SettingsTests.js b/services/filestore/test/unit/js/SettingsTests.js index 230ea45d15..68bc580ca6 100644 --- a/services/filestore/test/unit/js/SettingsTests.js +++ b/services/filestore/test/unit/js/SettingsTests.js @@ -2,9 +2,9 @@ const chai = require('chai') const { expect } = chai const SandboxedModule = require('sandboxed-module') -describe('Settings', function() { - describe('s3', function() { - it('should use JSONified env var if present', function() { +describe('Settings', function () { + describe('s3', function () { + it('should use JSONified env var if present', function () { const s3Settings = { bucket1: { auth_key: 'bucket1_key', From 7b648204848196597a1e9f6426f54aca96e39110 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 10 Aug 2020 18:00:29 +0100 Subject: [PATCH 525/555] [misc] add back environment variables for gcs buckets --- services/filestore/buildscript.txt | 2 +- services/filestore/docker-compose.ci.yml | 3 +++ services/filestore/docker-compose.yml | 3 +++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index c3dbf05e1d..cc29f21870 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -2,7 +2,7 @@ filestore --data-dirs=uploads,user_files,template_files --dependencies=s3,gcs --docker-repos=gcr.io/overleaf-ops ---env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_BUCKET_NAME=fake_user_files,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake_template_files,AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake_public_files +--env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_BUCKET_NAME=fake_user_files,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake_template_files,AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake_public_files,GCS_USER_FILES_BUCKET_NAME=fake_userfiles,GCS_TEMPLATE_FILES_BUCKET_NAME=fake_templatefiles,GCS_PUBLIC_FILES_BUCKET_NAME=fake_publicfiles --env-pass-through= --node-version=12.18.0 --public-repo=True diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index 5f2d01e9a6..a53beb7636 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -38,6 +38,9 @@ services: AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files AWS_S3_TEMPLATE_FILES_BUCKET_NAME: 
fake_template_files AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files + GCS_USER_FILES_BUCKET_NAME: fake_userfiles + GCS_TEMPLATE_FILES_BUCKET_NAME: fake_templatefiles + GCS_PUBLIC_FILES_BUCKET_NAME: fake_publicfiles depends_on: s3: condition: service_healthy diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 32424d97aa..1b21354b52 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -48,6 +48,9 @@ services: AWS_S3_USER_FILES_BUCKET_NAME: fake_user_files AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake_template_files AWS_S3_PUBLIC_FILES_BUCKET_NAME: fake_public_files + GCS_USER_FILES_BUCKET_NAME: fake_userfiles + GCS_TEMPLATE_FILES_BUCKET_NAME: fake_templatefiles + GCS_PUBLIC_FILES_BUCKET_NAME: fake_publicfiles user: node depends_on: s3: From 6968d5d17bb00c0beb2af90fd981172fc2fd7a5d Mon Sep 17 00:00:00 2001 From: Christopher Hoskin Date: Wed, 12 Aug 2020 15:38:07 +0100 Subject: [PATCH 526/555] [misc] bump logger-sharelatex to version 2.2.0 --- services/filestore/package-lock.json | 311 ++++++++++++++++----------- services/filestore/package.json | 2 +- 2 files changed, 181 insertions(+), 132 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 70fc188732..b747e51228 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -149,6 +149,24 @@ "google-auth-library": "^5.5.0", "retry-request": "^4.0.0", "teeny-request": "^6.0.0" + }, + "dependencies": { + "google-auth-library": { + "version": "5.10.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", + "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^2.1.0", + "gcp-metadata": "^3.4.0", + "gtoken": "^4.1.0", + "jws": "^4.0.0", + "lru-cache": "^5.0.0" + } + } } }, "@google-cloud/debug-agent": { @@ -369,6 +387,24 @@ "stream-events": "^1.0.4", "through2": "^3.0.0", "type-fest": "^0.12.0" + }, + "dependencies": { + "google-auth-library": { + "version": "5.10.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", + "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^2.1.0", + "gcp-metadata": "^3.4.0", + "gtoken": "^4.1.0", + "jws": "^4.0.0", + "lru-cache": "^5.0.0" + } + } } }, "@google-cloud/logging-bunyan": { @@ -378,69 +414,6 @@ "requires": { "@google-cloud/logging": "^7.0.0", "google-auth-library": "^6.0.0" - }, - "dependencies": { - "gaxios": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.3.tgz", - "integrity": "sha512-PkzQludeIFhd535/yucALT/Wxyj/y2zLyrMwPcJmnLHDugmV49NvAi/vb+VUq/eWztATZCNcb8ue+ywPG+oLuw==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^5.0.0", - "is-stream": "^2.0.0", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.0.tgz", - "integrity": "sha512-r57SV28+olVsflPlKyVig3Muo/VDlcsObMtvDGOEtEJXj+DDE8bEl0coIkXh//hbkSDTvo+f5lbihZOndYXQQQ==", - "requires": { - "gaxios": 
"^3.0.0", - "json-bigint": "^0.3.0" - } - }, - "google-auth-library": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.3.tgz", - "integrity": "sha512-2Np6ojPmaJGXHSMsBhtTQEKfSMdLc8hefoihv7N2cwEr8E5bq39fhoat6TcXHwa0XoGO5Guh9sp3nxHFPmibMw==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^3.0.0", - "gcp-metadata": "^4.1.0", - "gtoken": "^5.0.0", - "jws": "^4.0.0", - "lru-cache": "^5.0.0" - } - }, - "google-p12-pem": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.1.tgz", - "integrity": "sha512-VlQgtozgNVVVcYTXS36eQz4PXPt9gIPqLOhHN0QiV6W6h4qSCNVKPtKC5INtJsaHHF2r7+nOIa26MJeJMTaZEQ==", - "requires": { - "node-forge": "^0.9.0" - } - }, - "gtoken": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.1.tgz", - "integrity": "sha512-33w4FNDkUcyIOq/TqyC+drnKdI4PdXmWp9lZzssyEQKuvu9ZFN3KttaSnDKo52U3E51oujVGop93mKxmqO8HHg==", - "requires": { - "gaxios": "^3.0.0", - "google-p12-pem": "^3.0.0", - "jws": "^4.0.0", - "mime": "^2.2.0" - } - }, - "mime": { - "version": "2.4.6", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", - "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" - } } }, "@google-cloud/paginator": { @@ -1097,9 +1070,9 @@ } }, "@grpc/proto-loader": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.4.tgz", - "integrity": "sha512-HTM4QpI9B2XFkPz7pjwMyMgZchJ93TVkL3kWPW8GDMDKYxsMnmf4w2TNMJK7+KNiYHS5cJrCEAFlF+AwtXWVPA==", + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.5.tgz", + "integrity": "sha512-WwN9jVNdHRQoOBo9FDH7qU+mgfjPc8GygPYms3M+y3fbQLfnCe/Kv/E01t7JRgnrsOHH8euvSbed3mIalXhwqQ==", "requires": { "lodash.camelcase": "^4.3.0", "protobufjs": "^6.8.6" @@ -1821,7 +1794,6 @@ "version": "1.8.14", "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.14.tgz", "integrity": "sha512-LlahJUxXzZLuw/hetUQJmRgZ1LF6+cr5TPpRj6jf327AsiIq2jhYEH4oqUUkVKTor+9w2BT3oxVwhzE5lw9tcg==", - "dev": true, "requires": { "dtrace-provider": "~0.8", "moment": "^2.19.3", @@ -1905,7 +1877,7 @@ "charenc": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", - "integrity": "sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==" + "integrity": "sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=" }, "check-error": { "version": "1.0.2", @@ -2141,7 +2113,7 @@ "crypt": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", - "integrity": "sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==" + "integrity": "sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=" }, "crypto-random-string": { "version": "2.0.0", @@ -2151,7 +2123,7 @@ "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" + "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" }, "dashdash": { "version": "1.14.1", @@ -3037,9 +3009,9 @@ "dev": true }, "gaxios": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.3.1.tgz", - "integrity": "sha512-DQOesWEx59/bm63lTX0uHDDXpGTW9oKqNsoigwCoRe2lOb5rFqxzHjLTa6aqEBecLcz69dHLw7rbS068z1fvIQ==", + "version": 
"2.3.4", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.3.4.tgz", + "integrity": "sha512-US8UMj8C5pRnao3Zykc4AAVr+cffoNKRTg9Rsf2GiuZCW69vgJj38VK2PzlPuQU73FZ/nTk9/Av6/JGcE1N9vA==", "requires": { "abort-controller": "^3.0.0", "extend": "^3.0.2", @@ -3049,9 +3021,9 @@ } }, "gcp-metadata": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-3.4.0.tgz", - "integrity": "sha512-fizmBtCXHp8b7FZuzbgKaixO8DzsSYoEVmMgZIna7x8t6cfBF3eqirODWYxVbgmasA5qudCAKiszfB7yVwroIQ==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-3.5.0.tgz", + "integrity": "sha512-ZQf+DLZ5aKcRpLzYUyBS3yo3N0JSa82lNDO8rj3nMSlovLcz2riKFBsYgDzeXcv75oo5eqB2lx+B14UvPoCRnA==", "requires": { "gaxios": "^2.1.0", "json-bigint": "^0.3.0" @@ -3215,19 +3187,92 @@ "dev": true }, "google-auth-library": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", - "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.6.tgz", + "integrity": "sha512-fWYdRdg55HSJoRq9k568jJA1lrhg9i2xgfhVIMJbskUmbDpJGHsbv9l41DGhCDXM21F9Kn4kUwdysgxSYBYJUw==", "requires": { "arrify": "^2.0.0", "base64-js": "^1.3.0", "ecdsa-sig-formatter": "^1.0.11", "fast-text-encoding": "^1.0.0", - "gaxios": "^2.1.0", - "gcp-metadata": "^3.4.0", - "gtoken": "^4.1.0", + "gaxios": "^3.0.0", + "gcp-metadata": "^4.1.0", + "gtoken": "^5.0.0", "jws": "^4.0.0", - "lru-cache": "^5.0.0" + "lru-cache": "^6.0.0" + }, + "dependencies": { + "bignumber.js": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", + "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==" + }, + "gaxios": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz", + "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", + "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==", + "requires": { + "gaxios": "^3.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz", + "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==", + "requires": { + "node-forge": "^0.9.0" + } + }, + "gtoken": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.3.tgz", + "integrity": "sha512-Nyd1wZCMRc2dj/mAD0LlfQLcAO06uKdpKJXvK85SGrF5+5+Bpfil9u/2aw35ltvEHjvl0h5FMKN5knEU+9JrOg==", + "requires": { + "gaxios": "^3.0.0", + "google-p12-pem": "^3.0.0", + "jws": "^4.0.0", + "mime": "^2.2.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + 
"version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "mime": { + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } } }, "google-gax": { @@ -3250,6 +3295,24 @@ "retry-request": "^4.0.0", "semver": "^6.0.0", "walkdir": "^0.4.0" + }, + "dependencies": { + "google-auth-library": { + "version": "5.10.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", + "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^2.1.0", + "gcp-metadata": "^3.4.0", + "gtoken": "^4.1.0", + "jws": "^4.0.0", + "lru-cache": "^5.0.0" + } + } } }, "google-p12-pem": { @@ -3283,9 +3346,9 @@ }, "dependencies": { "mime": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", - "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" } } }, @@ -3812,12 +3875,12 @@ "lodash.at": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", - "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" + "integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g=" }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" + "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" }, "lodash.get": { "version": "4.4.2", @@ -3828,7 +3891,7 @@ "lodash.has": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", - "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" + "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=" }, "lodash.memoize": { "version": "4.1.2", @@ -3873,28 +3936,16 @@ } }, "logger-sharelatex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-2.1.1.tgz", - "integrity": "sha512-qqSrBqUgHWnStxtTZ/fSsqPxj9Ju9onok7Vfm3bv5MS702jH+hRsCSA9oXOMvOLcWJrZFnhCZaLGeOvXToUaxw==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-2.2.0.tgz", + "integrity": "sha512-ko+OmE25XHJJCiz1R9EgwlfM7J/5olpunUfR3WcfuqOQrcUqsdBrDA2sOytngT0ViwjCR0Fh4qZVPwEWfmrvwA==", "requires": { "@google-cloud/logging-bunyan": "^3.0.0", "@overleaf/o-error": "^3.0.0", "bunyan": "^1.8.14", + "node-fetch": "^2.6.0", "raven": "^2.6.4", "yn": "^4.0.0" - }, - "dependencies": { - "bunyan": { - "version": "1.8.14", - 
"resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.14.tgz", - "integrity": "sha512-LlahJUxXzZLuw/hetUQJmRgZ1LF6+cr5TPpRj6jf327AsiIq2jhYEH4oqUUkVKTor+9w2BT3oxVwhzE5lw9tcg==", - "requires": { - "dtrace-provider": "~0.8", - "moment": "^2.19.3", - "mv": "~2", - "safe-json-stringify": "~1" - } - } } }, "loglevel": { @@ -4009,13 +4060,13 @@ "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==" }, "md5": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/md5/-/md5-2.2.1.tgz", - "integrity": "sha512-PlGG4z5mBANDGCKsYQe0CaUYHdZYZt8ZPZLmEt+Urf0W4GlpTX4HescwHU+dc9+Z/G/vZKYZYFrwgm9VxK6QOQ==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz", + "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==", "requires": { - "charenc": "~0.0.1", - "crypt": "~0.0.1", - "is-buffer": "~1.1.1" + "charenc": "0.0.2", + "crypt": "0.0.2", + "is-buffer": "~1.1.6" }, "dependencies": { "is-buffer": { @@ -5339,9 +5390,9 @@ } }, "protobufjs": { - "version": "6.9.0", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.9.0.tgz", - "integrity": "sha512-LlGVfEWDXoI/STstRDdZZKb/qusoAWUnmLg9R8OLSO473mBLWHowx8clbX5/+mKDEI+v7GzjoK9tRPZMMcoTrg==", + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.1.tgz", + "integrity": "sha512-pb8kTchL+1Ceg4lFd5XUpK8PdWacbvV5SK2ULH2ebrYtl4GjJmS24m6CKME67jzV53tbJxHlnNOSqQHbTsR9JQ==", "requires": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -5461,12 +5512,12 @@ "cookie": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", - "integrity": "sha512-+IJOX0OqlHCszo2mBUq+SrEbCj6w7Kpffqx60zYbPTFaO4+yYgRjHwcZNpWvaTylDHaV7PPmBHzSecZiMhtPgw==" + "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" }, "stack-trace": { "version": "0.0.10", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", - "integrity": "sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==" + "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=" }, "uuid": { "version": "3.3.2", @@ -6227,22 +6278,15 @@ } }, "teeny-request": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-6.0.2.tgz", - "integrity": "sha512-B6fxA0fSnY/bul06NggdN1nywtr5U5Uvt96pHfTi8pi4MNe6++VUWcAAFBrcMeha94s+gULwA5WvagoSZ+AcYg==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-6.0.3.tgz", + "integrity": "sha512-TZG/dfd2r6yeji19es1cUIwAlVD8y+/svB1kAC2Y0bjEyysrfbO8EZvJBRwIE6WkwmUoB7uvWLwTIhJbMXZ1Dw==", "requires": { "http-proxy-agent": "^4.0.0", "https-proxy-agent": "^5.0.0", "node-fetch": "^2.2.0", "stream-events": "^1.0.5", - "uuid": "^3.3.2" - }, - "dependencies": { - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" - } + "uuid": "^7.0.0" } }, "text-table": { @@ -6267,7 +6311,7 @@ "timed-out": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", - "integrity": "sha512-G7r3AhovYtr5YKOWQkta8RKAPb+J9IsO4uVmzjl8AZwfhs8UcUwTiD6gcJYSgOtzyjvQKrKYn41syHbUWMkafA==" + "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8=" }, "timekeeper": { "version": "2.2.0", @@ -6309,7 +6353,7 @@ "to-no-case": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" + "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" }, "to-regex-range": { "version": "5.0.1", @@ -6323,7 +6367,7 @@ "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", + "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", "requires": { "to-space-case": "^1.0.0" } @@ -6331,7 +6375,7 @@ "to-space-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", + "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", "requires": { "to-no-case": "^1.0.0" } @@ -6488,6 +6532,11 @@ "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==" }, + "uuid": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-7.0.3.tgz", + "integrity": "sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==" + }, "v8-compile-cache": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 6408114f4f..17915dbde5 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -27,7 +27,7 @@ "fast-crc32c": "^2.0.0", "glob": "^7.1.6", "lodash.once": "^4.1.1", - "logger-sharelatex": "^2.1.1", + "logger-sharelatex": "^2.2.0", "metrics-sharelatex": "^2.7.0", "node-uuid": "~1.4.8", "range-parser": "^1.2.1", From cc3ccc27130684232843d8dae651f1fa5609df7e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 3 Sep 2020 19:03:30 +0000 Subject: [PATCH 527/555] Bump bl from 2.2.0 to 2.2.1 Bumps [bl](https://github.com/rvagg/bl) from 2.2.0 to 2.2.1. 
- [Release notes](https://github.com/rvagg/bl/releases) - [Commits](https://github.com/rvagg/bl/compare/v2.2.0...v2.2.1) Signed-off-by: dependabot[bot] --- services/filestore/package-lock.json | 33 ++++------------------------ 1 file changed, 4 insertions(+), 29 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index b747e51228..346bdae6ed 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -1132,29 +1132,6 @@ "node-uuid": "^1.4.8", "range-parser": "^1.2.1", "tiny-async-pool": "^1.1.0" - }, - "dependencies": { - "aws-sdk": { - "version": "2.718.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.718.0.tgz", - "integrity": "sha512-YMWR1RJ3VuSbUOGeOfDw2QqRzwX51oa9TCm2G6SW+JywJUy0FTxi/Nj0VjVEQvKC0GqGu5QCgUTaarF7S0nQdw==", - "requires": { - "buffer": "4.9.2", - "events": "1.1.1", - "ieee754": "1.1.13", - "jmespath": "0.15.0", - "querystring": "0.2.0", - "sax": "1.2.1", - "url": "0.10.3", - "uuid": "3.3.2", - "xml2js": "0.4.19" - } - }, - "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" - } } }, "@protobufjs/aspromise": { @@ -1606,7 +1583,6 @@ "version": "2.718.0", "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.718.0.tgz", "integrity": "sha512-YMWR1RJ3VuSbUOGeOfDw2QqRzwX51oa9TCm2G6SW+JywJUy0FTxi/Nj0VjVEQvKC0GqGu5QCgUTaarF7S0nQdw==", - "dev": true, "requires": { "buffer": "4.9.2", "events": "1.1.1", @@ -1622,8 +1598,7 @@ "uuid": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", - "dev": true + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" } } }, @@ -1703,9 +1678,9 @@ "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" }, "bl": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.0.tgz", - "integrity": "sha512-wbgvOpqopSr7uq6fJrLH8EsvYMJf9gzfo2jCsL2eTy75qXPukA4pCgHamOQkZtY5vmfVtjB+P3LNlMHW5CEZXA==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.1.tgz", + "integrity": "sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g==", "dev": true, "requires": { "readable-stream": "^2.3.5", From 4086d38a098f13c07248a06595c9290cf15adf49 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 12 Sep 2020 15:07:58 +0000 Subject: [PATCH 528/555] Bump node-fetch from 2.6.0 to 2.6.1 Bumps [node-fetch](https://github.com/bitinn/node-fetch) from 2.6.0 to 2.6.1. 
- [Release notes](https://github.com/bitinn/node-fetch/releases) - [Changelog](https://github.com/node-fetch/node-fetch/blob/master/docs/CHANGELOG.md) - [Commits](https://github.com/bitinn/node-fetch/compare/v2.6.0...v2.6.1) Signed-off-by: dependabot[bot] --- services/filestore/package-lock.json | 33 ++++------------------------ 1 file changed, 4 insertions(+), 29 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index b747e51228..0eeca461c7 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -1132,29 +1132,6 @@ "node-uuid": "^1.4.8", "range-parser": "^1.2.1", "tiny-async-pool": "^1.1.0" - }, - "dependencies": { - "aws-sdk": { - "version": "2.718.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.718.0.tgz", - "integrity": "sha512-YMWR1RJ3VuSbUOGeOfDw2QqRzwX51oa9TCm2G6SW+JywJUy0FTxi/Nj0VjVEQvKC0GqGu5QCgUTaarF7S0nQdw==", - "requires": { - "buffer": "4.9.2", - "events": "1.1.1", - "ieee754": "1.1.13", - "jmespath": "0.15.0", - "querystring": "0.2.0", - "sax": "1.2.1", - "url": "0.10.3", - "uuid": "3.3.2", - "xml2js": "0.4.19" - } - }, - "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" - } } }, "@protobufjs/aspromise": { @@ -1606,7 +1583,6 @@ "version": "2.718.0", "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.718.0.tgz", "integrity": "sha512-YMWR1RJ3VuSbUOGeOfDw2QqRzwX51oa9TCm2G6SW+JywJUy0FTxi/Nj0VjVEQvKC0GqGu5QCgUTaarF7S0nQdw==", - "dev": true, "requires": { "buffer": "4.9.2", "events": "1.1.1", @@ -1622,8 +1598,7 @@ "uuid": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", - "dev": true + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" } } }, @@ -4493,9 +4468,9 @@ } }, "node-fetch": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", - "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==" + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz", + "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==" }, "node-forge": { "version": "0.9.1", From 02c4d9304a3d32bc3de924b4ea57d1c36a49c347 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Sep 2020 10:23:11 +0000 Subject: [PATCH 529/555] Bump lodash from 4.17.15 to 4.17.20 Bumps [lodash](https://github.com/lodash/lodash) from 4.17.15 to 4.17.20. 
- [Release notes](https://github.com/lodash/lodash/releases) - [Commits](https://github.com/lodash/lodash/compare/4.17.15...4.17.20) Signed-off-by: dependabot[bot] --- services/filestore/package-lock.json | 33 ++++------------------------ 1 file changed, 4 insertions(+), 29 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index b747e51228..4e077a454a 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -1132,29 +1132,6 @@ "node-uuid": "^1.4.8", "range-parser": "^1.2.1", "tiny-async-pool": "^1.1.0" - }, - "dependencies": { - "aws-sdk": { - "version": "2.718.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.718.0.tgz", - "integrity": "sha512-YMWR1RJ3VuSbUOGeOfDw2QqRzwX51oa9TCm2G6SW+JywJUy0FTxi/Nj0VjVEQvKC0GqGu5QCgUTaarF7S0nQdw==", - "requires": { - "buffer": "4.9.2", - "events": "1.1.1", - "ieee754": "1.1.13", - "jmespath": "0.15.0", - "querystring": "0.2.0", - "sax": "1.2.1", - "url": "0.10.3", - "uuid": "3.3.2", - "xml2js": "0.4.19" - } - }, - "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" - } } }, "@protobufjs/aspromise": { @@ -1606,7 +1583,6 @@ "version": "2.718.0", "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.718.0.tgz", "integrity": "sha512-YMWR1RJ3VuSbUOGeOfDw2QqRzwX51oa9TCm2G6SW+JywJUy0FTxi/Nj0VjVEQvKC0GqGu5QCgUTaarF7S0nQdw==", - "dev": true, "requires": { "buffer": "4.9.2", "events": "1.1.1", @@ -1622,8 +1598,7 @@ "uuid": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", - "dev": true + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" } } }, @@ -3868,9 +3843,9 @@ } }, "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" + "version": "4.17.20", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz", + "integrity": "sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==" }, "lodash.at": { "version": "4.6.0", From 473a0442093c6134f02b65c80e904c79044ce45d Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Thu, 17 Sep 2020 11:41:42 +0100 Subject: [PATCH 530/555] Upgrade node to 12.18.4 --- services/filestore/.github/dependabot.yml | 6 ++++++ services/filestore/.nvmrc | 2 +- services/filestore/Dockerfile | 2 +- services/filestore/buildscript.txt | 4 ++-- services/filestore/package-lock.json | 18 +++++++++--------- .../test/acceptance/deps/Dockerfile.fake-gcs | 2 +- 6 files changed, 20 insertions(+), 14 deletions(-) diff --git a/services/filestore/.github/dependabot.yml b/services/filestore/.github/dependabot.yml index c6f98d843d..e2c64a3351 100644 --- a/services/filestore/.github/dependabot.yml +++ b/services/filestore/.github/dependabot.yml @@ -15,3 +15,9 @@ updates: # Block informal upgrades -- security upgrades use a separate queue. 
# https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit open-pull-requests-limit: 0 + + # currently assign team-magma to all dependabot PRs - this may change in + # future if we reorganise teams + labels: + - "dependencies" + - "Team-Magma" diff --git a/services/filestore/.nvmrc b/services/filestore/.nvmrc index b06cd07c44..ce122aaa98 100644 --- a/services/filestore/.nvmrc +++ b/services/filestore/.nvmrc @@ -1 +1 @@ -12.18.0 +12.18.4 diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index 60264147af..b6b31f2022 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -2,7 +2,7 @@ # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -FROM node:12.18.0 as base +FROM node:12.18.4 as base WORKDIR /app COPY install_deps.sh /app diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index cc29f21870..bb25377d75 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -4,6 +4,6 @@ filestore --docker-repos=gcr.io/overleaf-ops --env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_BUCKET_NAME=fake_user_files,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake_template_files,AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake_public_files,GCS_USER_FILES_BUCKET_NAME=fake_userfiles,GCS_TEMPLATE_FILES_BUCKET_NAME=fake_templatefiles,GCS_PUBLIC_FILES_BUCKET_NAME=fake_publicfiles --env-pass-through= ---node-version=12.18.0 +--node-version=12.18.4 --public-repo=True ---script-version=3.3.2 +--script-version=3.3.4 diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index b747e51228..4394990902 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -2123,7 +2123,7 @@ "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" + "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" }, "dashdash": { "version": "1.14.1", @@ -3875,12 +3875,12 @@ "lodash.at": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz", - "integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g=" + "integrity": "sha512-GOTh0SEp+Yosnlpjic+8cl2WM9MykorogkGA9xyIFkkObQ3H3kNZqZ+ohuq4K3FrSVo7hMcZBMataJemrxC3BA==" }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" }, "lodash.get": { "version": "4.4.2", @@ -3891,7 +3891,7 @@ "lodash.has": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", - "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=" + "integrity": "sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" }, "lodash.memoize": { "version": "4.1.2", @@ -6005,7 +6005,7 @@ "snakeize": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/snakeize/-/snakeize-0.1.0.tgz", - "integrity": "sha512-ot3bb6pQt6IVq5G/JQ640ceSYTPtriVrwNyfoUw1LmQQGzPMAGxE5F+ded2UwSUCyf2PW1fFAYUnVEX21PWbpQ==" + "integrity": "sha1-EMCI2LWOsHazIpu1oE4jLOEmQi0=" }, "source-map": { "version": "0.6.1", @@ -6353,7 +6353,7 @@ "to-no-case": { "version": "1.0.2", 
"resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo=" + "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" }, "to-regex-range": { "version": "5.0.1", @@ -6367,7 +6367,7 @@ "to-snake-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=", + "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", "requires": { "to-space-case": "^1.0.0" } @@ -6375,7 +6375,7 @@ "to-space-case": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=", + "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", "requires": { "to-no-case": "^1.0.0" } @@ -6464,7 +6464,7 @@ "typedarray": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", - "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==" + "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=" }, "typedarray-to-buffer": { "version": "3.1.5", diff --git a/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs index 0e6de7e735..2dac8cdb2c 100644 --- a/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs +++ b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs @@ -1,4 +1,4 @@ -FROM fsouza/fake-gcs-server:latest +FROM fsouza/fake-gcs-server:1.20 RUN apk add --update --no-cache curl COPY healthcheck.sh /healthcheck.sh HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090 From 7925f217b4fada773e6fbe6c62b8a2aa2edcfdf3 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Tue, 13 Oct 2020 09:42:06 +0100 Subject: [PATCH 531/555] Upgrade object-persistor to latest version --- services/filestore/package-lock.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 558a2e7c4a..ca4805bb7e 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -1120,7 +1120,7 @@ "integrity": "sha512-LsM2s6Iy9G97ktPo0ys4VxtI/m3ahc1ZHwjo5XnhXtjeIkkkVAehsrcRRoV/yWepPjymB0oZonhcfojpjYR/tg==" }, "@overleaf/object-persistor": { - "version": "git+https://github.com/overleaf/object-persistor.git#8b8bc4b8d1e8b8aa3ca9245691d6ddd69d663d06", + "version": "git+https://github.com/overleaf/object-persistor.git#8fbc9ed03206bfb54368578d22b7ac4f285baa25", "from": "git+https://github.com/overleaf/object-persistor.git", "requires": { "@google-cloud/storage": "^5.1.2", From d83e1e88c6eae2d267e13d818ac3a27c7cc21b83 Mon Sep 17 00:00:00 2001 From: Eric Mc Sween Date: Mon, 23 Nov 2020 10:56:33 -0500 Subject: [PATCH 532/555] Upgrade build-scripts to 3.4.0 This version fixes docker-compose health checks for dependent services. See https://github.com/overleaf/dev-environment/pull/409 for details. 
--- services/filestore/buildscript.txt | 2 +- services/filestore/docker-compose.ci.yml | 18 ++++++++++++------ services/filestore/docker-compose.yml | 18 ++++++++++++------ 3 files changed, 25 insertions(+), 13 deletions(-) diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index bb25377d75..2e464749ec 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -6,4 +6,4 @@ filestore --env-pass-through= --node-version=12.18.4 --public-repo=True ---script-version=3.3.4 +--script-version=3.4.0 diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index a53beb7636..bb16114cf9 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -20,6 +20,7 @@ services: environment: ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis + QUEUES_REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres AWS_S3_ENDPOINT: http://s3:9090 @@ -58,12 +59,17 @@ services: command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . user: root s3: - build: - context: test/acceptance/deps - dockerfile: Dockerfile.s3mock + image: adobe/s3mock environment: - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket + healthcheck: + test: wget --quiet --output-document=/dev/null http://localhost:9090 + interval: 1s + retries: 20 gcs: - build: - context: test/acceptance/deps - dockerfile: Dockerfile.fake-gcs + image: fsouza/fake-gcs-server + command: ["--port=9090", "--scheme=http"] + healthcheck: + test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b + interval: 1s + retries: 20 diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 1b21354b52..6ad3ceb2a8 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -29,6 +29,7 @@ services: environment: ELASTIC_SEARCH_DSN: es:9200 REDIS_HOST: redis + QUEUES_REDIS_HOST: redis MONGO_HOST: mongo POSTGRES_HOST: postgres AWS_S3_ENDPOINT: http://s3:9090 @@ -60,12 +61,17 @@ services: command: npm run --silent test:acceptance s3: - build: - context: test/acceptance/deps - dockerfile: Dockerfile.s3mock + image: adobe/s3mock environment: - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket + healthcheck: + test: wget --quiet --output-document=/dev/null http://localhost:9090 + interval: 1s + retries: 20 gcs: - build: - context: test/acceptance/deps - dockerfile: Dockerfile.fake-gcs + image: fsouza/fake-gcs-server + command: ["--port=9090", "--scheme=http"] + healthcheck: + test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b + interval: 1s + retries: 20 From 95f3bcef211178a023d94f49b9b80abffe366ad1 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Wed, 25 Nov 2020 11:57:23 +0000 Subject: [PATCH 533/555] [misc] bump metrics module to 3.4.1 - renamed package from `metrics-sharelatex` to `@overleaf/metrics` - drop support for statsd backend - decaffeinate - compress `/metrics` response using gzip - bump debugging agents to latest versions - expose prometheus interfaces for custom metrics (custom tags) - cleanup of open sockets metrics - fix deprecation warnings for header access --- services/filestore/app.js | 2 +- services/filestore/app/js/FileController.js | 2 +- services/filestore/app/js/FileConverter.js | 2 +- services/filestore/app/js/ImageOptimiser.js | 2 +- services/filestore/app/js/LocalFileWriter.js | 2 +- 
services/filestore/app/js/PersistorManager.js | 2 +- services/filestore/app/js/RequestLogger.js | 2 +- services/filestore/package-lock.json | 1530 ++++++++--------- services/filestore/package.json | 2 +- .../test/unit/js/FileControllerTests.js | 2 +- .../test/unit/js/FileConverterTests.js | 2 +- .../test/unit/js/ImageOptimiserTests.js | 2 +- .../test/unit/js/LocalFileWriterTests.js | 2 +- 13 files changed, 728 insertions(+), 826 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index 7a2a866ea2..5f59b13bed 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -1,4 +1,4 @@ -const Metrics = require('metrics-sharelatex') +const Metrics = require('@overleaf/metrics') Metrics.initialize(process.env.METRICS_APP_NAME || 'filestore') const logger = require('logger-sharelatex') diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index 80b67d7e9c..e087746811 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -1,6 +1,6 @@ const PersistorManager = require('./PersistorManager') const FileHandler = require('./FileHandler') -const metrics = require('metrics-sharelatex') +const metrics = require('@overleaf/metrics') const parseRange = require('range-parser') const Errors = require('./Errors') const { pipeline } = require('stream') diff --git a/services/filestore/app/js/FileConverter.js b/services/filestore/app/js/FileConverter.js index 3258fa62fd..18137f16b7 100644 --- a/services/filestore/app/js/FileConverter.js +++ b/services/filestore/app/js/FileConverter.js @@ -1,4 +1,4 @@ -const metrics = require('metrics-sharelatex') +const metrics = require('@overleaf/metrics') const Settings = require('settings-sharelatex') const { callbackify } = require('util') diff --git a/services/filestore/app/js/ImageOptimiser.js b/services/filestore/app/js/ImageOptimiser.js index 9c2ee95c5a..1ee9d6acc7 100644 --- a/services/filestore/app/js/ImageOptimiser.js +++ b/services/filestore/app/js/ImageOptimiser.js @@ -1,5 +1,5 @@ const logger = require('logger-sharelatex') -const metrics = require('metrics-sharelatex') +const metrics = require('@overleaf/metrics') const { callbackify } = require('util') const safeExec = require('./SafeExec').promises diff --git a/services/filestore/app/js/LocalFileWriter.js b/services/filestore/app/js/LocalFileWriter.js index 6338dcecb0..501856e2f1 100644 --- a/services/filestore/app/js/LocalFileWriter.js +++ b/services/filestore/app/js/LocalFileWriter.js @@ -3,7 +3,7 @@ const uuid = require('node-uuid') const path = require('path') const Stream = require('stream') const { callbackify, promisify } = require('util') -const metrics = require('metrics-sharelatex') +const metrics = require('@overleaf/metrics') const Settings = require('settings-sharelatex') const { WriteError } = require('./Errors') diff --git a/services/filestore/app/js/PersistorManager.js b/services/filestore/app/js/PersistorManager.js index c17e251ce9..1af996a7ab 100644 --- a/services/filestore/app/js/PersistorManager.js +++ b/services/filestore/app/js/PersistorManager.js @@ -1,7 +1,7 @@ const settings = require('settings-sharelatex') const persistorSettings = settings.filestore -persistorSettings.Metrics = require('metrics-sharelatex') +persistorSettings.Metrics = require('@overleaf/metrics') persistorSettings.paths = settings.path const ObjectPersistor = require('@overleaf/object-persistor') diff --git a/services/filestore/app/js/RequestLogger.js 
b/services/filestore/app/js/RequestLogger.js index ae706c2829..956fd3287d 100644 --- a/services/filestore/app/js/RequestLogger.js +++ b/services/filestore/app/js/RequestLogger.js @@ -1,5 +1,5 @@ const logger = require('logger-sharelatex') -const metrics = require('metrics-sharelatex') +const metrics = require('@overleaf/metrics') class RequestLogger { constructor() { diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index ca4805bb7e..eba7645b4a 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -169,200 +169,6 @@ } } }, - "@google-cloud/debug-agent": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-3.2.0.tgz", - "integrity": "sha512-fP87kYbS6aeDna08BivwQ1J260mwJGchRi99XdWCgqbRwuFac8ul0OT5i2wEeDSc5QaDX8ZuWQQ0igZvh1rTyQ==", - "requires": { - "@google-cloud/common": "^0.32.0", - "@sindresorhus/is": "^0.15.0", - "acorn": "^6.0.0", - "coffeescript": "^2.0.0", - "console-log-level": "^1.4.0", - "extend": "^3.0.1", - "findit2": "^2.2.3", - "gcp-metadata": "^1.0.0", - "lodash.pickby": "^4.6.0", - "p-limit": "^2.2.0", - "pify": "^4.0.1", - "semver": "^6.0.0", - "source-map": "^0.6.1", - "split": "^1.0.0" - }, - "dependencies": { - "@google-cloud/common": { - "version": "0.32.1", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", - "integrity": "sha512-bLdPzFvvBMtVkwsoBtygE9oUm3yrNmPa71gvOgucYI/GqvNP2tb6RYsDHPq98kvignhcgHGDI5wyNgxaCo8bKQ==", - "requires": { - "@google-cloud/projectify": "^0.3.3", - "@google-cloud/promisify": "^0.4.0", - "@types/request": "^2.48.1", - "arrify": "^2.0.0", - "duplexify": "^3.6.0", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^3.1.1", - "pify": "^4.0.1", - "retry-request": "^4.0.0", - "teeny-request": "^3.11.3" - } - }, - "@google-cloud/projectify": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", - "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" - }, - "@google-cloud/promisify": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz", - "integrity": "sha512-4yAHDC52TEMCNcMzVC8WlqnKKKq+Ssi2lXoUg9zWWkZ6U6tq9ZBRYLHHCRdfU+EU9YJsVmivwGcKYCjRGjnf4Q==" - }, - "agent-base": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", - "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", - "requires": { - "es6-promisify": "^5.0.0" - } - }, - "coffeescript": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", - "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" - }, - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "requires": { - "ms": "^2.1.1" - } - }, - "gaxios": { - "version": "1.8.4", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", - "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^2.2.1", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - 
"version": "1.0.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz", - "integrity": "sha512-Q6HrgfrCQeEircnNP3rCcEgiDv7eF9+1B+1MMgpE190+/+0mjQR8PxeOaRgxZWmdDAF9EIryHB9g1moPiw1SbQ==", - "requires": { - "gaxios": "^1.0.2", - "json-bigint": "^0.3.0" - } - }, - "google-auth-library": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", - "integrity": "sha512-cDQMzTotwyWMrg5jRO7q0A4TL/3GWBgO7I7q5xGKNiiFf9SmGY/OJ1YsLMgI2MVHHsEGyrqYnbnmV1AE+Z6DnQ==", - "requires": { - "base64-js": "^1.3.0", - "fast-text-encoding": "^1.0.0", - "gaxios": "^1.2.1", - "gcp-metadata": "^1.0.0", - "gtoken": "^2.3.2", - "https-proxy-agent": "^2.2.1", - "jws": "^3.1.5", - "lru-cache": "^5.0.0", - "semver": "^5.5.0" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - } - } - }, - "google-p12-pem": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", - "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", - "requires": { - "node-forge": "^0.8.0", - "pify": "^4.0.0" - } - }, - "gtoken": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", - "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", - "requires": { - "gaxios": "^1.0.4", - "google-p12-pem": "^1.0.0", - "jws": "^3.1.5", - "mime": "^2.2.0", - "pify": "^4.0.0" - } - }, - "https-proxy-agent": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", - "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", - "requires": { - "agent-base": "^4.3.0", - "debug": "^3.1.0" - } - }, - "jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", - "requires": { - "buffer-equal-constant-time": "1.0.1", - "ecdsa-sig-formatter": "1.0.11", - "safe-buffer": "^5.0.1" - } - }, - "jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", - "requires": { - "jwa": "^1.4.1", - "safe-buffer": "^5.0.1" - } - }, - "mime": { - "version": "2.4.6", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", - "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node-forge": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", - "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==" - }, - "teeny-request": { - "version": "3.11.3", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", - "integrity": 
"sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", - "requires": { - "https-proxy-agent": "^2.2.1", - "node-fetch": "^2.2.0", - "uuid": "^3.3.2" - } - }, - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" - } - } - }, "@google-cloud/logging": { "version": "7.3.0", "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.3.0.tgz", @@ -425,235 +231,6 @@ "extend": "^3.0.2" } }, - "@google-cloud/profiler": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-0.2.3.tgz", - "integrity": "sha512-rNvtrFtIebIxZEJ/O0t8n7HciZGIXBo8DvHxWqAmsCaeLvkTtsaL6HmPkwxrNQ1IhbYWAxF+E/DwCiHyhKmgTg==", - "requires": { - "@google-cloud/common": "^0.26.0", - "@types/console-log-level": "^1.4.0", - "@types/semver": "^5.5.0", - "bindings": "^1.2.1", - "console-log-level": "^1.4.0", - "delay": "^4.0.1", - "extend": "^3.0.1", - "gcp-metadata": "^0.9.0", - "nan": "^2.11.1", - "parse-duration": "^0.1.1", - "pify": "^4.0.0", - "pretty-ms": "^4.0.0", - "protobufjs": "~6.8.6", - "semver": "^5.5.0", - "teeny-request": "^3.3.0" - }, - "dependencies": { - "@google-cloud/common": { - "version": "0.26.2", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.26.2.tgz", - "integrity": "sha512-xJ2M/q3MrUbnYZuFlpF01caAlEhAUoRn0NXp93Hn3pkFpfSOG8YfbKbpBAHvcKVbBOAKVIwPsleNtuyuabUwLQ==", - "requires": { - "@google-cloud/projectify": "^0.3.2", - "@google-cloud/promisify": "^0.3.0", - "@types/duplexify": "^3.5.0", - "@types/request": "^2.47.0", - "arrify": "^1.0.1", - "duplexify": "^3.6.0", - "ent": "^2.2.0", - "extend": "^3.0.1", - "google-auth-library": "^2.0.0", - "pify": "^4.0.0", - "retry-request": "^4.0.0", - "through2": "^3.0.0" - } - }, - "@google-cloud/projectify": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", - "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" - }, - "@google-cloud/promisify": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.3.1.tgz", - "integrity": "sha512-QzB0/IMvB0eFxFK7Eqh+bfC8NLv3E9ScjWQrPOk6GgfNroxcVITdTlT8NRsRrcp5+QQJVPLkRqKG0PUdaWXmHw==" - }, - "@types/node": { - "version": "10.17.26", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.26.tgz", - "integrity": "sha512-myMwkO2Cr82kirHY8uknNRHEVtn0wV3DTQfkrjx17jmkstDRZ24gNUdl8AHXVyVclTYI/bNjgTPTAWvWLqXqkw==" - }, - "agent-base": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", - "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", - "requires": { - "es6-promisify": "^5.0.0" - } - }, - "arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==" - }, - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "requires": { - "ms": "^2.1.1" - } - }, - "gaxios": { - "version": "1.8.4", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", 
- "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^2.2.1", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "0.9.3", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.9.3.tgz", - "integrity": "sha512-caV4S84xAjENtpezLCT/GILEAF5h/bC4cNqZFmt/tjTn8t+JBtTkQrgBrJu3857YdsnlM8rxX/PMcKGtE8hUlw==", - "requires": { - "gaxios": "^1.0.2", - "json-bigint": "^0.3.0" - } - }, - "google-auth-library": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-2.0.2.tgz", - "integrity": "sha512-FURxmo1hBVmcfLauuMRKOPYAPKht3dGuI2wjeJFalDUThO0HoYVjr4yxt5cgYSFm1dgUpmN9G/poa7ceTFAIiA==", - "requires": { - "axios": "^0.18.0", - "gcp-metadata": "^0.7.0", - "gtoken": "^2.3.0", - "https-proxy-agent": "^2.2.1", - "jws": "^3.1.5", - "lru-cache": "^5.0.0", - "semver": "^5.5.0" - }, - "dependencies": { - "gcp-metadata": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-0.7.0.tgz", - "integrity": "sha512-ffjC09amcDWjh3VZdkDngIo7WoluyC5Ag9PAYxZbmQLOLNI8lvPtoKTSCyU54j2gwy5roZh6sSMTfkY2ct7K3g==", - "requires": { - "axios": "^0.18.0", - "extend": "^3.0.1", - "retry-axios": "0.3.2" - } - } - } - }, - "google-p12-pem": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", - "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", - "requires": { - "node-forge": "^0.8.0", - "pify": "^4.0.0" - } - }, - "gtoken": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", - "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", - "requires": { - "gaxios": "^1.0.4", - "google-p12-pem": "^1.0.0", - "jws": "^3.1.5", - "mime": "^2.2.0", - "pify": "^4.0.0" - } - }, - "https-proxy-agent": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", - "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", - "requires": { - "agent-base": "^4.3.0", - "debug": "^3.1.0" - } - }, - "jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", - "requires": { - "buffer-equal-constant-time": "1.0.1", - "ecdsa-sig-formatter": "1.0.11", - "safe-buffer": "^5.0.1" - } - }, - "jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", - "requires": { - "jwa": "^1.4.1", - "safe-buffer": "^5.0.1" - } - }, - "mime": { - "version": "2.4.6", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", - "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node-forge": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", - "integrity": 
"sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==" - }, - "protobufjs": { - "version": "6.8.9", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.9.tgz", - "integrity": "sha512-j2JlRdUeL/f4Z6x4aU4gj9I2LECglC+5qR2TrWb193Tla1qfdaNQTZ8I27Pt7K0Ajmvjjpft7O3KWTGciz4gpw==", - "requires": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.0", - "@types/node": "^10.1.0", - "long": "^4.0.0" - } - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - }, - "teeny-request": { - "version": "3.11.3", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", - "integrity": "sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", - "requires": { - "https-proxy-agent": "^2.2.1", - "node-fetch": "^2.2.0", - "uuid": "^3.3.2" - } - }, - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" - } - } - }, "@google-cloud/projectify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz", @@ -873,194 +450,6 @@ } } }, - "@google-cloud/trace-agent": { - "version": "3.6.1", - "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-3.6.1.tgz", - "integrity": "sha512-KDo85aPN4gSxJ7oEIOlKd7aGENZFXAM1kbIn1Ds+61gh/K1CQWSyepgJo3nUpAwH6D1ezDWV7Iaf8ueoITc8Uw==", - "requires": { - "@google-cloud/common": "^0.32.1", - "builtin-modules": "^3.0.0", - "console-log-level": "^1.4.0", - "continuation-local-storage": "^3.2.1", - "extend": "^3.0.0", - "gcp-metadata": "^1.0.0", - "hex2dec": "^1.0.1", - "is": "^3.2.0", - "methods": "^1.1.1", - "require-in-the-middle": "^4.0.0", - "semver": "^6.0.0", - "shimmer": "^1.2.0", - "uuid": "^3.0.1" - }, - "dependencies": { - "@google-cloud/common": { - "version": "0.32.1", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", - "integrity": "sha512-bLdPzFvvBMtVkwsoBtygE9oUm3yrNmPa71gvOgucYI/GqvNP2tb6RYsDHPq98kvignhcgHGDI5wyNgxaCo8bKQ==", - "requires": { - "@google-cloud/projectify": "^0.3.3", - "@google-cloud/promisify": "^0.4.0", - "@types/request": "^2.48.1", - "arrify": "^2.0.0", - "duplexify": "^3.6.0", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^3.1.1", - "pify": "^4.0.1", - "retry-request": "^4.0.0", - "teeny-request": "^3.11.3" - } - }, - "@google-cloud/projectify": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", - "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" - }, - "@google-cloud/promisify": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz", - "integrity": "sha512-4yAHDC52TEMCNcMzVC8WlqnKKKq+Ssi2lXoUg9zWWkZ6U6tq9ZBRYLHHCRdfU+EU9YJsVmivwGcKYCjRGjnf4Q==" - }, - "agent-base": { - "version": "4.3.0", 
- "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", - "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", - "requires": { - "es6-promisify": "^5.0.0" - } - }, - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "requires": { - "ms": "^2.1.1" - } - }, - "gaxios": { - "version": "1.8.4", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", - "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^2.2.1", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz", - "integrity": "sha512-Q6HrgfrCQeEircnNP3rCcEgiDv7eF9+1B+1MMgpE190+/+0mjQR8PxeOaRgxZWmdDAF9EIryHB9g1moPiw1SbQ==", - "requires": { - "gaxios": "^1.0.2", - "json-bigint": "^0.3.0" - } - }, - "google-auth-library": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", - "integrity": "sha512-cDQMzTotwyWMrg5jRO7q0A4TL/3GWBgO7I7q5xGKNiiFf9SmGY/OJ1YsLMgI2MVHHsEGyrqYnbnmV1AE+Z6DnQ==", - "requires": { - "base64-js": "^1.3.0", - "fast-text-encoding": "^1.0.0", - "gaxios": "^1.2.1", - "gcp-metadata": "^1.0.0", - "gtoken": "^2.3.2", - "https-proxy-agent": "^2.2.1", - "jws": "^3.1.5", - "lru-cache": "^5.0.0", - "semver": "^5.5.0" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - } - } - }, - "google-p12-pem": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", - "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", - "requires": { - "node-forge": "^0.8.0", - "pify": "^4.0.0" - } - }, - "gtoken": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", - "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", - "requires": { - "gaxios": "^1.0.4", - "google-p12-pem": "^1.0.0", - "jws": "^3.1.5", - "mime": "^2.2.0", - "pify": "^4.0.0" - } - }, - "https-proxy-agent": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", - "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", - "requires": { - "agent-base": "^4.3.0", - "debug": "^3.1.0" - } - }, - "jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", - "requires": { - "buffer-equal-constant-time": "1.0.1", - "ecdsa-sig-formatter": "1.0.11", - "safe-buffer": "^5.0.1" - } - }, - "jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", - "requires": { - "jwa": "^1.4.1", - "safe-buffer": "^5.0.1" - } - }, - "mime": 
{ - "version": "2.4.6", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", - "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node-forge": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", - "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==" - }, - "teeny-request": { - "version": "3.11.3", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", - "integrity": "sha512-CKncqSF7sH6p4rzCgkb/z/Pcos5efl0DmolzvlqRQUNcpRIruOhY9+T1FsIlyEbfWd7MsFpodROOwHYh2BaXzw==", - "requires": { - "https-proxy-agent": "^2.2.1", - "node-fetch": "^2.2.0", - "uuid": "^3.3.2" - } - }, - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" - } - } - }, "@grpc/grpc-js": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.5.tgz", @@ -1114,6 +503,330 @@ } } }, + "@overleaf/metrics": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/@overleaf/metrics/-/metrics-3.4.1.tgz", + "integrity": "sha512-OgjlzuC+2gPdIEDHhmd9LDMu01tk1ln0cJhw1727BZ+Wgf2Z1hjuHRt4JeCkf+PFTHwJutVYT8v6IGPpNEPtbg==", + "requires": { + "@google-cloud/debug-agent": "^5.1.2", + "@google-cloud/profiler": "^4.0.3", + "@google-cloud/trace-agent": "^5.1.1", + "compression": "^1.7.4", + "prom-client": "^11.1.3", + "underscore": "~1.6.0", + "yn": "^3.1.1" + }, + "dependencies": { + "@google-cloud/common": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.5.0.tgz", + "integrity": "sha512-10d7ZAvKhq47L271AqvHEd8KzJqGU45TY+rwM2Z3JHuB070FeTi7oJJd7elfrnKaEvaktw3hH2wKnRWxk/3oWQ==", + "requires": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^6.1.1", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/debug-agent": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-5.1.3.tgz", + "integrity": "sha512-WbzeEz4MvPlM7DX2QBsPcWgF62u7LSQv/oMYPl0L+TddTebqjDKiVXwxpzWk61NIfcKiet3dyCbPIt3N5o8XPQ==", + "requires": { + "@google-cloud/common": "^3.0.0", + "acorn": "^8.0.0", + "coffeescript": "^2.0.0", + "console-log-level": "^1.4.0", + "extend": "^3.0.2", + "findit2": "^2.2.3", + "gcp-metadata": "^4.0.0", + "p-limit": "^3.0.1", + "semver": "^7.0.0", + "source-map": "^0.6.1", + "split": "^1.0.0" + } + }, + "@google-cloud/profiler": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-4.1.0.tgz", + "integrity": "sha512-9e1zXRctLSUHAoAsFGwE4rS28fr0siiG+jXl5OpwTK8ZAUlxb70aosHaZGdsv8YXrYKjuiufjRZ/OXCs0XLI9g==", + "requires": { + "@google-cloud/common": "^3.0.0", + "@types/console-log-level": "^1.4.0", + "@types/semver": "^7.0.0", + "console-log-level": "^1.4.0", + "delay": "^4.0.1", + "extend": "^3.0.2", + "gcp-metadata": "^4.0.0", + "parse-duration": "^0.4.4", + "pprof": "3.0.0", + "pretty-ms": "^7.0.0", + "protobufjs": 
"~6.10.0", + "semver": "^7.0.0", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/projectify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" + }, + "@google-cloud/promisify": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" + }, + "@google-cloud/trace-agent": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-5.1.1.tgz", + "integrity": "sha512-YTcK0RLN90pLCprg0XC8uV4oAVd79vsXhkcxmEVwiOOYjUDvSrAhb7y/0SY606zgfhJHmUTNb/fZSWEtZP/slQ==", + "requires": { + "@google-cloud/common": "^3.0.0", + "@opencensus/propagation-stackdriver": "0.0.22", + "builtin-modules": "^3.0.0", + "console-log-level": "^1.4.0", + "continuation-local-storage": "^3.2.1", + "extend": "^3.0.2", + "gcp-metadata": "^4.0.0", + "google-auth-library": "^6.0.0", + "hex2dec": "^1.0.1", + "is": "^3.2.0", + "methods": "^1.1.1", + "require-in-the-middle": "^5.0.0", + "semver": "^7.0.0", + "shimmer": "^1.2.0", + "source-map-support": "^0.5.16", + "uuid": "^8.0.0" + } + }, + "@opencensus/core": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", + "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", + "requires": { + "continuation-local-storage": "^3.2.1", + "log-driver": "^1.2.7", + "semver": "^7.0.0", + "shimmer": "^1.2.0", + "uuid": "^8.0.0" + } + }, + "@opencensus/propagation-stackdriver": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", + "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", + "requires": { + "@opencensus/core": "^0.0.22", + "hex2dec": "^1.0.1", + "uuid": "^8.0.0" + } + }, + "@types/semver": { + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.4.tgz", + "integrity": "sha512-+nVsLKlcUCeMzD2ufHEYuJ9a2ovstb6Dp52A5VsoKxDXgvE051XgHI/33I1EymwkRGQkwnA0LkhnUzituGs4EQ==" + }, + "acorn": { + "version": "8.0.4", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.0.4.tgz", + "integrity": "sha512-XNP0PqF1XD19ZlLKvB7cMmnZswW4C/03pRHgirB30uSJTaS3A3V1/P4sS3HPvFmjoriPCJQs+JDSbm4bL1TxGQ==" + }, + "bignumber.js": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" + }, + "coffeescript": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", + "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" + }, + "debug": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "requires": { + "ms": "2.1.2" + } + }, + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": 
"sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "gaxios": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.0.1.tgz", + "integrity": "sha512-jOin8xRZ/UytQeBpSXFqIzqU7Fi5TqgPNLlUsSB8kjJ76+FiGBfImF8KJu++c6J4jOldfJUtt0YmkRj2ZpSHTQ==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", + "requires": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.3.tgz", + "integrity": "sha512-m9mwvY3GWbr7ZYEbl61isWmk+fvTmOt0YNUfPOUY2VH8K5pZlAIWJjxEi0PqR3OjMretyiQLI6GURMrPSwHQ2g==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", + "requires": { + "node-forge": "^0.10.0" + } + }, + "gtoken": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.1.0.tgz", + "integrity": "sha512-4d8N6Lk8TEAHl9vVoRVMh9BNOKWVgl2DdNtr3428O75r3QFrF/a5MMu851VmK0AA8+iSvbwRv69k5XnMLURGhg==", + "requires": { + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0", + "mime": "^2.2.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "mime": { + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", + "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node-forge": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "requires": { + 
"yocto-queue": "^0.1.0" + } + }, + "parse-duration": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.4.4.tgz", + "integrity": "sha512-KbAJuYGUhZkB9gotDiKLnZ7Z3VTacK3fgwmDdB6ZVDtJbMBT6MfLga0WJaYpPDu0mzqT0NgHtHDt5PY4l0nidg==" + }, + "pretty-ms": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz", + "integrity": "sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q==", + "requires": { + "parse-ms": "^2.1.0" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "require-in-the-middle": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.0.3.tgz", + "integrity": "sha512-p/ICV8uMlqC4tjOYabLMxAWCIKa0YUQgZZ6KDM0xgXJNgdGQ1WmL2A07TwmrZw+wi6ITUFKzH5v3n+ENEyXVkA==", + "requires": { + "debug": "^4.1.1", + "module-details-from-path": "^1.0.3", + "resolve": "^1.12.0" + } + }, + "semver": { + "version": "7.3.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz", + "integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==" + }, + "teeny-request": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + } + }, + "uuid": { + "version": "8.3.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.1.tgz", + "integrity": "sha512-FOmRr+FmWEIG8uhZv6C2bTgEVXsHk08kE7mPlrBbEe+c3r9pjceVPgupIfNIhc4yx55H69OXANrUaSuu9eInKg==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==" + } + } + }, "@overleaf/o-error": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-3.0.0.tgz", @@ -1188,11 +901,6 @@ "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" }, - "@sindresorhus/is": { - "version": "0.15.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.15.0.tgz", - "integrity": "sha512-lu8BpxjAtRCAo5ifytTpCPCj99LF7o/2Myn+NXyNCBqvPYn7Pjd76AMmUB5l7XF1U6t0hcWrlEM5ESufW7wAeA==" - }, "@sinonjs/commons": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.0.tgz", @@ -1243,24 +951,11 @@ "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.0.0.tgz", "integrity": "sha512-KYyTT/T6ALPkIRd2Ge080X/BsXvy9O0hcWTtMWkPvwAwF99+vn6Dv4GzrFT/Nn1LePr+FFDbRXXlqmsy9lw2zA==" }, - "@types/caseless": { - "version": "0.12.2", - "resolved": 
"https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz", - "integrity": "sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==" - }, "@types/console-log-level": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz", "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ==" }, - "@types/duplexify": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@types/duplexify/-/duplexify-3.6.0.tgz", - "integrity": "sha512-5zOA53RUlzN74bvrSGwjudssD9F3a797sDZQkiYpUOxW+WHaXTCPz4/d5Dgi6FKnOqZ2CpaTo0DhgIfsXAOE/A==", - "requires": { - "@types/node": "*" - } - }, "@types/eslint-visitor-keys": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/@types/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz", @@ -1297,39 +992,6 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.13.tgz", "integrity": "sha512-UfvBE9oRCAJVzfR+3eWm/sdLFe/qroAPEXP3GPJ1SehQiEVgZT6NQZWYbPMiJ3UdcKM06v4j+S1lTcdWCmw+3g==" }, - "@types/request": { - "version": "2.48.5", - "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.5.tgz", - "integrity": "sha512-/LO7xRVnL3DxJ1WkPGDQrp4VTV1reX9RkC85mJ+Qzykj2Bdw+mG15aAfDahc76HtknjzE16SX/Yddn6MxVbmGQ==", - "requires": { - "@types/caseless": "*", - "@types/node": "*", - "@types/tough-cookie": "*", - "form-data": "^2.5.0" - }, - "dependencies": { - "form-data": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", - "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - } - } - } - }, - "@types/semver": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", - "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" - }, - "@types/tough-cookie": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.0.tgz", - "integrity": "sha512-I99sngh224D0M7XgW1s120zxCt3VYQ3IQsuw3P3jbq5GG4yc79+ZjyKznyOGIQrflfylLgcfekeZW/vk0yng6A==" - }, "@typescript-eslint/experimental-utils": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-1.13.0.tgz", @@ -1383,6 +1045,11 @@ } } }, + "abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" + }, "abort-controller": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", @@ -1403,7 +1070,8 @@ "acorn": { "version": "6.4.1", "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", - "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==" + "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==", + "dev": true }, "acorn-jsx": { "version": "5.2.0", @@ -1493,6 +1161,20 @@ "picomatch": "^2.0.4" } }, + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + }, + 
"are-we-there-yet": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz", + "integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==", + "requires": { + "delegates": "^1.0.0", + "readable-stream": "^2.0.6" + } + }, "argparse": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", @@ -1612,15 +1294,6 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.9.1.tgz", "integrity": "sha512-wMHVg2EOHaMRxbzgFJ9gtjOOCrI80OHLG14rxi28XwOW8ux6IiEbRCGGGqCtdAIg4FQCbW20k9RsT4y3gJlFug==" }, - "axios": { - "version": "0.18.1", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz", - "integrity": "sha512-0BfJq4NSfQXd+SkFdrvFbG7addhYSBA2mQwISr46pD6E5iqkWg02RAs8vyTT/j0RTnoYmeXauBuSv1qKwR179g==", - "requires": { - "follow-redirects": "1.5.10", - "is-buffer": "^2.0.2" - } - }, "babel-eslint": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.1.0.tgz", @@ -1675,7 +1348,7 @@ "bintrees": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", - "integrity": "sha512-tbaUB1QpTIj4cKY8c1rvNAvEQXA+ekzHmbe4jzNfW3QWsF9GnnP/BRWyl6/qqS53heoYJ93naaFcm/jooONH8g==" + "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" }, "bl": { "version": "2.2.1", @@ -1876,6 +1549,11 @@ "readdirp": "~3.2.0" } }, + "chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" + }, "cli-cursor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", @@ -1927,6 +1605,11 @@ } } }, + "code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" + }, "coffee-script": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", @@ -1969,6 +1652,27 @@ "mime-db": ">= 1.43.0 < 2" } }, + "compression": { + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", + "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", + "requires": { + "accepts": "~1.3.5", + "bytes": "3.0.0", + "compressible": "~2.0.16", + "debug": "2.6.9", + "on-headers": "~1.0.2", + "safe-buffer": "5.1.2", + "vary": "~1.1.2" + }, + "dependencies": { + "bytes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" + } + } + }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -2010,6 +1714,11 @@ "xdg-basedir": "^4.0.0" } }, + "console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=" + }, "console-log-level": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", @@ -2136,6 +1845,11 @@ "type-detect": "^4.0.0" } }, + "deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": 
"sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==" + }, "deep-is": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", @@ -2161,6 +1875,11 @@ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, + "delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" + }, "denque": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz", @@ -2177,6 +1896,11 @@ "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", "integrity": "sha512-3NdhDuEXnfun/z7x9GOElY49LoqVHoGScmOKwmxhsS8N5Y+Z8KyPPDnaSzqWgYt/ji4mqwfTS34Htrk0zPIXVg==" }, + "detect-libc": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", + "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=" + }, "diff": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", @@ -2325,19 +2049,6 @@ "is-symbol": "^1.0.2" } }, - "es6-promise": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", - "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==" - }, - "es6-promisify": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha512-C+d6UdsYDk0lMebHNR4S2NybQMMngAOnOwYBQjTOiv0MkoJMP0Myw2mgpDLBcpfCmRLxyFqYhS/CfOENq4SJhQ==", - "requires": { - "es6-promise": "^4.0.3" - } - }, "escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", @@ -2877,7 +2588,7 @@ "findit2": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", - "integrity": "sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==" + "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" }, "flat": { "version": "4.1.0", @@ -2916,24 +2627,6 @@ "integrity": "sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg==", "dev": true }, - "follow-redirects": { - "version": "1.5.10", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", - "integrity": "sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==", - "requires": { - "debug": "=3.1.0" - }, - "dependencies": { - "debug": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", - "requires": { - "ms": "2.0.0" - } - } - } - }, "forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", @@ -2959,6 +2652,14 @@ "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==" }, + "fs-minipass": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz", + "integrity": "sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==", + "requires": { + "minipass": "^2.6.0" + } + }, "fs.realpath": { 
"version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -2983,6 +2684,54 @@ "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", "dev": true }, + "gauge": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", + "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", + "requires": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "^2.0.0" + } + } + } + }, "gaxios": { "version": "2.3.4", "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.3.4.tgz", @@ -3379,6 +3128,11 @@ "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", "dev": true }, + "has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" + }, "hash-stream-validation": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/hash-stream-validation/-/hash-stream-validation-0.2.2.tgz", @@ -3505,6 +3259,14 @@ "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", "dev": true }, + "ignore-walk": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.3.tgz", + "integrity": "sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==", + "requires": { + "minimatch": "^3.0.4" + } + }, "import-fresh": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz", @@ -3540,6 +3302,11 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==" }, + "ini": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", + "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==" + }, "inquirer": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.4.tgz", @@ -3589,7 +3356,8 @@ "is-buffer": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", - "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==" + "integrity": 
"sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==", + "dev": true }, "is-callable": { "version": "1.2.0", @@ -3885,11 +3653,6 @@ "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" }, - "lodash.pickby": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", - "integrity": "sha512-AZV+GsS/6ckvPOVQPXSiFFacKvKB4kOQu6ynt9wz0F3LO4R9Ij4K1ddYsIytDpSgLz88JHd9P+oaLeej5/Sl7Q==" - }, "lodash.unescape": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/lodash.unescape/-/lodash.unescape-4.0.1.tgz", @@ -3994,15 +3757,6 @@ "yallist": "^3.0.2" } }, - "lynx": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/lynx/-/lynx-0.1.1.tgz", - "integrity": "sha512-JI52N0NwK2b/Md0TFPdPtUBI46kjyJXF7+q08l2yvQ56q6QA8s7ZjZQQRoxFpS2jDXNf/B0p8ID+OIKcTsZwzw==", - "requires": { - "mersenne": "~0.0.3", - "statsd-parser": "~0.0.4" - } - }, "make-dir": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.0.2.tgz", @@ -4068,11 +3822,6 @@ "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" }, - "mersenne": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", - "integrity": "sha512-XoSUL+nF8hMTKGQxUs8r3Btdsf1yuKKBdCCGbh3YXgCXuVKishpZv1CNc385w9s8t4Ynwc5h61BwW/FCVulkbg==" - }, "messageformat": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/messageformat/-/messageformat-2.3.0.tgz", @@ -4101,28 +3850,6 @@ "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" }, - "metrics-sharelatex": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.7.0.tgz", - "integrity": "sha512-fv7TsM2pVhY/VuHqP5p9Ip2BhHfGPrP0e2TuZGCMK9/J83ZuGCrxkpaU3OOz31CWcmcQ5am4Du70EZAaIdA/2A==", - "requires": { - "@google-cloud/debug-agent": "^3.0.0", - "@google-cloud/profiler": "^0.2.3", - "@google-cloud/trace-agent": "^3.2.0", - "coffee-script": "1.6.0", - "lynx": "~0.1.1", - "prom-client": "^11.1.3", - "underscore": "~1.6.0", - "yn": "^3.1.1" - }, - "dependencies": { - "yn": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", - "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==" - } - } - }, "mime": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", @@ -4159,6 +3886,23 @@ "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" }, + "minipass": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz", + "integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==", + "requires": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + } + }, + "minizlib": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.3.3.tgz", + "integrity": "sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==", 
+ "requires": { + "minipass": "^2.9.0" + } + }, "mkdirp": { "version": "0.5.5", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", @@ -4317,7 +4061,7 @@ "module-details-from-path": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", - "integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==" + "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" }, "moment": { "version": "2.24.0", @@ -4402,6 +4146,36 @@ "integrity": "sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA==", "optional": true }, + "needle": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/needle/-/needle-2.5.2.tgz", + "integrity": "sha512-LbRIwS9BfkPvNwNHlsA41Q29kL2L/6VaOJ0qisM5lLWsTV3nP15abO5ITL6L81zqFhzjRKDAYjpcBcwM0AVvLQ==", + "requires": { + "debug": "^3.2.6", + "iconv-lite": "^0.4.4", + "sax": "^1.2.4" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "sax": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" + } + } + }, "negotiator": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", @@ -4477,11 +4251,44 @@ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.1.tgz", "integrity": "sha512-G6RlQt5Sb4GMBzXvhfkeFmbqR6MzhtnT7VTHuLadjkii3rdYHNdw0m8zA4BTxVIh68FicCQ2NSUANpsqkr9jvQ==" }, + "node-pre-gyp": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.16.0.tgz", + "integrity": "sha512-4efGA+X/YXAHLi1hN8KaPrILULaUn2nWecFrn1k2I+99HpoyvcOGEbtcOxpDiUwPF2ZANMJDh32qwOUPenuR1g==", + "requires": { + "detect-libc": "^1.0.2", + "mkdirp": "^0.5.3", + "needle": "^2.5.0", + "nopt": "^4.0.1", + "npm-packlist": "^1.1.6", + "npmlog": "^4.0.2", + "rc": "^1.2.7", + "rimraf": "^2.6.1", + "semver": "^5.3.0", + "tar": "^4.4.2" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, "node-uuid": { "version": "1.4.8", "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", "integrity": "sha512-TkCET/3rr9mUuRp+CpO7qfgT++aAxfDRaalQhwPFzI9BY/2rCDn6OfpZOVggi1AXfTPpfkTrg5f5WQx5G1uLxA==" }, + "nopt": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.3.tgz", + "integrity": "sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==", + "requires": { + "abbrev": "1", + "osenv": "^0.1.4" + } + }, "normalize-package-data": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", @@ -4508,11 +4315,55 @@ "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", "dev": true 
}, + "npm-bundled": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz", + "integrity": "sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==", + "requires": { + "npm-normalize-package-bin": "^1.0.1" + } + }, + "npm-normalize-package-bin": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz", + "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==" + }, + "npm-packlist": { + "version": "1.4.8", + "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.8.tgz", + "integrity": "sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==", + "requires": { + "ignore-walk": "^3.0.1", + "npm-bundled": "^1.0.1", + "npm-normalize-package-bin": "^1.0.1" + } + }, + "npmlog": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", + "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "requires": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, + "number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=" + }, "oauth-sign": { "version": "0.9.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" }, + "object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" + }, "object-inspect": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.8.0.tgz", @@ -4567,6 +4418,11 @@ "ee-first": "1.1.1" } }, + "on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" + }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -4597,16 +4453,30 @@ "word-wrap": "~1.2.3" } }, + "os-homedir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=" + }, "os-tmpdir": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==", - "dev": true + "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==" + }, + "osenv": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", + "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==", + "requires": { + "os-homedir": "^1.0.0", + "os-tmpdir": "^1.0.0" + } }, "p-limit": { "version": "2.2.2", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.2.tgz", "integrity": "sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==", + "dev": true, "requires": { "p-try": "^2.0.0" } @@ -4651,11 +4521,6 
@@ "callsites": "^3.0.0" } }, - "parse-duration": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.3.tgz", - "integrity": "sha512-hMOZHfUmjxO5hMKn7Eft+ckP2M4nV4yzauLXiw3PndpkASnx5r8pDAMcOAiqxoemqWjMWmz4fOHQM6n6WwETXw==" - }, "parse-json": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", @@ -4742,11 +4607,6 @@ "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", "dev": true }, - "pify": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==" - }, "pkg-dir": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", @@ -4756,6 +4616,43 @@ "find-up": "^2.1.0" } }, + "pprof": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pprof/-/pprof-3.0.0.tgz", + "integrity": "sha512-uPWbAhoH/zvq1kM3/Fd/wshb4D7sLlGap8t6uCTER4aZRWqqyPYgXzpjWbT0Unn5U25pEy2VREUu27nQ9o9VPA==", + "requires": { + "bindings": "^1.2.1", + "delay": "^4.0.1", + "findit2": "^2.2.3", + "nan": "^2.14.0", + "node-pre-gyp": "^0.16.0", + "p-limit": "^3.0.0", + "pify": "^5.0.0", + "protobufjs": "~6.10.0", + "source-map": "^0.7.3", + "split": "^1.0.1" + }, + "dependencies": { + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "pify": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz", + "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==" + }, + "source-map": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", + "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==" + } + } + }, "prelude-ls": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", @@ -5337,14 +5234,6 @@ } } }, - "pretty-ms": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz", - "integrity": "sha512-qG66ahoLCwpLXD09ZPHSCbUWYTqdosB7SMP4OffgTgL2PBKXMuUsrk5Bwg8q4qPkjTXsKBMr+YK3Ltd/6F9s/Q==", - "requires": { - "parse-ms": "^2.0.0" - } - }, "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", @@ -5512,6 +5401,24 @@ "unpipe": "1.0.0" } }, + "rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "requires": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "dependencies": { + "strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=" + } + } + }, "read-pkg": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", @@ -5639,31 +5546,6 @@ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": 
true }, - "require-in-the-middle": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-4.0.1.tgz", - "integrity": "sha512-EfkM2zANyGkrfIExsECMeNn/uzjvHrE9h36yLXSavmrDiH4tgDNvltAmEKnt4PNLbqKPHZz+uszW2wTKrLUX0w==", - "requires": { - "debug": "^4.1.1", - "module-details-from-path": "^1.0.3", - "resolve": "^1.12.0" - }, - "dependencies": { - "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "requires": { - "ms": "^2.1.1" - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - } - } - }, "require-like": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", @@ -5730,11 +5612,6 @@ "signal-exit": "^3.0.2" } }, - "retry-axios": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/retry-axios/-/retry-axios-0.3.2.tgz", - "integrity": "sha512-jp4YlI0qyDFfXiXGhkCOliBN1G7fRH03Nqy8YdShzGqbY5/9S2x/IR6C88ls2DFkbWuL3ASkP7QD3pVrNpPgwQ==" - }, "retry-request": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.1.1.tgz", @@ -5759,6 +5636,14 @@ } } }, + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "requires": { + "glob": "^7.1.3" + } + }, "run-async": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.0.tgz", @@ -5864,8 +5749,7 @@ "set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", - "dev": true + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" }, "setprototypeof": { "version": "1.1.1", @@ -5987,6 +5871,15 @@ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, + "source-map-support": { + "version": "0.5.19", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz", + "integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==", + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, "sparse-bitfield": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", @@ -6075,11 +5968,6 @@ "integrity": "sha512-vjUc6sfgtgY0dxCdnc40mK6Oftjo9+2K8H/NG81TMhgL392FtiPA9tn9RLyTxXmTLPJPjF3VyzFp6bsWFLisMQ==", "dev": true }, - "statsd-parser": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/statsd-parser/-/statsd-parser-0.0.4.tgz", - "integrity": "sha512-7XO+ur89EalMXXFQaydsczB8sclr5nDsNIoUu0IzJx1pIbHUhO3LtpSzBwetIuU9DyTLMiVaJBMtWS/Nb2KR4g==" - }, "statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", @@ -6244,10 +6132,24 @@ } } }, + "tar": { + "version": "4.4.13", + "resolved": "https://registry.npmjs.org/tar/-/tar-4.4.13.tgz", + "integrity": 
"sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA==", + "requires": { + "chownr": "^1.1.1", + "fs-minipass": "^1.2.5", + "minipass": "^2.8.6", + "minizlib": "^1.2.1", + "mkdirp": "^0.5.0", + "safe-buffer": "^5.1.2", + "yallist": "^3.0.3" + } + }, "tdigest": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", - "integrity": "sha512-CXcDY/NIgIbKZPx5H4JJNpq6JwJhU5Z4+yWj4ZghDc7/9nVajiRlPPyMXRePPPlBfcayUqtoCXjo7/Hm82ecUA==", + "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=", "requires": { "bintrees": "1.0.1" } @@ -6458,7 +6360,7 @@ "underscore": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", - "integrity": "sha512-z4o1fvKUojIWh9XuaVLUDdf86RQiq13AC1dmHbTpoyuu+bquHms76v16CjycCbec87J7z0k//SiQVk0sMdFmpQ==" + "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag=" }, "unique-string": { "version": "2.0.0", @@ -6641,7 +6543,6 @@ "version": "1.1.3", "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", - "dev": true, "requires": { "string-width": "^1.0.2 || 2" }, @@ -6649,20 +6550,17 @@ "ansi-regex": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha512-wFUFA5bg5dviipbQQ32yOQhl6gcJaJXiHE7dvR8VYPG97+J/GNC5FKGepKdEDUFeXRzDxPF1X/Btc8L+v7oqIQ==", - "dev": true + "integrity": "sha512-wFUFA5bg5dviipbQQ32yOQhl6gcJaJXiHE7dvR8VYPG97+J/GNC5FKGepKdEDUFeXRzDxPF1X/Btc8L+v7oqIQ==" }, "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", - "dev": true + "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==" }, "string-width": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, "requires": { "is-fullwidth-code-point": "^2.0.0", "strip-ansi": "^4.0.0" @@ -6672,7 +6570,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", "integrity": "sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==", - "dev": true, "requires": { "ansi-regex": "^3.0.0" } @@ -6882,6 +6779,11 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/yn/-/yn-4.0.0.tgz", "integrity": "sha512-huWiiCS4TxKc4SfgmTwW1K7JmXPPAmuXWYy4j9qjQo4+27Kni8mGhAAi1cloRWmBe2EqcLgt3IGqQoRL/MtPgg==" + }, + "yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==" } } } diff --git a/services/filestore/package.json b/services/filestore/package.json index 17915dbde5..a4492ca3e6 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -20,6 +20,7 @@ "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" }, "dependencies": { + "@overleaf/metrics": "^3.4.1", "@overleaf/o-error": "^3.0.0", "@overleaf/object-persistor": "git+https://github.com/overleaf/object-persistor.git", "body-parser": "^1.19.0", @@ -28,7 
+29,6 @@ "glob": "^7.1.6", "lodash.once": "^4.1.1", "logger-sharelatex": "^2.2.0", - "metrics-sharelatex": "^2.7.0", "node-uuid": "~1.4.8", "range-parser": "^1.2.1", "request": "^2.88.2", diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js index 4ae65028c7..6a5ae729c8 100644 --- a/services/filestore/test/unit/js/FileControllerTests.js +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -59,7 +59,7 @@ describe('FileController', function () { './Errors': Errors, stream: stream, 'settings-sharelatex': settings, - 'metrics-sharelatex': { + '@overleaf/metrics': { inc() {} } }, diff --git a/services/filestore/test/unit/js/FileConverterTests.js b/services/filestore/test/unit/js/FileConverterTests.js index 6d97783f0c..b6f59348d5 100644 --- a/services/filestore/test/unit/js/FileConverterTests.js +++ b/services/filestore/test/unit/js/FileConverterTests.js @@ -28,7 +28,7 @@ describe('FileConverter', function () { FileConverter = SandboxedModule.require(modulePath, { requires: { './SafeExec': SafeExec, - 'metrics-sharelatex': { + '@overleaf/metrics': { inc: sinon.stub(), Timer: sinon.stub().returns({ done: sinon.stub() }) }, diff --git a/services/filestore/test/unit/js/ImageOptimiserTests.js b/services/filestore/test/unit/js/ImageOptimiserTests.js index 7d9fcfd5c5..3a0007342f 100644 --- a/services/filestore/test/unit/js/ImageOptimiserTests.js +++ b/services/filestore/test/unit/js/ImageOptimiserTests.js @@ -20,7 +20,7 @@ describe('ImageOptimiser', function () { requires: { './SafeExec': SafeExec, 'logger-sharelatex': logger, - 'metrics-sharelatex': { + '@overleaf/metrics': { Timer: sinon.stub().returns({ done: sinon.stub() }) } } diff --git a/services/filestore/test/unit/js/LocalFileWriterTests.js b/services/filestore/test/unit/js/LocalFileWriterTests.js index 5041e79288..5fb9e56dca 100644 --- a/services/filestore/test/unit/js/LocalFileWriterTests.js +++ b/services/filestore/test/unit/js/LocalFileWriterTests.js @@ -30,7 +30,7 @@ describe('LocalFileWriter', function () { fs, stream, 'settings-sharelatex': settings, - 'metrics-sharelatex': { + '@overleaf/metrics': { inc: sinon.stub(), Timer: sinon.stub().returns({ done: sinon.stub() }) }, From c55c0d9d257c4af335c42aee0e5e60ca185a659d Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Wed, 2 Dec 2020 12:39:01 +0000 Subject: [PATCH 534/555] [misc] install @overleaf/object-persistor from tar-ball --- services/filestore/package-lock.json | 4 ++-- services/filestore/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index eba7645b4a..d670c056f5 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -833,8 +833,8 @@ "integrity": "sha512-LsM2s6Iy9G97ktPo0ys4VxtI/m3ahc1ZHwjo5XnhXtjeIkkkVAehsrcRRoV/yWepPjymB0oZonhcfojpjYR/tg==" }, "@overleaf/object-persistor": { - "version": "git+https://github.com/overleaf/object-persistor.git#8fbc9ed03206bfb54368578d22b7ac4f285baa25", - "from": "git+https://github.com/overleaf/object-persistor.git", + "version": "https://github.com/overleaf/object-persistor/archive/8fbc9ed03206bfb54368578d22b7ac4f285baa25.tar.gz", + "integrity": "sha512-QApK1xZv4c2sco4lBwq5tn2w7ZNcnqQ/bVPbrxpiJCK5L5Gfvwcm0T2cAbbsuI/FSkSB93eU60iZqPtT87vp9w==", "requires": { "@google-cloud/storage": "^5.1.2", "@overleaf/o-error": "^3.0.0", diff --git a/services/filestore/package.json b/services/filestore/package.json index 
a4492ca3e6..3670b9d9c9 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -22,7 +22,7 @@ "dependencies": { "@overleaf/metrics": "^3.4.1", "@overleaf/o-error": "^3.0.0", - "@overleaf/object-persistor": "git+https://github.com/overleaf/object-persistor.git", + "@overleaf/object-persistor": "https://github.com/overleaf/object-persistor/archive/8fbc9ed03206bfb54368578d22b7ac4f285baa25.tar.gz", "body-parser": "^1.19.0", "express": "^4.17.1", "fast-crc32c": "^2.0.0", From a017ecab1a2595beeae08e03f74728fb49b0ef64 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 13 Dec 2020 07:35:07 +0000 Subject: [PATCH 535/555] Bump ini from 1.3.5 to 1.3.8 Bumps [ini](https://github.com/isaacs/ini) from 1.3.5 to 1.3.8. - [Release notes](https://github.com/isaacs/ini/releases) - [Commits](https://github.com/isaacs/ini/compare/v1.3.5...v1.3.8) Signed-off-by: dependabot[bot] --- services/filestore/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index d670c056f5..3e9084e17b 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -3303,9 +3303,9 @@ "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==" }, "ini": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", - "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==" + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" }, "inquirer": { "version": "7.0.4", From 8989735c69cd2f8be598eae36f7ff1275c84f669 Mon Sep 17 00:00:00 2001 From: Simon Detheridge Date: Mon, 4 Jan 2021 13:32:16 +0000 Subject: [PATCH 536/555] pin fake-gcs to 1.21.2 to prevent acceptance-test errors --- services/filestore/docker-compose.ci.yml | 2 +- services/filestore/docker-compose.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml index bb16114cf9..2a90f26046 100644 --- a/services/filestore/docker-compose.ci.yml +++ b/services/filestore/docker-compose.ci.yml @@ -67,7 +67,7 @@ services: interval: 1s retries: 20 gcs: - image: fsouza/fake-gcs-server + image: fsouza/fake-gcs-server:v1.21.2 command: ["--port=9090", "--scheme=http"] healthcheck: test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml index 6ad3ceb2a8..1172975721 100644 --- a/services/filestore/docker-compose.yml +++ b/services/filestore/docker-compose.yml @@ -69,7 +69,7 @@ services: interval: 1s retries: 20 gcs: - image: fsouza/fake-gcs-server + image: fsouza/fake-gcs-server:v1.21.2 command: ["--port=9090", "--scheme=http"] healthcheck: test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b From c9cb5198e1c7fd89a7cd4d6ff741006d66a7c9ed Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 5 Jan 2021 18:30:42 +0000 Subject: [PATCH 537/555] [misc] bump the node version to 12.20.1 --- services/filestore/.nvmrc | 2 +- services/filestore/Dockerfile | 2 +- services/filestore/buildscript.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff 
--git a/services/filestore/.nvmrc b/services/filestore/.nvmrc index ce122aaa98..ec101caa3f 100644 --- a/services/filestore/.nvmrc +++ b/services/filestore/.nvmrc @@ -1 +1 @@ -12.18.4 +12.20.1 diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile index b6b31f2022..6df4c7de58 100644 --- a/services/filestore/Dockerfile +++ b/services/filestore/Dockerfile @@ -2,7 +2,7 @@ # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment -FROM node:12.18.4 as base +FROM node:12.20.1 as base WORKDIR /app COPY install_deps.sh /app diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index 2e464749ec..d8ecd26cdd 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -4,6 +4,6 @@ filestore --docker-repos=gcr.io/overleaf-ops --env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_BUCKET_NAME=fake_user_files,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake_template_files,AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake_public_files,GCS_USER_FILES_BUCKET_NAME=fake_userfiles,GCS_TEMPLATE_FILES_BUCKET_NAME=fake_templatefiles,GCS_PUBLIC_FILES_BUCKET_NAME=fake_publicfiles --env-pass-through= ---node-version=12.18.4 +--node-version=12.20.1 --public-repo=True --script-version=3.4.0 From 5f084f860325a71d155636edb13d263d68c220d6 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 16 Feb 2021 15:10:11 +0000 Subject: [PATCH 538/555] [misc] bump the version of the metrics module to 3.5.1 --- services/filestore/package-lock.json | 991 ++++++++++++++++++--------- services/filestore/package.json | 2 +- 2 files changed, 666 insertions(+), 327 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index d670c056f5..83f59175ff 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -169,6 +169,202 @@ } } }, + "@google-cloud/debug-agent": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-5.1.3.tgz", + "integrity": "sha512-WbzeEz4MvPlM7DX2QBsPcWgF62u7LSQv/oMYPl0L+TddTebqjDKiVXwxpzWk61NIfcKiet3dyCbPIt3N5o8XPQ==", + "requires": { + "@google-cloud/common": "^3.0.0", + "acorn": "^8.0.0", + "coffeescript": "^2.0.0", + "console-log-level": "^1.4.0", + "extend": "^3.0.2", + "findit2": "^2.2.3", + "gcp-metadata": "^4.0.0", + "p-limit": "^3.0.1", + "semver": "^7.0.0", + "source-map": "^0.6.1", + "split": "^1.0.0" + }, + "dependencies": { + "@google-cloud/common": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.5.0.tgz", + "integrity": "sha512-10d7ZAvKhq47L271AqvHEd8KzJqGU45TY+rwM2Z3JHuB070FeTi7oJJd7elfrnKaEvaktw3hH2wKnRWxk/3oWQ==", + "requires": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^6.1.1", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/projectify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" + }, + "@google-cloud/promisify": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": 
"sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" + }, + "acorn": { + "version": "8.0.5", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.0.5.tgz", + "integrity": "sha512-v+DieK/HJkJOpFBETDJioequtc3PfxsWMaxIdIwujtF7FEV/MAyDQLlm6/zPvr7Mix07mLh6ccVwIsloceodlg==" + }, + "bignumber.js": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" + }, + "coffeescript": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", + "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" + }, + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "gaxios": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.1.0.tgz", + "integrity": "sha512-vb0to8xzGnA2qcgywAjtshOKKVDf2eQhJoiL6fHhgW5tVN7wNk7egnYIO9zotfn3lQ3De1VPdf7V5/BWfCtCmg==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", + "requires": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "6.1.6", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.6.tgz", + "integrity": "sha512-Q+ZjUEvLQj/lrVHF/IQwRo6p3s8Nc44Zk/DALsN+ac3T4HY/g/3rrufkgtl+nZ1TW7DNAw5cTChdVp4apUXVgQ==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", + "requires": { + "node-forge": "^0.10.0" + } + }, + "gtoken": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", + "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", + "requires": { + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, 
+ "node-forge": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "semver": { + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz", + "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "teeny-request": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + } + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, "@google-cloud/logging": { "version": "7.3.0", "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.3.0.tgz", @@ -231,6 +427,186 @@ "extend": "^3.0.2" } }, + "@google-cloud/profiler": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-4.1.0.tgz", + "integrity": "sha512-9e1zXRctLSUHAoAsFGwE4rS28fr0siiG+jXl5OpwTK8ZAUlxb70aosHaZGdsv8YXrYKjuiufjRZ/OXCs0XLI9g==", + "requires": { + "@google-cloud/common": "^3.0.0", + "@types/console-log-level": "^1.4.0", + "@types/semver": "^7.0.0", + "console-log-level": "^1.4.0", + "delay": "^4.0.1", + "extend": "^3.0.2", + "gcp-metadata": "^4.0.0", + "parse-duration": "^0.4.4", + "pprof": "3.0.0", + "pretty-ms": "^7.0.0", + "protobufjs": "~6.10.0", + "semver": "^7.0.0", + "teeny-request": "^7.0.0" + }, + "dependencies": { + "@google-cloud/common": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.5.0.tgz", + "integrity": "sha512-10d7ZAvKhq47L271AqvHEd8KzJqGU45TY+rwM2Z3JHuB070FeTi7oJJd7elfrnKaEvaktw3hH2wKnRWxk/3oWQ==", + "requires": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^6.1.1", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + } + }, + "@google-cloud/projectify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": 
"sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" + }, + "@google-cloud/promisify": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" + }, + "bignumber.js": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" + }, + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "gaxios": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.1.0.tgz", + "integrity": "sha512-vb0to8xzGnA2qcgywAjtshOKKVDf2eQhJoiL6fHhgW5tVN7wNk7egnYIO9zotfn3lQ3De1VPdf7V5/BWfCtCmg==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", + "requires": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "6.1.6", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.6.tgz", + "integrity": "sha512-Q+ZjUEvLQj/lrVHF/IQwRo6p3s8Nc44Zk/DALsN+ac3T4HY/g/3rrufkgtl+nZ1TW7DNAw5cTChdVp4apUXVgQ==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", + "requires": { + "node-forge": "^0.10.0" + } + }, + "gtoken": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", + "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", + "requires": { + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "node-forge": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": 
"sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "semver": { + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz", + "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "teeny-request": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + } + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, "@google-cloud/projectify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz", @@ -450,6 +826,229 @@ } } }, + "@google-cloud/trace-agent": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-5.1.3.tgz", + "integrity": "sha512-f+5DX7n6QpDlHA+4kr81z69SLAdrlvd9T8skqCMgnYvtXx14AwzXZyzEDf3jppOYzYoqPPJv8XYiyYHHmYD0BA==", + "requires": { + "@google-cloud/common": "^3.0.0", + "@opencensus/propagation-stackdriver": "0.0.22", + "builtin-modules": "^3.0.0", + "console-log-level": "^1.4.0", + "continuation-local-storage": "^3.2.1", + "extend": "^3.0.2", + "gcp-metadata": "^4.0.0", + "google-auth-library": "^7.0.0", + "hex2dec": "^1.0.1", + "is": "^3.2.0", + "methods": "^1.1.1", + "require-in-the-middle": "^5.0.0", + "semver": "^7.0.0", + "shimmer": "^1.2.0", + "source-map-support": "^0.5.16", + "uuid": "^8.0.0" + }, + "dependencies": { + "@google-cloud/common": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.5.0.tgz", + "integrity": "sha512-10d7ZAvKhq47L271AqvHEd8KzJqGU45TY+rwM2Z3JHuB070FeTi7oJJd7elfrnKaEvaktw3hH2wKnRWxk/3oWQ==", + "requires": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^6.1.1", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" + }, + "dependencies": { + "google-auth-library": { + "version": "6.1.6", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.6.tgz", + "integrity": "sha512-Q+ZjUEvLQj/lrVHF/IQwRo6p3s8Nc44Zk/DALsN+ac3T4HY/g/3rrufkgtl+nZ1TW7DNAw5cTChdVp4apUXVgQ==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + 
"jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + } + } + }, + "@google-cloud/projectify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" + }, + "@google-cloud/promisify": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", + "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" + }, + "@opencensus/core": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", + "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", + "requires": { + "continuation-local-storage": "^3.2.1", + "log-driver": "^1.2.7", + "semver": "^7.0.0", + "shimmer": "^1.2.0", + "uuid": "^8.0.0" + } + }, + "@opencensus/propagation-stackdriver": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", + "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", + "requires": { + "@opencensus/core": "^0.0.22", + "hex2dec": "^1.0.1", + "uuid": "^8.0.0" + } + }, + "bignumber.js": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", + "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" + }, + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "gaxios": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.1.0.tgz", + "integrity": "sha512-vb0to8xzGnA2qcgywAjtshOKKVDf2eQhJoiL6fHhgW5tVN7wNk7egnYIO9zotfn3lQ3De1VPdf7V5/BWfCtCmg==", + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", + "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", + "requires": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.2.tgz", + "integrity": "sha512-vjyNZR3pDLC0u7GHLfj+Hw9tGprrJwoMwkYGqURCXYITjCrP9HprOyxVV+KekdLgATtWGuDkQG2MTh0qpUPUgg==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", + "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", + "requires": { + "node-forge": "^0.10.0" + } + }, + "gtoken": { + "version": "5.2.1", + "resolved": 
"https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz", + "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==", + "requires": { + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "node-forge": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "semver": { + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz", + "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "teeny-request": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", + "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", + "requires": { + "http-proxy-agent": "^4.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + } + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, "@grpc/grpc-js": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.5.tgz", @@ -504,9 +1103,9 @@ } }, "@overleaf/metrics": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/@overleaf/metrics/-/metrics-3.4.1.tgz", - "integrity": "sha512-OgjlzuC+2gPdIEDHhmd9LDMu01tk1ln0cJhw1727BZ+Wgf2Z1hjuHRt4JeCkf+PFTHwJutVYT8v6IGPpNEPtbg==", + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/@overleaf/metrics/-/metrics-3.5.1.tgz", + "integrity": "sha512-RLHxkMF7Y3725L3QwXo9cIn2gGobsMYUGuxKxg7PVMrPTMsomHEMeG7StOxCO7ML1Z/BwB/9nsVYNrsRdAJtKg==", "requires": { "@google-cloud/debug-agent": "^5.1.2", "@google-cloud/profiler": "^4.0.3", @@ -517,309 +1116,6 @@ "yn": "^3.1.1" }, "dependencies": { - "@google-cloud/common": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.5.0.tgz", - "integrity": "sha512-10d7ZAvKhq47L271AqvHEd8KzJqGU45TY+rwM2Z3JHuB070FeTi7oJJd7elfrnKaEvaktw3hH2wKnRWxk/3oWQ==", - 
"requires": { - "@google-cloud/projectify": "^2.0.0", - "@google-cloud/promisify": "^2.0.0", - "arrify": "^2.0.1", - "duplexify": "^4.1.1", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^6.1.1", - "retry-request": "^4.1.1", - "teeny-request": "^7.0.0" - } - }, - "@google-cloud/debug-agent": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-5.1.3.tgz", - "integrity": "sha512-WbzeEz4MvPlM7DX2QBsPcWgF62u7LSQv/oMYPl0L+TddTebqjDKiVXwxpzWk61NIfcKiet3dyCbPIt3N5o8XPQ==", - "requires": { - "@google-cloud/common": "^3.0.0", - "acorn": "^8.0.0", - "coffeescript": "^2.0.0", - "console-log-level": "^1.4.0", - "extend": "^3.0.2", - "findit2": "^2.2.3", - "gcp-metadata": "^4.0.0", - "p-limit": "^3.0.1", - "semver": "^7.0.0", - "source-map": "^0.6.1", - "split": "^1.0.0" - } - }, - "@google-cloud/profiler": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-4.1.0.tgz", - "integrity": "sha512-9e1zXRctLSUHAoAsFGwE4rS28fr0siiG+jXl5OpwTK8ZAUlxb70aosHaZGdsv8YXrYKjuiufjRZ/OXCs0XLI9g==", - "requires": { - "@google-cloud/common": "^3.0.0", - "@types/console-log-level": "^1.4.0", - "@types/semver": "^7.0.0", - "console-log-level": "^1.4.0", - "delay": "^4.0.1", - "extend": "^3.0.2", - "gcp-metadata": "^4.0.0", - "parse-duration": "^0.4.4", - "pprof": "3.0.0", - "pretty-ms": "^7.0.0", - "protobufjs": "~6.10.0", - "semver": "^7.0.0", - "teeny-request": "^7.0.0" - } - }, - "@google-cloud/projectify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", - "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" - }, - "@google-cloud/promisify": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz", - "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw==" - }, - "@google-cloud/trace-agent": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-5.1.1.tgz", - "integrity": "sha512-YTcK0RLN90pLCprg0XC8uV4oAVd79vsXhkcxmEVwiOOYjUDvSrAhb7y/0SY606zgfhJHmUTNb/fZSWEtZP/slQ==", - "requires": { - "@google-cloud/common": "^3.0.0", - "@opencensus/propagation-stackdriver": "0.0.22", - "builtin-modules": "^3.0.0", - "console-log-level": "^1.4.0", - "continuation-local-storage": "^3.2.1", - "extend": "^3.0.2", - "gcp-metadata": "^4.0.0", - "google-auth-library": "^6.0.0", - "hex2dec": "^1.0.1", - "is": "^3.2.0", - "methods": "^1.1.1", - "require-in-the-middle": "^5.0.0", - "semver": "^7.0.0", - "shimmer": "^1.2.0", - "source-map-support": "^0.5.16", - "uuid": "^8.0.0" - } - }, - "@opencensus/core": { - "version": "0.0.22", - "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz", - "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==", - "requires": { - "continuation-local-storage": "^3.2.1", - "log-driver": "^1.2.7", - "semver": "^7.0.0", - "shimmer": "^1.2.0", - "uuid": "^8.0.0" - } - }, - "@opencensus/propagation-stackdriver": { - "version": "0.0.22", - "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz", - "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==", - "requires": { - "@opencensus/core": "^0.0.22", - "hex2dec": "^1.0.1", - 
"uuid": "^8.0.0" - } - }, - "@types/semver": { - "version": "7.3.4", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.4.tgz", - "integrity": "sha512-+nVsLKlcUCeMzD2ufHEYuJ9a2ovstb6Dp52A5VsoKxDXgvE051XgHI/33I1EymwkRGQkwnA0LkhnUzituGs4EQ==" - }, - "acorn": { - "version": "8.0.4", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.0.4.tgz", - "integrity": "sha512-XNP0PqF1XD19ZlLKvB7cMmnZswW4C/03pRHgirB30uSJTaS3A3V1/P4sS3HPvFmjoriPCJQs+JDSbm4bL1TxGQ==" - }, - "bignumber.js": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", - "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" - }, - "coffeescript": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", - "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" - }, - "debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", - "requires": { - "ms": "2.1.2" - } - }, - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - }, - "gaxios": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.0.1.tgz", - "integrity": "sha512-jOin8xRZ/UytQeBpSXFqIzqU7Fi5TqgPNLlUsSB8kjJ76+FiGBfImF8KJu++c6J4jOldfJUtt0YmkRj2ZpSHTQ==", - "requires": { - "abort-controller": "^3.0.0", - "extend": "^3.0.2", - "https-proxy-agent": "^5.0.0", - "is-stream": "^2.0.0", - "node-fetch": "^2.3.0" - } - }, - "gcp-metadata": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz", - "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==", - "requires": { - "gaxios": "^4.0.0", - "json-bigint": "^1.0.0" - } - }, - "google-auth-library": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.3.tgz", - "integrity": "sha512-m9mwvY3GWbr7ZYEbl61isWmk+fvTmOt0YNUfPOUY2VH8K5pZlAIWJjxEi0PqR3OjMretyiQLI6GURMrPSwHQ2g==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^4.0.0", - "gcp-metadata": "^4.2.0", - "gtoken": "^5.0.4", - "jws": "^4.0.0", - "lru-cache": "^6.0.0" - } - }, - "google-p12-pem": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz", - "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==", - "requires": { - "node-forge": "^0.10.0" - } - }, - "gtoken": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.1.0.tgz", - "integrity": "sha512-4d8N6Lk8TEAHl9vVoRVMh9BNOKWVgl2DdNtr3428O75r3QFrF/a5MMu851VmK0AA8+iSvbwRv69k5XnMLURGhg==", - "requires": { - "gaxios": "^4.0.0", - "google-p12-pem": "^3.0.3", - "jws": "^4.0.0", - "mime": "^2.2.0" - } - }, - "json-bigint": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", - 
"integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", - "requires": { - "bignumber.js": "^9.0.0" - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "mime": { - "version": "2.4.6", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", - "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" - }, - "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "requires": { - "yocto-queue": "^0.1.0" - } - }, - "parse-duration": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.4.4.tgz", - "integrity": "sha512-KbAJuYGUhZkB9gotDiKLnZ7Z3VTacK3fgwmDdB6ZVDtJbMBT6MfLga0WJaYpPDu0mzqT0NgHtHDt5PY4l0nidg==" - }, - "pretty-ms": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz", - "integrity": "sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q==", - "requires": { - "parse-ms": "^2.1.0" - } - }, - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, - "require-in-the-middle": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.0.3.tgz", - "integrity": "sha512-p/ICV8uMlqC4tjOYabLMxAWCIKa0YUQgZZ6KDM0xgXJNgdGQ1WmL2A07TwmrZw+wi6ITUFKzH5v3n+ENEyXVkA==", - "requires": { - "debug": "^4.1.1", - "module-details-from-path": "^1.0.3", - "resolve": "^1.12.0" - } - }, - "semver": { - "version": "7.3.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz", - "integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==" - }, - "teeny-request": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz", - "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==", - "requires": { - "http-proxy-agent": "^4.0.0", - "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.6.1", - "stream-events": "^1.0.5", - "uuid": "^8.0.0" - } - }, - "uuid": { - "version": "8.3.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.1.tgz", - "integrity": "sha512-FOmRr+FmWEIG8uhZv6C2bTgEVXsHk08kE7mPlrBbEe+c3r9pjceVPgupIfNIhc4yx55H69OXANrUaSuu9eInKg==" - }, - "yallist": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, "yn": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", @@ -992,6 +1288,11 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.13.tgz", "integrity": "sha512-UfvBE9oRCAJVzfR+3eWm/sdLFe/qroAPEXP3GPJ1SehQiEVgZT6NQZWYbPMiJ3UdcKM06v4j+S1lTcdWCmw+3g==" }, + "@types/semver": { + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.4.tgz", + "integrity": "sha512-+nVsLKlcUCeMzD2ufHEYuJ9a2ovstb6Dp52A5VsoKxDXgvE051XgHI/33I1EymwkRGQkwnA0LkhnUzituGs4EQ==" + }, "@typescript-eslint/experimental-utils": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-1.13.0.tgz", @@ -1434,9 +1735,9 @@ "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" }, "builtin-modules": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", - "integrity": "sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw==" + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.2.0.tgz", + "integrity": "sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==" }, "bunyan": { "version": "1.8.14", @@ -1866,9 +2167,9 @@ } }, "delay": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", - "integrity": "sha512-Lwaf3zVFDMBop1yDuFZ19F9WyGcZcGacsbdlZtWjQmM50tOcMntm1njF/Nb/Vjij3KaSvCF+sEYGKrrjObu2NA==" + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/delay/-/delay-4.4.1.tgz", + "integrity": "sha512-aL3AhqtfhOlT/3ai6sWXeqwnw63ATNpnUiN4HL7x9q+My5QtHlO3OIkasmug9LKzpheLdmUKGRKnYXYAS7FQkQ==" }, "delayed-stream": { "version": "1.0.0", @@ -3303,9 +3604,9 @@ "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==" }, "ini": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", - "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==" + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" }, "inquirer": { "version": "7.0.4", @@ -4147,9 +4448,9 @@ "optional": true }, "needle": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/needle/-/needle-2.5.2.tgz", - "integrity": "sha512-LbRIwS9BfkPvNwNHlsA41Q29kL2L/6VaOJ0qisM5lLWsTV3nP15abO5ITL6L81zqFhzjRKDAYjpcBcwM0AVvLQ==", + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/needle/-/needle-2.6.0.tgz", + "integrity": "sha512-KKYdza4heMsEfSWD7VPUIz3zX2XDwOyX2d+geb4vrERZMT5RMU6ujjaD+I5Yr54uZxQ2w6XRTAhHBbSCyovZBg==", "requires": { "debug": "^3.2.6", "iconv-lite": "^0.4.4", @@ -4165,9 +4466,9 @@ } }, "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, 
"sax": { "version": "1.2.4", @@ -4521,6 +4822,11 @@ "callsites": "^3.0.0" } }, + "parse-duration": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.4.4.tgz", + "integrity": "sha512-KbAJuYGUhZkB9gotDiKLnZ7Z3VTacK3fgwmDdB6ZVDtJbMBT6MfLga0WJaYpPDu0mzqT0NgHtHDt5PY4l0nidg==" + }, "parse-json": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", @@ -4607,6 +4913,11 @@ "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", "dev": true }, + "pify": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz", + "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==" + }, "pkg-dir": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", @@ -4641,11 +4952,6 @@ "yocto-queue": "^0.1.0" } }, - "pify": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz", - "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==" - }, "source-map": { "version": "0.7.3", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", @@ -5234,6 +5540,14 @@ } } }, + "pretty-ms": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz", + "integrity": "sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q==", + "requires": { + "parse-ms": "^2.1.0" + } + }, "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", @@ -5546,6 +5860,31 @@ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": true }, + "require-in-the-middle": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.1.0.tgz", + "integrity": "sha512-M2rLKVupQfJ5lf9OvqFGIT+9iVLnTmjgbOmpil12hiSQNn5zJTKGPoIisETNjfK+09vP3rpm1zJajmErpr2sEQ==", + "requires": { + "debug": "^4.1.1", + "module-details-from-path": "^1.0.3", + "resolve": "^1.12.0" + }, + "dependencies": { + "debug": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "requires": { + "ms": "2.1.2" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, "require-like": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 3670b9d9c9..66a7336c82 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -20,7 +20,7 @@ "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" }, "dependencies": { - "@overleaf/metrics": "^3.4.1", + "@overleaf/metrics": "^3.5.1", "@overleaf/o-error": "^3.0.0", "@overleaf/object-persistor": "https://github.com/overleaf/object-persistor/archive/8fbc9ed03206bfb54368578d22b7ac4f285baa25.tar.gz", "body-parser": "^1.19.0", From 96697eb91b930fac8e4fdc09c0750444c34c6d5a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 31 Mar 2021 14:06:02 +0000 Subject: [PATCH 539/555] Bump y18n from 4.0.0 to 4.0.1 Bumps [y18n](https://github.com/yargs/y18n) from 4.0.0 to 4.0.1. - [Release notes](https://github.com/yargs/y18n/releases) - [Changelog](https://github.com/yargs/y18n/blob/master/CHANGELOG.md) - [Commits](https://github.com/yargs/y18n/commits) Signed-off-by: dependabot[bot] --- services/filestore/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index d670c056f5..e448039bda 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -6668,9 +6668,9 @@ "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" }, "y18n": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", - "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz", + "integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==", "dev": true }, "yaassertion": { From 126f5bef112548de18f32b55c28ce44735e0043b Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Thu, 29 Apr 2021 15:30:51 +0100 Subject: [PATCH 540/555] [misc] add linting for missing explicit dependencies and fix any errors --- services/filestore/.eslintrc | 13 +++++++++++-- services/filestore/Makefile | 6 ++++-- services/filestore/buildscript.txt | 2 +- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/services/filestore/.eslintrc b/services/filestore/.eslintrc index 76dad1561d..321353f971 100644 --- a/services/filestore/.eslintrc +++ b/services/filestore/.eslintrc @@ -22,7 +22,10 @@ "rules": { // Swap the no-unused-expressions rule with a more chai-friendly one "no-unused-expressions": 0, - "chai-friendly/no-unused-expressions": "error" + "chai-friendly/no-unused-expressions": "error", + + // Do not allow importing of implicit dependencies. + "import/no-extraneous-dependencies": "error" }, "overrides": [ { @@ -57,7 +60,13 @@ "files": ["app/**/*.js", "app.js", "index.js"], "rules": { // don't allow console.log in backend code - "no-console": "error" + "no-console": "error", + + // Do not allow importing of implicit dependencies. + "import/no-extraneous-dependencies": ["error", { + // Do not allow importing of devDependencies. 
+ "devDependencies": false + }] } } ] diff --git a/services/filestore/Makefile b/services/filestore/Makefile index f713b7a0a2..de66417ba5 100644 --- a/services/filestore/Makefile +++ b/services/filestore/Makefile @@ -21,8 +21,10 @@ DOCKER_COMPOSE_TEST_UNIT = \ COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) clean: - docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) - docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local + -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local format: $(DOCKER_COMPOSE) run --rm test_unit npm run --silent format diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index d8ecd26cdd..5320893ad7 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -6,4 +6,4 @@ filestore --env-pass-through= --node-version=12.20.1 --public-repo=True ---script-version=3.4.0 +--script-version=3.8.0 From 78c0b90eb70b206b07a84ea0f1f62a97c20296bd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 11 May 2021 03:28:23 +0000 Subject: [PATCH 541/555] Bump lodash from 4.17.20 to 4.17.21 Bumps [lodash](https://github.com/lodash/lodash) from 4.17.20 to 4.17.21. - [Release notes](https://github.com/lodash/lodash/releases) - [Commits](https://github.com/lodash/lodash/compare/4.17.20...4.17.21) Signed-off-by: dependabot[bot] --- services/filestore/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index db6b776a3a..ee2af76866 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -3912,9 +3912,9 @@ } }, "lodash": { - "version": "4.17.20", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz", - "integrity": "sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==" + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, "lodash.at": { "version": "4.6.0", From ce32ca9238224b1114244950bbb0707d489fd218 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 11 May 2021 12:30:36 +0000 Subject: [PATCH 542/555] Bump hosted-git-info from 2.8.8 to 2.8.9 Bumps [hosted-git-info](https://github.com/npm/hosted-git-info) from 2.8.8 to 2.8.9. 
- [Release notes](https://github.com/npm/hosted-git-info/releases) - [Changelog](https://github.com/npm/hosted-git-info/blob/v2.8.9/CHANGELOG.md) - [Commits](https://github.com/npm/hosted-git-info/compare/v2.8.8...v2.8.9) Signed-off-by: dependabot[bot] --- services/filestore/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index db6b776a3a..25f28ed7ef 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -3465,9 +3465,9 @@ "integrity": "sha512-Yu+q/XWr2fFQ11tHxPq4p4EiNkb2y+lAacJNhAdRXVfRIcDH6gi7htWFnnlIzvqHMHoWeIsfXlNAjZInpAOJDA==" }, "hosted-git-info": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", - "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", "dev": true }, "http-errors": { From 8786542ad68f214f049a416b4dfad86eb6dae938 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 May 2021 17:24:22 +0100 Subject: [PATCH 543/555] decaffeinate: Rename settings.defaults.coffee from .coffee to .js --- .../config/{settings.defaults.coffee => settings.defaults.js} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename services/filestore/config/{settings.defaults.coffee => settings.defaults.js} (100%) diff --git a/services/filestore/config/settings.defaults.coffee b/services/filestore/config/settings.defaults.js similarity index 100% rename from services/filestore/config/settings.defaults.coffee rename to services/filestore/config/settings.defaults.js From 3acf18324049c79e174104503e1729226713c520 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 May 2021 17:24:23 +0100 Subject: [PATCH 544/555] decaffeinate: Convert settings.defaults.coffee to JS --- .../filestore/config/settings.defaults.js | 161 ++++++++++-------- 1 file changed, 92 insertions(+), 69 deletions(-) diff --git a/services/filestore/config/settings.defaults.js b/services/filestore/config/settings.defaults.js index a53a4cf840..dfa373bcb0 100644 --- a/services/filestore/config/settings.defaults.js +++ b/services/filestore/config/settings.defaults.js @@ -1,95 +1,118 @@ -Path = require "path" +/* + * decaffeinate suggestions: + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Path = require("path"); -# environment variables renamed for consistency -# use AWS_ACCESS_KEY_ID-style going forward -if process.env['AWS_KEY'] && !process.env['AWS_ACCESS_KEY_ID'] - process.env['AWS_ACCESS_KEY_ID'] = process.env['AWS_KEY'] -if process.env['AWS_SECRET'] && !process.env['AWS_SECRET_ACCESS_KEY'] - process.env['AWS_SECRET_ACCESS_KEY'] = process.env['AWS_SECRET'] +// environment variables renamed for consistency +// use AWS_ACCESS_KEY_ID-style going forward +if (process.env['AWS_KEY'] && !process.env['AWS_ACCESS_KEY_ID']) { + process.env['AWS_ACCESS_KEY_ID'] = process.env['AWS_KEY']; +} +if (process.env['AWS_SECRET'] && !process.env['AWS_SECRET_ACCESS_KEY']) { + process.env['AWS_SECRET_ACCESS_KEY'] = process.env['AWS_SECRET']; +} -# pre-backend setting, fall back to old behaviour -unless process.env['BACKEND']? 
- if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? - process.env['BACKEND'] = "s3" - process.env['USER_FILES_BUCKET_NAME'] = process.env['AWS_S3_USER_FILES_BUCKET_NAME'] - process.env['TEMPLATE_FILES_BUCKET_NAME'] = process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME'] - process.env['PUBLIC_FILES_BUCKET_NAME'] = process.env['AWS_S3_PUBLIC_FILES_BUCKET_NAME'] - else - process.env['BACKEND'] = "fs" - process.env['USER_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../user_files") - process.env['TEMPLATE_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../template_files") - process.env['PUBLIC_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../public_files") +// pre-backend setting, fall back to old behaviour +if (process.env['BACKEND'] == null) { + if ((process.env['AWS_ACCESS_KEY_ID'] != null) || (process.env['S3_BUCKET_CREDENTIALS'] != null)) { + process.env['BACKEND'] = "s3"; + process.env['USER_FILES_BUCKET_NAME'] = process.env['AWS_S3_USER_FILES_BUCKET_NAME']; + process.env['TEMPLATE_FILES_BUCKET_NAME'] = process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME']; + process.env['PUBLIC_FILES_BUCKET_NAME'] = process.env['AWS_S3_PUBLIC_FILES_BUCKET_NAME']; + } else { + process.env['BACKEND'] = "fs"; + process.env['USER_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../user_files"); + process.env['TEMPLATE_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../template_files"); + process.env['PUBLIC_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../public_files"); + } +} -settings = - internal: - filestore: - port: 3009 - host: process.env['LISTEN_ADDRESS'] or "localhost" +const settings = { + internal: { + filestore: { + port: 3009, + host: process.env['LISTEN_ADDRESS'] || "localhost" + } + }, - filestore: - # Which backend persistor to use. - # Choices are - # s3 - Amazon S3 - # fs - local filesystem - # gcs - Google Cloud Storage - backend: process.env['BACKEND'] + filestore: { + // Which backend persistor to use. + // Choices are + // s3 - Amazon S3 + // fs - local filesystem + // gcs - Google Cloud Storage + backend: process.env['BACKEND'], - gcs: + gcs: { endpoint: - if process.env['GCS_API_ENDPOINT'] - apiEndpoint: process.env['GCS_API_ENDPOINT'] - apiScheme: process.env['GCS_API_SCHEME'] + process.env['GCS_API_ENDPOINT'] ?{ + apiEndpoint: process.env['GCS_API_ENDPOINT'], + apiScheme: process.env['GCS_API_SCHEME'], projectId: process.env['GCS_PROJECT_ID'] - unlockBeforeDelete: process.env['GCS_UNLOCK_BEFORE_DELETE'] == "true" # unlock an event-based hold before deleting. default false - deletedBucketSuffix: process.env['GCS_DELETED_BUCKET_SUFFIX'] # if present, copy file to another bucket on delete. default null - deleteConcurrency: parseInt(process.env['GCS_DELETE_CONCURRENCY']) || 50 + } : undefined, + unlockBeforeDelete: process.env['GCS_UNLOCK_BEFORE_DELETE'] === "true", // unlock an event-based hold before deleting. default false + deletedBucketSuffix: process.env['GCS_DELETED_BUCKET_SUFFIX'], // if present, copy file to another bucket on delete. default null + deleteConcurrency: parseInt(process.env['GCS_DELETE_CONCURRENCY']) || 50, signedUrlExpiryInMs: parseInt(process.env['LINK_EXPIRY_TIMEOUT'] || 60000) + }, s3: - if process.env['AWS_ACCESS_KEY_ID']? or process.env['S3_BUCKET_CREDENTIALS']? 
- key: process.env['AWS_ACCESS_KEY_ID'] - secret: process.env['AWS_SECRET_ACCESS_KEY'] - endpoint: process.env['AWS_S3_ENDPOINT'] - pathStyle: process.env['AWS_S3_PATH_STYLE'] - partSize: process.env['AWS_S3_PARTSIZE'] or (100 * 1024 * 1024) - bucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS']? + (process.env['AWS_ACCESS_KEY_ID'] != null) || (process.env['S3_BUCKET_CREDENTIALS'] != null) ?{ + key: process.env['AWS_ACCESS_KEY_ID'], + secret: process.env['AWS_SECRET_ACCESS_KEY'], + endpoint: process.env['AWS_S3_ENDPOINT'], + pathStyle: process.env['AWS_S3_PATH_STYLE'], + partSize: process.env['AWS_S3_PARTSIZE'] || (100 * 1024 * 1024), + bucketCreds: ((process.env['S3_BUCKET_CREDENTIALS'] != null) ? JSON.parse(process.env['S3_BUCKET_CREDENTIALS']) : undefined) + } : undefined, - # GCS should be configured by the service account on the kubernetes pod. See GOOGLE_APPLICATION_CREDENTIALS, - # which will be picked up automatically. + // GCS should be configured by the service account on the kubernetes pod. See GOOGLE_APPLICATION_CREDENTIALS, + // which will be picked up automatically. - stores: - user_files: process.env['USER_FILES_BUCKET_NAME'] - template_files: process.env['TEMPLATE_FILES_BUCKET_NAME'] + stores: { + user_files: process.env['USER_FILES_BUCKET_NAME'], + template_files: process.env['TEMPLATE_FILES_BUCKET_NAME'], public_files: process.env['PUBLIC_FILES_BUCKET_NAME'] + }, fallback: - if process.env['FALLBACK_BACKEND']? - backend: process.env['FALLBACK_BACKEND'] - # mapping of bucket names on the fallback, to bucket names on the primary. - # e.g. { myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } - buckets: JSON.parse(process.env['FALLBACK_BUCKET_MAPPING'] || '{}') - copyOnMiss: process.env['COPY_ON_MISS'] == 'true' + (process.env['FALLBACK_BACKEND'] != null) ?{ + backend: process.env['FALLBACK_BACKEND'], + // mapping of bucket names on the fallback, to bucket names on the primary. + // e.g. { myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } + buckets: JSON.parse(process.env['FALLBACK_BUCKET_MAPPING'] || '{}'), + copyOnMiss: process.env['COPY_ON_MISS'] === 'true' + } : undefined, - allowRedirects: if process.env['ALLOW_REDIRECTS'] == 'true' then true else false + allowRedirects: process.env['ALLOW_REDIRECTS'] === 'true' ? true : false + }, - path: + path: { uploadFolder: Path.resolve(__dirname + "/../uploads") + }, - commands: - # Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"] + commands: { + // Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"] convertCommandPrefix: [] + }, - enableConversions: if process.env['ENABLE_CONVERSIONS'] == 'true' then true else false + enableConversions: process.env['ENABLE_CONVERSIONS'] === 'true' ? true : false, - sentry: + sentry: { dsn: process.env.SENTRY_DSN + } +}; -# Filestore health check -# ---------------------- -# Project and file details to check in persistor when calling /health_check -if process.env['HEALTH_CHECK_PROJECT_ID']? and process.env['HEALTH_CHECK_FILE_ID']? 
- settings.health_check = - project_id: process.env['HEALTH_CHECK_PROJECT_ID'] +// Filestore health check +// ---------------------- +// Project and file details to check in persistor when calling /health_check +if ((process.env['HEALTH_CHECK_PROJECT_ID'] != null) && (process.env['HEALTH_CHECK_FILE_ID'] != null)) { + settings.health_check = { + project_id: process.env['HEALTH_CHECK_PROJECT_ID'], file_id: process.env['HEALTH_CHECK_FILE_ID'] + }; +} -module.exports = settings +module.exports = settings; From 0b628280411b8772e9bde153574ea16451f79de3 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 May 2021 17:24:23 +0100 Subject: [PATCH 545/555] decaffeinate: Run post-processing cleanups on settings.defaults.coffee --- .../filestore/config/settings.defaults.js | 91 ++++++++++--------- 1 file changed, 48 insertions(+), 43 deletions(-) diff --git a/services/filestore/config/settings.defaults.js b/services/filestore/config/settings.defaults.js index dfa373bcb0..2327f50ea9 100644 --- a/services/filestore/config/settings.defaults.js +++ b/services/filestore/config/settings.defaults.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-path-concat, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS207: Consider shorter variations of null checks @@ -7,25 +12,25 @@ const Path = require("path"); // environment variables renamed for consistency // use AWS_ACCESS_KEY_ID-style going forward -if (process.env['AWS_KEY'] && !process.env['AWS_ACCESS_KEY_ID']) { - process.env['AWS_ACCESS_KEY_ID'] = process.env['AWS_KEY']; +if (process.env.AWS_KEY && !process.env.AWS_ACCESS_KEY_ID) { + process.env.AWS_ACCESS_KEY_ID = process.env.AWS_KEY; } -if (process.env['AWS_SECRET'] && !process.env['AWS_SECRET_ACCESS_KEY']) { - process.env['AWS_SECRET_ACCESS_KEY'] = process.env['AWS_SECRET']; +if (process.env.AWS_SECRET && !process.env.AWS_SECRET_ACCESS_KEY) { + process.env.AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET; } // pre-backend setting, fall back to old behaviour -if (process.env['BACKEND'] == null) { - if ((process.env['AWS_ACCESS_KEY_ID'] != null) || (process.env['S3_BUCKET_CREDENTIALS'] != null)) { - process.env['BACKEND'] = "s3"; - process.env['USER_FILES_BUCKET_NAME'] = process.env['AWS_S3_USER_FILES_BUCKET_NAME']; - process.env['TEMPLATE_FILES_BUCKET_NAME'] = process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME']; - process.env['PUBLIC_FILES_BUCKET_NAME'] = process.env['AWS_S3_PUBLIC_FILES_BUCKET_NAME']; +if (process.env.BACKEND == null) { + if ((process.env.AWS_ACCESS_KEY_ID != null) || (process.env.S3_BUCKET_CREDENTIALS != null)) { + process.env.BACKEND = "s3"; + process.env.USER_FILES_BUCKET_NAME = process.env.AWS_S3_USER_FILES_BUCKET_NAME; + process.env.TEMPLATE_FILES_BUCKET_NAME = process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME; + process.env.PUBLIC_FILES_BUCKET_NAME = process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME; } else { - process.env['BACKEND'] = "fs"; - process.env['USER_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../user_files"); - process.env['TEMPLATE_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../template_files"); - process.env['PUBLIC_FILES_BUCKET_NAME'] = Path.resolve(__dirname + "/../public_files"); + process.env.BACKEND = "fs"; + process.env.USER_FILES_BUCKET_NAME = Path.resolve(__dirname + "/../user_files"); + process.env.TEMPLATE_FILES_BUCKET_NAME = Path.resolve(__dirname + "/../template_files"); + process.env.PUBLIC_FILES_BUCKET_NAME = Path.resolve(__dirname + "/../public_files"); } } @@ -33,7 +38,7 @@ 
const settings = { internal: { filestore: { port: 3009, - host: process.env['LISTEN_ADDRESS'] || "localhost" + host: process.env.LISTEN_ADDRESS || "localhost" } }, @@ -43,50 +48,50 @@ const settings = { // s3 - Amazon S3 // fs - local filesystem // gcs - Google Cloud Storage - backend: process.env['BACKEND'], + backend: process.env.BACKEND, gcs: { endpoint: - process.env['GCS_API_ENDPOINT'] ?{ - apiEndpoint: process.env['GCS_API_ENDPOINT'], - apiScheme: process.env['GCS_API_SCHEME'], - projectId: process.env['GCS_PROJECT_ID'] + process.env.GCS_API_ENDPOINT ?{ + apiEndpoint: process.env.GCS_API_ENDPOINT, + apiScheme: process.env.GCS_API_SCHEME, + projectId: process.env.GCS_PROJECT_ID } : undefined, - unlockBeforeDelete: process.env['GCS_UNLOCK_BEFORE_DELETE'] === "true", // unlock an event-based hold before deleting. default false - deletedBucketSuffix: process.env['GCS_DELETED_BUCKET_SUFFIX'], // if present, copy file to another bucket on delete. default null - deleteConcurrency: parseInt(process.env['GCS_DELETE_CONCURRENCY']) || 50, - signedUrlExpiryInMs: parseInt(process.env['LINK_EXPIRY_TIMEOUT'] || 60000) + unlockBeforeDelete: process.env.GCS_UNLOCK_BEFORE_DELETE === "true", // unlock an event-based hold before deleting. default false + deletedBucketSuffix: process.env.GCS_DELETED_BUCKET_SUFFIX, // if present, copy file to another bucket on delete. default null + deleteConcurrency: parseInt(process.env.GCS_DELETE_CONCURRENCY) || 50, + signedUrlExpiryInMs: parseInt(process.env.LINK_EXPIRY_TIMEOUT || 60000) }, s3: - (process.env['AWS_ACCESS_KEY_ID'] != null) || (process.env['S3_BUCKET_CREDENTIALS'] != null) ?{ - key: process.env['AWS_ACCESS_KEY_ID'], - secret: process.env['AWS_SECRET_ACCESS_KEY'], - endpoint: process.env['AWS_S3_ENDPOINT'], - pathStyle: process.env['AWS_S3_PATH_STYLE'], - partSize: process.env['AWS_S3_PARTSIZE'] || (100 * 1024 * 1024), - bucketCreds: ((process.env['S3_BUCKET_CREDENTIALS'] != null) ? JSON.parse(process.env['S3_BUCKET_CREDENTIALS']) : undefined) + (process.env.AWS_ACCESS_KEY_ID != null) || (process.env.S3_BUCKET_CREDENTIALS != null) ?{ + key: process.env.AWS_ACCESS_KEY_ID, + secret: process.env.AWS_SECRET_ACCESS_KEY, + endpoint: process.env.AWS_S3_ENDPOINT, + pathStyle: process.env.AWS_S3_PATH_STYLE, + partSize: process.env.AWS_S3_PARTSIZE || (100 * 1024 * 1024), + bucketCreds: ((process.env.S3_BUCKET_CREDENTIALS != null) ? JSON.parse(process.env.S3_BUCKET_CREDENTIALS) : undefined) } : undefined, // GCS should be configured by the service account on the kubernetes pod. See GOOGLE_APPLICATION_CREDENTIALS, // which will be picked up automatically. stores: { - user_files: process.env['USER_FILES_BUCKET_NAME'], - template_files: process.env['TEMPLATE_FILES_BUCKET_NAME'], - public_files: process.env['PUBLIC_FILES_BUCKET_NAME'] + user_files: process.env.USER_FILES_BUCKET_NAME, + template_files: process.env.TEMPLATE_FILES_BUCKET_NAME, + public_files: process.env.PUBLIC_FILES_BUCKET_NAME }, fallback: - (process.env['FALLBACK_BACKEND'] != null) ?{ - backend: process.env['FALLBACK_BACKEND'], + (process.env.FALLBACK_BACKEND != null) ?{ + backend: process.env.FALLBACK_BACKEND, // mapping of bucket names on the fallback, to bucket names on the primary. // e.g. 
{ myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } - buckets: JSON.parse(process.env['FALLBACK_BUCKET_MAPPING'] || '{}'), - copyOnMiss: process.env['COPY_ON_MISS'] === 'true' + buckets: JSON.parse(process.env.FALLBACK_BUCKET_MAPPING || '{}'), + copyOnMiss: process.env.COPY_ON_MISS === 'true' } : undefined, - allowRedirects: process.env['ALLOW_REDIRECTS'] === 'true' ? true : false + allowRedirects: process.env.ALLOW_REDIRECTS === 'true' }, path: { @@ -98,7 +103,7 @@ const settings = { convertCommandPrefix: [] }, - enableConversions: process.env['ENABLE_CONVERSIONS'] === 'true' ? true : false, + enableConversions: process.env.ENABLE_CONVERSIONS === 'true', sentry: { dsn: process.env.SENTRY_DSN @@ -108,10 +113,10 @@ const settings = { // Filestore health check // ---------------------- // Project and file details to check in persistor when calling /health_check -if ((process.env['HEALTH_CHECK_PROJECT_ID'] != null) && (process.env['HEALTH_CHECK_FILE_ID'] != null)) { +if ((process.env.HEALTH_CHECK_PROJECT_ID != null) && (process.env.HEALTH_CHECK_FILE_ID != null)) { settings.health_check = { - project_id: process.env['HEALTH_CHECK_PROJECT_ID'], - file_id: process.env['HEALTH_CHECK_FILE_ID'] + project_id: process.env.HEALTH_CHECK_PROJECT_ID, + file_id: process.env.HEALTH_CHECK_FILE_ID }; } From b5b2bb3d22a2838077acff2f50b285c3835e8279 Mon Sep 17 00:00:00 2001 From: Alf Eaton Date: Tue, 25 May 2021 15:53:36 +0100 Subject: [PATCH 546/555] Run format:fix --- .../filestore/config/settings.defaults.js | 190 ++++++++++-------- 1 file changed, 107 insertions(+), 83 deletions(-) diff --git a/services/filestore/config/settings.defaults.js b/services/filestore/config/settings.defaults.js index 2327f50ea9..1d84cf529a 100644 --- a/services/filestore/config/settings.defaults.js +++ b/services/filestore/config/settings.defaults.js @@ -8,116 +8,140 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Path = require("path"); +const Path = require('path') // environment variables renamed for consistency // use AWS_ACCESS_KEY_ID-style going forward if (process.env.AWS_KEY && !process.env.AWS_ACCESS_KEY_ID) { - process.env.AWS_ACCESS_KEY_ID = process.env.AWS_KEY; + process.env.AWS_ACCESS_KEY_ID = process.env.AWS_KEY } if (process.env.AWS_SECRET && !process.env.AWS_SECRET_ACCESS_KEY) { - process.env.AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET; + process.env.AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET } // pre-backend setting, fall back to old behaviour if (process.env.BACKEND == null) { - if ((process.env.AWS_ACCESS_KEY_ID != null) || (process.env.S3_BUCKET_CREDENTIALS != null)) { - process.env.BACKEND = "s3"; - process.env.USER_FILES_BUCKET_NAME = process.env.AWS_S3_USER_FILES_BUCKET_NAME; - process.env.TEMPLATE_FILES_BUCKET_NAME = process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME; - process.env.PUBLIC_FILES_BUCKET_NAME = process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME; - } else { - process.env.BACKEND = "fs"; - process.env.USER_FILES_BUCKET_NAME = Path.resolve(__dirname + "/../user_files"); - process.env.TEMPLATE_FILES_BUCKET_NAME = Path.resolve(__dirname + "/../template_files"); - process.env.PUBLIC_FILES_BUCKET_NAME = Path.resolve(__dirname + "/../public_files"); - } + if ( + process.env.AWS_ACCESS_KEY_ID != null || + process.env.S3_BUCKET_CREDENTIALS != null + ) { + process.env.BACKEND = 's3' + process.env.USER_FILES_BUCKET_NAME = + process.env.AWS_S3_USER_FILES_BUCKET_NAME + 
process.env.TEMPLATE_FILES_BUCKET_NAME = + process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME + process.env.PUBLIC_FILES_BUCKET_NAME = + process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME + } else { + process.env.BACKEND = 'fs' + process.env.USER_FILES_BUCKET_NAME = Path.resolve( + __dirname + '/../user_files' + ) + process.env.TEMPLATE_FILES_BUCKET_NAME = Path.resolve( + __dirname + '/../template_files' + ) + process.env.PUBLIC_FILES_BUCKET_NAME = Path.resolve( + __dirname + '/../public_files' + ) + } } const settings = { - internal: { - filestore: { - port: 3009, - host: process.env.LISTEN_ADDRESS || "localhost" - } - }, + internal: { + filestore: { + port: 3009, + host: process.env.LISTEN_ADDRESS || 'localhost' + } + }, - filestore: { - // Which backend persistor to use. - // Choices are - // s3 - Amazon S3 - // fs - local filesystem - // gcs - Google Cloud Storage - backend: process.env.BACKEND, + filestore: { + // Which backend persistor to use. + // Choices are + // s3 - Amazon S3 + // fs - local filesystem + // gcs - Google Cloud Storage + backend: process.env.BACKEND, - gcs: { - endpoint: - process.env.GCS_API_ENDPOINT ?{ - apiEndpoint: process.env.GCS_API_ENDPOINT, - apiScheme: process.env.GCS_API_SCHEME, - projectId: process.env.GCS_PROJECT_ID - } : undefined, - unlockBeforeDelete: process.env.GCS_UNLOCK_BEFORE_DELETE === "true", // unlock an event-based hold before deleting. default false - deletedBucketSuffix: process.env.GCS_DELETED_BUCKET_SUFFIX, // if present, copy file to another bucket on delete. default null - deleteConcurrency: parseInt(process.env.GCS_DELETE_CONCURRENCY) || 50, - signedUrlExpiryInMs: parseInt(process.env.LINK_EXPIRY_TIMEOUT || 60000) - }, + gcs: { + endpoint: process.env.GCS_API_ENDPOINT + ? { + apiEndpoint: process.env.GCS_API_ENDPOINT, + apiScheme: process.env.GCS_API_SCHEME, + projectId: process.env.GCS_PROJECT_ID + } + : undefined, + unlockBeforeDelete: process.env.GCS_UNLOCK_BEFORE_DELETE === 'true', // unlock an event-based hold before deleting. default false + deletedBucketSuffix: process.env.GCS_DELETED_BUCKET_SUFFIX, // if present, copy file to another bucket on delete. default null + deleteConcurrency: parseInt(process.env.GCS_DELETE_CONCURRENCY) || 50, + signedUrlExpiryInMs: parseInt(process.env.LINK_EXPIRY_TIMEOUT || 60000) + }, - s3: - (process.env.AWS_ACCESS_KEY_ID != null) || (process.env.S3_BUCKET_CREDENTIALS != null) ?{ - key: process.env.AWS_ACCESS_KEY_ID, - secret: process.env.AWS_SECRET_ACCESS_KEY, - endpoint: process.env.AWS_S3_ENDPOINT, - pathStyle: process.env.AWS_S3_PATH_STYLE, - partSize: process.env.AWS_S3_PARTSIZE || (100 * 1024 * 1024), - bucketCreds: ((process.env.S3_BUCKET_CREDENTIALS != null) ? JSON.parse(process.env.S3_BUCKET_CREDENTIALS) : undefined) - } : undefined, + s3: + process.env.AWS_ACCESS_KEY_ID != null || + process.env.S3_BUCKET_CREDENTIALS != null + ? { + key: process.env.AWS_ACCESS_KEY_ID, + secret: process.env.AWS_SECRET_ACCESS_KEY, + endpoint: process.env.AWS_S3_ENDPOINT, + pathStyle: process.env.AWS_S3_PATH_STYLE, + partSize: process.env.AWS_S3_PARTSIZE || 100 * 1024 * 1024, + bucketCreds: + process.env.S3_BUCKET_CREDENTIALS != null + ? JSON.parse(process.env.S3_BUCKET_CREDENTIALS) + : undefined + } + : undefined, - // GCS should be configured by the service account on the kubernetes pod. See GOOGLE_APPLICATION_CREDENTIALS, - // which will be picked up automatically. + // GCS should be configured by the service account on the kubernetes pod. 
See GOOGLE_APPLICATION_CREDENTIALS, + // which will be picked up automatically. - stores: { - user_files: process.env.USER_FILES_BUCKET_NAME, - template_files: process.env.TEMPLATE_FILES_BUCKET_NAME, - public_files: process.env.PUBLIC_FILES_BUCKET_NAME - }, + stores: { + user_files: process.env.USER_FILES_BUCKET_NAME, + template_files: process.env.TEMPLATE_FILES_BUCKET_NAME, + public_files: process.env.PUBLIC_FILES_BUCKET_NAME + }, - fallback: - (process.env.FALLBACK_BACKEND != null) ?{ - backend: process.env.FALLBACK_BACKEND, - // mapping of bucket names on the fallback, to bucket names on the primary. - // e.g. { myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } - buckets: JSON.parse(process.env.FALLBACK_BUCKET_MAPPING || '{}'), - copyOnMiss: process.env.COPY_ON_MISS === 'true' - } : undefined, + fallback: + process.env.FALLBACK_BACKEND != null + ? { + backend: process.env.FALLBACK_BACKEND, + // mapping of bucket names on the fallback, to bucket names on the primary. + // e.g. { myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } + buckets: JSON.parse(process.env.FALLBACK_BUCKET_MAPPING || '{}'), + copyOnMiss: process.env.COPY_ON_MISS === 'true' + } + : undefined, - allowRedirects: process.env.ALLOW_REDIRECTS === 'true' - }, + allowRedirects: process.env.ALLOW_REDIRECTS === 'true' + }, - path: { - uploadFolder: Path.resolve(__dirname + "/../uploads") - }, + path: { + uploadFolder: Path.resolve(__dirname + '/../uploads') + }, - commands: { - // Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"] - convertCommandPrefix: [] - }, + commands: { + // Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"] + convertCommandPrefix: [] + }, - enableConversions: process.env.ENABLE_CONVERSIONS === 'true', + enableConversions: process.env.ENABLE_CONVERSIONS === 'true', - sentry: { - dsn: process.env.SENTRY_DSN - } -}; + sentry: { + dsn: process.env.SENTRY_DSN + } +} // Filestore health check // ---------------------- // Project and file details to check in persistor when calling /health_check -if ((process.env.HEALTH_CHECK_PROJECT_ID != null) && (process.env.HEALTH_CHECK_FILE_ID != null)) { - settings.health_check = { - project_id: process.env.HEALTH_CHECK_PROJECT_ID, - file_id: process.env.HEALTH_CHECK_FILE_ID - }; +if ( + process.env.HEALTH_CHECK_PROJECT_ID != null && + process.env.HEALTH_CHECK_FILE_ID != null +) { + settings.health_check = { + project_id: process.env.HEALTH_CHECK_PROJECT_ID, + file_id: process.env.HEALTH_CHECK_FILE_ID + } } -module.exports = settings; +module.exports = settings From 0649773c152daca7e2ee522c9f06de6ea7ce3a00 Mon Sep 17 00:00:00 2001 From: Alf Eaton Date: Tue, 25 May 2021 15:54:50 +0100 Subject: [PATCH 547/555] Manual tidying --- .../filestore/config/settings.defaults.js | 53 +++++++------------ 1 file changed, 19 insertions(+), 34 deletions(-) diff --git a/services/filestore/config/settings.defaults.js b/services/filestore/config/settings.defaults.js index 1d84cf529a..d10e17ac3b 100644 --- a/services/filestore/config/settings.defaults.js +++ b/services/filestore/config/settings.defaults.js @@ -1,13 +1,3 @@ -/* eslint-disable - no-path-concat, -*/ -// TODO: This file was created by bulk-decaffeinate. -// Fix any style issues and re-enable lint. 
-/* - * decaffeinate suggestions: - * DS207: Consider shorter variations of null checks - * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md - */ const Path = require('path') // environment variables renamed for consistency @@ -21,10 +11,7 @@ if (process.env.AWS_SECRET && !process.env.AWS_SECRET_ACCESS_KEY) { // pre-backend setting, fall back to old behaviour if (process.env.BACKEND == null) { - if ( - process.env.AWS_ACCESS_KEY_ID != null || - process.env.S3_BUCKET_CREDENTIALS != null - ) { + if (process.env.AWS_ACCESS_KEY_ID || process.env.S3_BUCKET_CREDENTIALS) { process.env.BACKEND = 's3' process.env.USER_FILES_BUCKET_NAME = process.env.AWS_S3_USER_FILES_BUCKET_NAME @@ -35,12 +22,15 @@ if (process.env.BACKEND == null) { } else { process.env.BACKEND = 'fs' process.env.USER_FILES_BUCKET_NAME = Path.resolve( + // eslint-disable-next-line no-path-concat __dirname + '/../user_files' ) process.env.TEMPLATE_FILES_BUCKET_NAME = Path.resolve( + // eslint-disable-next-line no-path-concat __dirname + '/../template_files' ) process.env.PUBLIC_FILES_BUCKET_NAME = Path.resolve( + // eslint-disable-next-line no-path-concat __dirname + '/../public_files' ) } @@ -77,18 +67,16 @@ const settings = { }, s3: - process.env.AWS_ACCESS_KEY_ID != null || - process.env.S3_BUCKET_CREDENTIALS != null + process.env.AWS_ACCESS_KEY_ID || process.env.S3_BUCKET_CREDENTIALS ? { key: process.env.AWS_ACCESS_KEY_ID, secret: process.env.AWS_SECRET_ACCESS_KEY, endpoint: process.env.AWS_S3_ENDPOINT, pathStyle: process.env.AWS_S3_PATH_STYLE, partSize: process.env.AWS_S3_PARTSIZE || 100 * 1024 * 1024, - bucketCreds: - process.env.S3_BUCKET_CREDENTIALS != null - ? JSON.parse(process.env.S3_BUCKET_CREDENTIALS) - : undefined + bucketCreds: process.env.S3_BUCKET_CREDENTIALS + ? JSON.parse(process.env.S3_BUCKET_CREDENTIALS) + : undefined } : undefined, @@ -101,21 +89,21 @@ const settings = { public_files: process.env.PUBLIC_FILES_BUCKET_NAME }, - fallback: - process.env.FALLBACK_BACKEND != null - ? { - backend: process.env.FALLBACK_BACKEND, - // mapping of bucket names on the fallback, to bucket names on the primary. - // e.g. { myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } - buckets: JSON.parse(process.env.FALLBACK_BUCKET_MAPPING || '{}'), - copyOnMiss: process.env.COPY_ON_MISS === 'true' - } - : undefined, + fallback: process.env.FALLBACK_BACKEND + ? { + backend: process.env.FALLBACK_BACKEND, + // mapping of bucket names on the fallback, to bucket names on the primary. + // e.g. 
{ myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } + buckets: JSON.parse(process.env.FALLBACK_BUCKET_MAPPING || '{}'), + copyOnMiss: process.env.COPY_ON_MISS === 'true' + } + : undefined, allowRedirects: process.env.ALLOW_REDIRECTS === 'true' }, path: { + // eslint-disable-next-line no-path-concat uploadFolder: Path.resolve(__dirname + '/../uploads') }, @@ -134,10 +122,7 @@ const settings = { // Filestore health check // ---------------------- // Project and file details to check in persistor when calling /health_check -if ( - process.env.HEALTH_CHECK_PROJECT_ID != null && - process.env.HEALTH_CHECK_FILE_ID != null -) { +if (process.env.HEALTH_CHECK_PROJECT_ID && process.env.HEALTH_CHECK_FILE_ID) { settings.health_check = { project_id: process.env.HEALTH_CHECK_PROJECT_ID, file_id: process.env.HEALTH_CHECK_FILE_ID From 0e0841e789461a003c9e89d5a746caa5b616d328 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Jun 2021 02:20:30 +0000 Subject: [PATCH 548/555] Bump glob-parent from 5.1.0 to 5.1.2 Bumps [glob-parent](https://github.com/gulpjs/glob-parent) from 5.1.0 to 5.1.2. - [Release notes](https://github.com/gulpjs/glob-parent/releases) - [Changelog](https://github.com/gulpjs/glob-parent/blob/main/CHANGELOG.md) - [Commits](https://github.com/gulpjs/glob-parent/compare/v5.1.0...v5.1.2) --- updated-dependencies: - dependency-name: glob-parent dependency-type: indirect ... Signed-off-by: dependabot[bot] --- services/filestore/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index f27ecdf63b..748cdd120e 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -3197,9 +3197,9 @@ } }, "glob-parent": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz", - "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, "requires": { "is-glob": "^4.0.1" From 8d6f169c66ddb0cf00477fedeeacf928fb1f68ef Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 12 Jul 2021 17:35:49 +0100 Subject: [PATCH 549/555] [misc] install bunyan as production dependency ``` Error: Cannot find module 'bunyan' Require stack: - .../node_modules/@google-cloud/logging-bunyan/build/src/middleware/express.js - .../node_modules/@google-cloud/logging-bunyan/build/src/index.js - .../node_modules/logger-sharelatex/logging-manager.js - .../node_modules/logger-sharelatex/index.js - .../app.js ``` --- services/filestore/package-lock.json | 20 ++++++++++---------- services/filestore/package.json | 2 +- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index f27ecdf63b..32c09f8489 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -1740,9 +1740,9 @@ "integrity": "sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==" }, "bunyan": { - "version": "1.8.14", - "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.14.tgz", - "integrity": 
"sha512-LlahJUxXzZLuw/hetUQJmRgZ1LF6+cr5TPpRj6jf327AsiIq2jhYEH4oqUUkVKTor+9w2BT3oxVwhzE5lw9tcg==", + "version": "1.8.15", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.15.tgz", + "integrity": "sha512-0tECWShh6wUysgucJcBAoYegf3JJoZWibxdqhTm7OHPeT42qdjkZ29QCMcKwbgU1kiH+auSIasNRXMLWXafXig==", "requires": { "dtrace-provider": "~0.8", "moment": "^2.19.3", @@ -4365,9 +4365,9 @@ "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" }, "moment": { - "version": "2.24.0", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz", - "integrity": "sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg==", + "version": "2.29.1", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz", + "integrity": "sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==", "optional": true }, "mongodb": { @@ -4398,7 +4398,7 @@ "mv": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", - "integrity": "sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg==", + "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=", "optional": true, "requires": { "mkdirp": "~0.5.1", @@ -4409,7 +4409,7 @@ "glob": { "version": "6.0.4", "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "integrity": "sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A==", + "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", "optional": true, "requires": { "inflight": "^1.0.4", @@ -4422,7 +4422,7 @@ "rimraf": { "version": "2.4.5", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", - "integrity": "sha512-J5xnxTyqaiw06JjMftq7L9ouA448dw/E7dKghkP9WpKNuwmARNNg+Gk8/u5ryb9N/Yo2+z3MCwuqFK/+qPOPfQ==", + "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=", "optional": true, "requires": { "glob": "^6.0.1" @@ -4444,7 +4444,7 @@ "ncp": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", - "integrity": "sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA==", + "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=", "optional": true }, "needle": { diff --git a/services/filestore/package.json b/services/filestore/package.json index 66a7336c82..89ab38c935 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -24,6 +24,7 @@ "@overleaf/o-error": "^3.0.0", "@overleaf/object-persistor": "https://github.com/overleaf/object-persistor/archive/8fbc9ed03206bfb54368578d22b7ac4f285baa25.tar.gz", "body-parser": "^1.19.0", + "bunyan": "^1.8.15", "express": "^4.17.1", "fast-crc32c": "^2.0.0", "glob": "^7.1.6", @@ -41,7 +42,6 @@ "@google-cloud/storage": "^5.1.2", "aws-sdk": "^2.718.0", "babel-eslint": "^10.1.0", - "bunyan": "^1.8.14", "chai": "4.2.0", "chai-as-promised": "^7.1.1", "disrequire": "^1.1.0", From 3152bf361d3f59308c9abb62a498b4cd80f74c32 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 12 Jul 2021 17:47:19 +0100 Subject: [PATCH 550/555] [misc] switch from settings-sharelatex to @overleaf/settings --- services/filestore/app.js | 2 +- services/filestore/app/js/FileConverter.js | 2 +- services/filestore/app/js/FileHandler.js | 2 +- .../filestore/app/js/HealthCheckController.js | 2 +- services/filestore/app/js/KeyBuilder.js | 2 +- services/filestore/app/js/LocalFileWriter.js | 2 +- services/filestore/app/js/PersistorManager.js | 2 +- services/filestore/app/js/SafeExec.js | 2 +- services/filestore/package-lock.json | 18 
+++++------------- services/filestore/package.json | 2 +- .../test/acceptance/js/FilestoreApp.js | 2 +- .../test/acceptance/js/FilestoreTests.js | 2 +- .../test/unit/js/FileControllerTests.js | 2 +- .../test/unit/js/FileConverterTests.js | 2 +- .../filestore/test/unit/js/FileHandlerTests.js | 2 +- .../filestore/test/unit/js/KeybuilderTests.js | 2 +- .../test/unit/js/LocalFileWriterTests.js | 2 +- .../filestore/test/unit/js/SafeExecTests.js | 2 +- .../filestore/test/unit/js/SettingsTests.js | 2 +- 19 files changed, 23 insertions(+), 31 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index 5f59b13bed..4a5f34fc5b 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -4,7 +4,7 @@ Metrics.initialize(process.env.METRICS_APP_NAME || 'filestore') const logger = require('logger-sharelatex') logger.initialize(process.env.METRICS_APP_NAME || 'filestore') -const settings = require('settings-sharelatex') +const settings = require('@overleaf/settings') const express = require('express') const bodyParser = require('body-parser') diff --git a/services/filestore/app/js/FileConverter.js b/services/filestore/app/js/FileConverter.js index 18137f16b7..3a8549fd6c 100644 --- a/services/filestore/app/js/FileConverter.js +++ b/services/filestore/app/js/FileConverter.js @@ -1,5 +1,5 @@ const metrics = require('@overleaf/metrics') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const { callbackify } = require('util') const safeExec = require('./SafeExec').promises diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index 7f476e3630..872ab65746 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -1,4 +1,4 @@ -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const { callbackify } = require('util') const fs = require('fs') const PersistorManager = require('./PersistorManager') diff --git a/services/filestore/app/js/HealthCheckController.js b/services/filestore/app/js/HealthCheckController.js index c71b567fe0..9f32f479fd 100644 --- a/services/filestore/app/js/HealthCheckController.js +++ b/services/filestore/app/js/HealthCheckController.js @@ -1,6 +1,6 @@ const fs = require('fs') const path = require('path') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const streamBuffers = require('stream-buffers') const { promisify } = require('util') const Stream = require('stream') diff --git a/services/filestore/app/js/KeyBuilder.js b/services/filestore/app/js/KeyBuilder.js index 9968753349..b615e69743 100644 --- a/services/filestore/app/js/KeyBuilder.js +++ b/services/filestore/app/js/KeyBuilder.js @@ -1,4 +1,4 @@ -const settings = require('settings-sharelatex') +const settings = require('@overleaf/settings') module.exports = { getConvertedFolderKey, diff --git a/services/filestore/app/js/LocalFileWriter.js b/services/filestore/app/js/LocalFileWriter.js index 501856e2f1..9b0cc7b289 100644 --- a/services/filestore/app/js/LocalFileWriter.js +++ b/services/filestore/app/js/LocalFileWriter.js @@ -4,7 +4,7 @@ const path = require('path') const Stream = require('stream') const { callbackify, promisify } = require('util') const metrics = require('@overleaf/metrics') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const { WriteError } = require('./Errors') module.exports = { diff --git 
a/services/filestore/app/js/PersistorManager.js b/services/filestore/app/js/PersistorManager.js index 1af996a7ab..3a57723be2 100644 --- a/services/filestore/app/js/PersistorManager.js +++ b/services/filestore/app/js/PersistorManager.js @@ -1,4 +1,4 @@ -const settings = require('settings-sharelatex') +const settings = require('@overleaf/settings') const persistorSettings = settings.filestore persistorSettings.Metrics = require('@overleaf/metrics') diff --git a/services/filestore/app/js/SafeExec.js b/services/filestore/app/js/SafeExec.js index 6bdae0ea34..655ecf2f5c 100644 --- a/services/filestore/app/js/SafeExec.js +++ b/services/filestore/app/js/SafeExec.js @@ -1,6 +1,6 @@ const lodashOnce = require('lodash.once') const childProcess = require('child_process') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const { ConversionsDisabledError, FailedCommandError } = require('./Errors') // execute a command in the same way as 'exec' but with a timeout that diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 32c09f8489..01303d74ba 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -1143,6 +1143,11 @@ "tiny-async-pool": "^1.1.0" } }, + "@overleaf/settings": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@overleaf/settings/-/settings-2.1.1.tgz", + "integrity": "sha512-vcJwqCGFKmQxTP/syUqCeMaSRjHmBcQgKOACR9He2uJcErg2GZPa1go+nGvszMbkElM4HfRKm/MfxvqHhoN4TQ==" + }, "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", @@ -1911,11 +1916,6 @@ "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" }, - "coffee-script": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz", - "integrity": "sha512-Tx8itEfCsQp8RbLDFt7qwjqXycAx2g6SI7//4PPUR2j6meLmNifYm6zKrNDcU1+Q/GWRhjhEZk7DaLG1TfIzGA==" - }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -6095,14 +6095,6 @@ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" }, - "settings-sharelatex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/settings-sharelatex/-/settings-sharelatex-1.1.0.tgz", - "integrity": "sha512-f7D+0lnlohoteSn6IKTH72NE+JnAdMWTKwQglAuimZWTID2FRRItZSGeYMTRpvEnaQApkoVwRp//WRMsiddnqw==", - "requires": { - "coffee-script": "1.6.0" - } - }, "shebang-command": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", diff --git a/services/filestore/package.json b/services/filestore/package.json index 89ab38c935..6cb9dab872 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -23,6 +23,7 @@ "@overleaf/metrics": "^3.5.1", "@overleaf/o-error": "^3.0.0", "@overleaf/object-persistor": "https://github.com/overleaf/object-persistor/archive/8fbc9ed03206bfb54368578d22b7ac4f285baa25.tar.gz", + "@overleaf/settings": "^2.1.1", "body-parser": "^1.19.0", "bunyan": "^1.8.15", "express": "^4.17.1", @@ -34,7 +35,6 @@ "range-parser": "^1.2.1", "request": "^2.88.2", "request-promise-native": "^1.0.8", - "settings-sharelatex": "^1.1.0", "stream-buffers": "~0.2.6", "tiny-async-pool": "^1.1.0" }, 
diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index acb6b71828..495edb2273 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -1,5 +1,5 @@ const logger = require('logger-sharelatex') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const fs = require('fs') const Path = require('path') const { promisify } = require('util') diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index 7c7cb90e54..b7a2744ed3 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -1,7 +1,7 @@ const chai = require('chai') const { expect } = chai const fs = require('fs') -const Settings = require('settings-sharelatex') +const Settings = require('@overleaf/settings') const Path = require('path') const FilestoreApp = require('./FilestoreApp') const TestHelper = require('./TestHelper') diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js index 6a5ae729c8..f731c8f212 100644 --- a/services/filestore/test/unit/js/FileControllerTests.js +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -58,7 +58,7 @@ describe('FileController', function () { './PersistorManager': PersistorManager, './Errors': Errors, stream: stream, - 'settings-sharelatex': settings, + '@overleaf/settings': settings, '@overleaf/metrics': { inc() {} } diff --git a/services/filestore/test/unit/js/FileConverterTests.js b/services/filestore/test/unit/js/FileConverterTests.js index b6f59348d5..923983a7ae 100644 --- a/services/filestore/test/unit/js/FileConverterTests.js +++ b/services/filestore/test/unit/js/FileConverterTests.js @@ -32,7 +32,7 @@ describe('FileConverter', function () { inc: sinon.stub(), Timer: sinon.stub().returns({ done: sinon.stub() }) }, - 'settings-sharelatex': Settings, + '@overleaf/settings': Settings, '@overleaf/object-persistor': ObjectPersistor } }) diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index 90bc06832e..23a0922c0e 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -81,7 +81,7 @@ describe('FileHandler', function () { './FileConverter': FileConverter, './KeyBuilder': KeyBuilder, './ImageOptimiser': ImageOptimiser, - 'settings-sharelatex': Settings, + '@overleaf/settings': Settings, '@overleaf/object-persistor': ObjectPersistor, fs: fs }, diff --git a/services/filestore/test/unit/js/KeybuilderTests.js b/services/filestore/test/unit/js/KeybuilderTests.js index d72e65df5b..883364fb22 100644 --- a/services/filestore/test/unit/js/KeybuilderTests.js +++ b/services/filestore/test/unit/js/KeybuilderTests.js @@ -8,7 +8,7 @@ describe('KeybuilderTests', function () { beforeEach(function () { KeyBuilder = SandboxedModule.require(modulePath, { - requires: { 'settings-sharelatex': {} } + requires: { '@overleaf/settings': {} } }) }) diff --git a/services/filestore/test/unit/js/LocalFileWriterTests.js b/services/filestore/test/unit/js/LocalFileWriterTests.js index 5fb9e56dca..3635b9a994 100644 --- a/services/filestore/test/unit/js/LocalFileWriterTests.js +++ b/services/filestore/test/unit/js/LocalFileWriterTests.js @@ -29,7 +29,7 @@ describe('LocalFileWriter', 
function () { requires: { fs, stream, - 'settings-sharelatex': settings, + '@overleaf/settings': settings, '@overleaf/metrics': { inc: sinon.stub(), Timer: sinon.stub().returns({ done: sinon.stub() }) diff --git a/services/filestore/test/unit/js/SafeExecTests.js b/services/filestore/test/unit/js/SafeExecTests.js index c4b59e70e4..0bcfb44133 100644 --- a/services/filestore/test/unit/js/SafeExecTests.js +++ b/services/filestore/test/unit/js/SafeExecTests.js @@ -17,7 +17,7 @@ describe('SafeExec', function () { safeExec = SandboxedModule.require(modulePath, { globals: { process }, requires: { - 'settings-sharelatex': settings, + '@overleaf/settings': settings, '@overleaf/object-persistor': ObjectPersistor } }) diff --git a/services/filestore/test/unit/js/SettingsTests.js b/services/filestore/test/unit/js/SettingsTests.js index 68bc580ca6..7ddff93c5c 100644 --- a/services/filestore/test/unit/js/SettingsTests.js +++ b/services/filestore/test/unit/js/SettingsTests.js @@ -12,7 +12,7 @@ describe('Settings', function () { } } process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3Settings) - const settings = SandboxedModule.require('settings-sharelatex', { + const settings = SandboxedModule.require('@overleaf/settings', { globals: { console, process } }) expect(settings.filestore.s3.bucketCreds).to.deep.equal(s3Settings) From d1ddd8c968dccaa193219b088c8fc645b97902eb Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Mon, 12 Jul 2021 17:51:05 +0100 Subject: [PATCH 551/555] [misc] run npm dedupe --- services/filestore/package-lock.json | 69 +++++----------------------- 1 file changed, 11 insertions(+), 58 deletions(-) diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 01303d74ba..3b30103bc1 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -223,11 +223,6 @@ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz", "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==" }, - "coffeescript": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", - "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" - }, "duplexify": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", @@ -712,30 +707,13 @@ } }, "gcp-metadata": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", + "version": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==", "requires": { "gaxios": "^3.0.0", "json-bigint": "^1.0.0" } }, - "google-auth-library": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.5.tgz", - "integrity": "sha512-Wj31lfTm2yR4g3WfOOB1Am1tt478Xq9OvzTPQJi17tn/I9R5IcsxjANBsE93nYmxYxtwDedhOdIb8l3vSPG49Q==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^3.0.0", - "gcp-metadata": "^4.1.0", - "gtoken": "^5.0.0", - "jws": "^4.0.0", - "lru-cache": "^6.0.0" - } - }, "google-p12-pem": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz", @@ -745,8 +723,7 @@ } }, "gtoken": { - "version": "5.0.2", - "resolved": 
"https://registry.npmjs.org/gtoken/-/gtoken-5.0.2.tgz", + "version": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.2.tgz", "integrity": "sha512-lull70rHCTvRTmAt+R/6W5bTtx4MjHku7AwJwK5fGqhOmygcZud0nrZcX+QUNfBJwCzqy7S5i1Bc4NYnr5PMMA==", "requires": { "gaxios": "^3.0.0", @@ -764,8 +741,7 @@ } }, "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "version": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "requires": { "yallist": "^4.0.0" @@ -1916,6 +1892,11 @@ "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" }, + "coffeescript": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz", + "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ==" + }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -3086,30 +3067,13 @@ } }, "gcp-metadata": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", + "version": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==", "requires": { "gaxios": "^3.0.0", "json-bigint": "^1.0.0" } }, - "google-auth-library": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.5.tgz", - "integrity": "sha512-Wj31lfTm2yR4g3WfOOB1Am1tt478Xq9OvzTPQJi17tn/I9R5IcsxjANBsE93nYmxYxtwDedhOdIb8l3vSPG49Q==", - "requires": { - "arrify": "^2.0.0", - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "fast-text-encoding": "^1.0.0", - "gaxios": "^3.0.0", - "gcp-metadata": "^4.1.0", - "gtoken": "^5.0.0", - "jws": "^4.0.0", - "lru-cache": "^6.0.0" - } - }, "google-p12-pem": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz", @@ -3119,8 +3083,7 @@ } }, "gtoken": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.2.tgz", + "version": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.2.tgz", "integrity": "sha512-lull70rHCTvRTmAt+R/6W5bTtx4MjHku7AwJwK5fGqhOmygcZud0nrZcX+QUNfBJwCzqy7S5i1Bc4NYnr5PMMA==", "requires": { "gaxios": "^3.0.0", @@ -3138,8 +3101,7 @@ } }, "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "version": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "requires": { "yallist": "^4.0.0" @@ -4073,15 +4035,6 @@ "dev": true, "requires": { "minimist": "^1.2.0" - }, - "dependencies": { - "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", - "dev": true, - "optional": true - } } }, "map-obj": { From 3003a1dded88974ce3c65bb53ddb30dbf278a9ec Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 13 Jul 2021 11:55:17 +0100 Subject: [PATCH 552/555] [misc] upgrade build scripts to version 3.11.0 and cleanup packages ``` npm uninstall prettier-eslint-cli 
eslint-plugin-standard eslint-plugin-jsx-a11y eslint-plugin-react eslint-config-standard-jsx eslint-config-standard-react babel-eslint npm dedupe ``` --- services/filestore/.eslintrc | 2 +- services/filestore/.github/dependabot.yml | 2 +- services/filestore/.prettierrc | 6 +- services/filestore/buildscript.txt | 2 +- services/filestore/package-lock.json | 2422 ++++++++++----------- services/filestore/package.json | 34 +- 6 files changed, 1209 insertions(+), 1259 deletions(-) diff --git a/services/filestore/.eslintrc b/services/filestore/.eslintrc index 321353f971..1c14f50efe 100644 --- a/services/filestore/.eslintrc +++ b/services/filestore/.eslintrc @@ -3,9 +3,9 @@ // https://github.com/sharelatex/sharelatex-dev-environment { "extends": [ + "eslint:recommended", "standard", "prettier", - "prettier/standard" ], "parserOptions": { "ecmaVersion": 2018 diff --git a/services/filestore/.github/dependabot.yml b/services/filestore/.github/dependabot.yml index e2c64a3351..c856753655 100644 --- a/services/filestore/.github/dependabot.yml +++ b/services/filestore/.github/dependabot.yml @@ -20,4 +20,4 @@ updates: # future if we reorganise teams labels: - "dependencies" - - "Team-Magma" + - "type:maintenance" diff --git a/services/filestore/.prettierrc b/services/filestore/.prettierrc index 24f9ec526f..c92c3526e7 100644 --- a/services/filestore/.prettierrc +++ b/services/filestore/.prettierrc @@ -2,6 +2,10 @@ # Instead run bin/update_build_scripts from # https://github.com/sharelatex/sharelatex-dev-environment { + "arrowParens": "avoid", "semi": false, - "singleQuote": true + "singleQuote": true, + "trailingComma": "es5", + "tabWidth": 2, + "useTabs": false } diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt index 5320893ad7..31316376cd 100644 --- a/services/filestore/buildscript.txt +++ b/services/filestore/buildscript.txt @@ -6,4 +6,4 @@ filestore --env-pass-through= --node-version=12.20.1 --public-repo=True ---script-version=3.8.0 +--script-version=3.11.0 diff --git a/services/filestore/package-lock.json b/services/filestore/package-lock.json index 3b30103bc1..67e67e4fd2 100644 --- a/services/filestore/package-lock.json +++ b/services/filestore/package-lock.json @@ -13,54 +13,11 @@ "@babel/highlight": "^7.8.3" } }, - "@babel/generator": { - "version": "7.8.6", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.8.6.tgz", - "integrity": "sha512-4bpOR5ZBz+wWcMeVtcf7FbjcFzCp+817z2/gHNncIRcM9MmKzUhtWCYAq27RAfUrAFwb+OCG1s9WEaVxfi6cjg==", - "dev": true, - "requires": { - "@babel/types": "^7.8.6", - "jsesc": "^2.5.1", - "lodash": "^4.17.13", - "source-map": "^0.5.0" - }, - "dependencies": { - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", - "dev": true - } - } - }, - "@babel/helper-function-name": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.8.3.tgz", - "integrity": "sha512-BCxgX1BC2hD/oBlIFUgOCQDOPV8nSINxCwM3o93xP4P9Fq6aV5sgv2cOOITDMtCfQ+3PvHp3l689XZvAM9QyOA==", - "dev": true, - "requires": { - "@babel/helper-get-function-arity": "^7.8.3", - "@babel/template": "^7.8.3", - "@babel/types": "^7.8.3" - } - }, - "@babel/helper-get-function-arity": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz", - "integrity": 
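Of the options added to .prettierrc earlier in this commit, arrowParens and trailingComma are the ones with a visible effect on formatted output (tabWidth 2 and useTabs false restate Prettier's defaults). A small sketch of the shape Prettier produces under the new config; the snippet itself is illustrative and not taken from the repo:

```
// Under the updated .prettierrc (semi: false, singleQuote: true, plus the new
// arrowParens: "avoid" and trailingComma: "es5"), Prettier formats code roughly like this:
const handlers = [
  key => key.trim(), // single-parameter arrows lose their parentheses ("avoid")
  (key, value) => `${key}=${value}`, // multi-parameter arrows keep them
] // a trailing comma is added after the last element of multi-line arrays/objects ("es5" positions)

console.log(handlers[1]('retries', 3)) // prints 'retries=3'
```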
"sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA==", - "dev": true, - "requires": { - "@babel/types": "^7.8.3" - } - }, - "@babel/helper-split-export-declaration": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz", - "integrity": "sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA==", - "dev": true, - "requires": { - "@babel/types": "^7.8.3" - } + "@babel/helper-validator-identifier": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.5.tgz", + "integrity": "sha512-5lsetuxCLilmVGyiLEfoHBRX8UCFD+1m2x3Rj97WrW3V7H3u4RWRXA4evMjImCsin2J2YT0QaVDGf+z8ondbAg==", + "dev": true }, "@babel/highlight": { "version": "7.8.3", @@ -73,47 +30,51 @@ "js-tokens": "^4.0.0" } }, - "@babel/parser": { - "version": "7.8.6", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.8.6.tgz", - "integrity": "sha512-trGNYSfwq5s0SgM1BMEB8hX3NDmO7EP2wsDGDexiaKMB92BaRpS+qZfpkMqUBhcsOTBwNy9B/jieo4ad/t/z2g==", - "dev": true - }, - "@babel/template": { - "version": "7.8.6", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.8.6.tgz", - "integrity": "sha512-zbMsPMy/v0PWFZEhQJ66bqjhH+z0JgMoBWuikXybgG3Gkd/3t5oQ1Rw2WQhnSrsOmsKXnZOx15tkC4qON/+JPg==", + "@eslint/eslintrc": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.2.tgz", + "integrity": "sha512-8nmGq/4ycLpIwzvhI4tNDmQztZ8sp+hI7cyG8i1nQDhkAbRzHpXPidRAHlNvCZQpJTKw5ItIpMw9RSToGF00mg==", "dev": true, "requires": { - "@babel/code-frame": "^7.8.3", - "@babel/parser": "^7.8.6", - "@babel/types": "^7.8.6" - } - }, - "@babel/traverse": { - "version": "7.8.6", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.8.6.tgz", - "integrity": "sha512-2B8l0db/DPi8iinITKuo7cbPznLCEk0kCxDoB9/N6gGNg/gxOXiR/IcymAFPiBwk5w6TtQ27w4wpElgp9btR9A==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.8.3", - "@babel/generator": "^7.8.6", - "@babel/helper-function-name": "^7.8.3", - "@babel/helper-split-export-declaration": "^7.8.3", - "@babel/parser": "^7.8.6", - "@babel/types": "^7.8.6", - "debug": "^4.1.0", - "globals": "^11.1.0", - "lodash": "^4.17.13" + "ajv": "^6.12.4", + "debug": "^4.1.1", + "espree": "^7.3.0", + "globals": "^13.9.0", + "ignore": "^4.0.6", + "import-fresh": "^3.2.1", + "js-yaml": "^3.13.1", + "minimatch": "^3.0.4", + "strip-json-comments": "^3.1.1" }, "dependencies": { - "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, "requires": { - "ms": "^2.1.1" + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "debug": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, + "globals": { + "version": "13.10.0", + "resolved": 
"https://registry.npmjs.org/globals/-/globals-13.10.0.tgz", + "integrity": "sha512-piHC3blgLGFjvOuMmWZX60f+na1lXFDhQXBf1UYp2fXPXqvEUbOhNwi6BsQ0bQishwedgnjkwv1d9zKf+MWw3g==", + "dev": true, + "requires": { + "type-fest": "^0.20.2" } }, "ms": { @@ -121,20 +82,15 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true + }, + "type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true } } }, - "@babel/types": { - "version": "7.8.6", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.8.6.tgz", - "integrity": "sha512-wqz7pgWMIrht3gquyEFPVXeXCti72Rm8ep9b5tQKz9Yg9LzJA3HxosF1SB3Kc81KD1A3XBkkVYtJvCKS2Z/QrA==", - "dev": true, - "requires": { - "esutils": "^2.0.2", - "lodash": "^4.17.13", - "to-fast-properties": "^2.0.0" - } - }, "@google-cloud/common": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-2.4.0.tgz", @@ -310,14 +266,6 @@ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" }, - "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "requires": { - "yocto-queue": "^0.1.0" - } - }, "readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", @@ -752,14 +700,6 @@ "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz", "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==" }, - "p-limit": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.0.2.tgz", - "integrity": "sha512-iwqZSOoWIW+Ew4kAGUlN16J4M7OB3ysMLSZtnhmqx7njIHFPlxWBX8xo3lVTyFVq6mI/lL9qt2IsN1sHwaxJkg==", - "requires": { - "p-try": "^2.0.0" - } - }, "readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", @@ -1042,6 +982,40 @@ "protobufjs": "^6.8.6" } }, + "@humanwhocodes/config-array": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", + "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", + "dev": true, + "requires": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + }, + "dependencies": { + "debug": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, + "@humanwhocodes/object-schema": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.0.tgz", + "integrity": 
"sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==", + "dev": true + }, "@opencensus/core": { "version": "0.0.20", "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.20.tgz", @@ -1253,12 +1227,6 @@ "integrity": "sha512-7+2BITlgjgDhH0vvwZU/HZJVyk+2XUlvxXe8dFMedNX/aMkaOq++rMAFXc0tM7ij15QaWlbdQASBR9dihi+bDQ==", "dev": true }, - "@types/json5": { - "version": "0.0.29", - "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", - "dev": true - }, "@types/long": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz", @@ -1327,6 +1295,12 @@ } } }, + "@ungap/promise-all-settled": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz", + "integrity": "sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q==", + "dev": true + }, "abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", @@ -1396,33 +1370,21 @@ } }, "ansi-colors": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.3.tgz", - "integrity": "sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", "dev": true }, "ansi-escapes": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.0.tgz", - "integrity": "sha512-EiYhwo0v255HUL6eDyuLrXEkTi7WwVCLAw+SeOQ7M7qdun1z1pum4DEm/nuqIVbPvi9RPPc9k9LbyBv6H0DwVg==", - "dev": true, - "requires": { - "type-fest": "^0.8.1" - }, - "dependencies": { - "type-fest": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", - "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", - "dev": true - } - } + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", + "dev": true }, "ansi-regex": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==" }, "ansi-styles": { "version": "3.2.1", @@ -1434,9 +1396,9 @@ } }, "anymatch": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", - "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", "dev": true, "requires": { "normalize-path": "^3.0.0", @@ -1472,24 +1434,27 @@ "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" }, "array-includes": { - "version": "3.1.1", 
- "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.1.tgz", - "integrity": "sha512-c2VXaCHl7zPsvpkFsw4nxvFie4fh1ur9bpcgsVkIjqn0H/Xwdg+7fv3n2r/isyS8EBj5b06M9kHyZuIr4El6WQ==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.3.tgz", + "integrity": "sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A==", "dev": true, "requires": { + "call-bind": "^1.0.2", "define-properties": "^1.1.3", - "es-abstract": "^1.17.0", + "es-abstract": "^1.18.0-next.2", + "get-intrinsic": "^1.1.1", "is-string": "^1.0.5" } }, "array.prototype.flat": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.3.tgz", - "integrity": "sha512-gBlRZV0VSmfPIeWfuuy56XZMvbVfbEUnOXUvt3F/eUUUSyzlgLxhEX4YAEpxNAogRGehPSnfXyPtYyKAhkzQhQ==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz", + "integrity": "sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg==", "dev": true, "requires": { + "call-bind": "^1.0.0", "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1" + "es-abstract": "^1.18.0-next.1" } }, "arrify": { @@ -1576,20 +1541,6 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.9.1.tgz", "integrity": "sha512-wMHVg2EOHaMRxbzgFJ9gtjOOCrI80OHLG14rxi28XwOW8ux6IiEbRCGGGqCtdAIg4FQCbW20k9RsT4y3gJlFug==" }, - "babel-eslint": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.1.0.tgz", - "integrity": "sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.0.0", - "@babel/parser": "^7.7.0", - "@babel/traverse": "^7.7.0", - "@babel/types": "^7.7.0", - "eslint-visitor-keys": "^1.0.0", - "resolve": "^1.12.0" - } - }, "balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", @@ -1614,9 +1565,9 @@ "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ==" }, "binary-extensions": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.1.0.tgz", - "integrity": "sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", "dev": true }, "bindings": { @@ -1659,12 +1610,6 @@ "type-is": "~1.6.17" } }, - "boolify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/boolify/-/boolify-1.0.1.tgz", - "integrity": "sha512-ma2q0Tc760dW54CdOyJjhrg/a54317o1zYADQJFgperNGKIKgAUGIcKnuMiff8z57+yGlrGNEt4lPgZfCgTJgA==", - "dev": true - }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -1736,6 +1681,16 @@ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": 
"sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, "callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -1743,22 +1698,11 @@ "dev": true }, "camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", + "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", "dev": true }, - "camelcase-keys": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.1.2.tgz", - "integrity": "sha512-QfFrU0CIw2oltVvpndW32kuJ/9YOJwUnmWrjlXt1nnJZHCaS9i6bfOpg9R4Lw8aZjStkJWM+jc0cdXjWBgVJSw==", - "dev": true, - "requires": { - "camelcase": "^5.3.1", - "map-obj": "^4.0.0", - "quick-lru": "^4.0.1" - } - }, "caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", @@ -1816,19 +1760,18 @@ "dev": true }, "chokidar": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.3.0.tgz", - "integrity": "sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A==", + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz", + "integrity": "sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==", "dev": true, "requires": { "anymatch": "~3.1.1", "braces": "~3.0.2", - "fsevents": "~2.1.1", "glob-parent": "~5.1.0", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", - "readdirp": "~3.2.0" + "readdirp": "~3.5.0" } }, "chownr": { @@ -1837,12 +1780,12 @@ "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" }, "cli-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha512-8lgKz8LmCRYZZQDpRyT2m5rKJ08TnU4tR9FFFW2rxpxR1FzWi4PQ/NfyODchAatHaUgnSPVcx/R5w6NuTBzFiw==", "dev": true, "requires": { - "restore-cursor": "^3.1.0" + "restore-cursor": "^2.0.0" } }, "cli-width": { @@ -1852,39 +1795,13 @@ "dev": true }, "cliui": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", - "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", "dev": true, "requires": { - "string-width": "^3.1.0", - "strip-ansi": "^5.2.0", - "wrap-ansi": "^5.1.0" - }, - "dependencies": { - "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", - "dev": true - }, - "is-fullwidth-code-point": { - 
"version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", - "dev": true - }, - "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - } - } + "string-width": "^4.2.0", + "wrap-ansi": "^7.0.0" } }, "code-point-at": { @@ -2006,12 +1923,6 @@ "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" }, - "contains-path": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", - "integrity": "sha512-OKZnPGeMQy2RPaUIBPFFd71iNf4791H12MCRuVQDnzGRwCYNYmTDy5pdafo2SLAcEMKzTOQnLWG4QdcjeJUMEg==", - "dev": true - }, "content-disposition": { "version": "0.5.3", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz", @@ -2113,9 +2024,9 @@ } }, "decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", "dev": true }, "deep-eql": { @@ -2184,9 +2095,9 @@ "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=" }, "diff": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", "dev": true }, "disrequire": { @@ -2287,6 +2198,15 @@ "once": "^1.4.0" } }, + "enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dev": true, + "requires": { + "ansi-colors": "^4.1.1" + } + }, "ent": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", @@ -2302,22 +2222,27 @@ } }, "es-abstract": { - "version": "1.17.6", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.6.tgz", - "integrity": "sha512-Fr89bON3WFyUi5EvAeI48QTWX0AyekGgLA8H+c+7fbfCkJwRWRMLd8CQedNEyJuoYYhmtEqY92pgte1FAhBlhw==", + "version": "1.18.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.3.tgz", + "integrity": "sha512-nQIr12dxV7SSxE6r6f1l3DtAeEYdsGpps13dR0TwJg1S8gyp4ZPgy3FZcHBgbiQqnoqSTb+oC+kO4UQ0C/J8vw==", "dev": true, "requires": { + "call-bind": "^1.0.2", "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", + "get-intrinsic": "^1.1.1", "has": "^1.0.3", - "has-symbols": "^1.0.1", - "is-callable": "^1.2.0", - "is-regex": "^1.1.0", - "object-inspect": "^1.7.0", + "has-symbols": "^1.0.2", + 
"is-callable": "^1.2.3", + "is-negative-zero": "^2.0.1", + "is-regex": "^1.1.3", + "is-string": "^1.0.6", + "object-inspect": "^1.10.3", "object-keys": "^1.1.1", - "object.assign": "^4.1.0", - "string.prototype.trimend": "^1.0.1", - "string.prototype.trimstart": "^1.0.1" + "object.assign": "^4.1.2", + "string.prototype.trimend": "^1.0.4", + "string.prototype.trimstart": "^1.0.4", + "unbox-primitive": "^1.0.1" } }, "es-to-primitive": { @@ -2331,6 +2256,12 @@ "is-symbol": "^1.0.2" } }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true + }, "escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", @@ -2343,66 +2274,276 @@ "dev": true }, "eslint": { - "version": "6.8.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", - "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", + "version": "7.30.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.30.0.tgz", + "integrity": "sha512-VLqz80i3as3NdloY44BQSJpFw534L9Oh+6zJOUaViV4JPd+DaHwutqP7tcpkW3YiXbK6s05RZl7yl7cQn+lijg==", "dev": true, "requires": { - "@babel/code-frame": "^7.0.0", + "@babel/code-frame": "7.12.11", + "@eslint/eslintrc": "^0.4.2", + "@humanwhocodes/config-array": "^0.5.0", "ajv": "^6.10.0", - "chalk": "^2.1.0", - "cross-spawn": "^6.0.5", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", "debug": "^4.0.1", "doctrine": "^3.0.0", - "eslint-scope": "^5.0.0", - "eslint-utils": "^1.4.3", - "eslint-visitor-keys": "^1.1.0", - "espree": "^6.1.2", - "esquery": "^1.0.1", + "enquirer": "^2.3.5", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^5.1.1", + "eslint-utils": "^2.1.0", + "eslint-visitor-keys": "^2.0.0", + "espree": "^7.3.1", + "esquery": "^1.4.0", "esutils": "^2.0.2", - "file-entry-cache": "^5.0.1", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", "functional-red-black-tree": "^1.0.1", - "glob-parent": "^5.0.0", - "globals": "^12.1.0", + "glob-parent": "^5.1.2", + "globals": "^13.6.0", "ignore": "^4.0.6", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", - "inquirer": "^7.0.0", "is-glob": "^4.0.0", "js-yaml": "^3.13.1", "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.3.0", - "lodash": "^4.17.14", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", "minimatch": "^3.0.4", - "mkdirp": "^0.5.1", "natural-compare": "^1.4.0", - "optionator": "^0.8.3", + "optionator": "^0.9.1", "progress": "^2.0.0", - "regexpp": "^2.0.1", - "semver": "^6.1.2", - "strip-ansi": "^5.2.0", - "strip-json-comments": "^3.0.1", - "table": "^5.2.3", + "regexpp": "^3.1.0", + "semver": "^7.2.1", + "strip-ansi": "^6.0.0", + "strip-json-comments": "^3.1.0", + "table": "^6.0.9", "text-table": "^0.2.0", "v8-compile-cache": "^2.0.3" }, "dependencies": { - "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "@babel/code-frame": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", + "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", "dev": true, "requires": { - "ms": "^2.1.1" + "@babel/highlight": "^7.10.4" } }, - "globals": { - 
"version": "12.3.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-12.3.0.tgz", - "integrity": "sha512-wAfjdLgFsPZsklLJvOBUBmzYE8/CwhEqSBEMRXA3qxIiNtyqvjYurAtIfDh6chlEPUfmTY3MnZh5Hfh4q0UlIw==", + "@babel/highlight": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.5.tgz", + "integrity": "sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg==", "dev": true, "requires": { - "type-fest": "^0.8.1" + "@babel/helper-validator-identifier": "^7.14.5", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "dependencies": { + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + } + } + }, + "astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "dev": true + }, + "chalk": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz", + "integrity": "sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "debug": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "dev": true, + 
"requires": { + "ms": "2.1.2" + } + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "eslint-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + }, + "dependencies": { + "eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "dev": true + } + } + }, + "eslint-visitor-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", + "dev": true + }, + "esquery": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", + "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "dev": true, + "requires": { + "estraverse": "^5.1.0" + } + }, + "estraverse": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", + "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", + "dev": true + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "requires": { + "flat-cache": "^3.0.4" + } + }, + "flat-cache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", + "dev": true, + "requires": { + "flatted": "^3.1.0", + "rimraf": "^3.0.2" + } + }, + "flatted": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.1.tgz", + "integrity": "sha512-OMQjaErSFHmHqZe+PSidH5n8j3O0F2DdnVh8JB4j4eUQ2k6KvB0qGfrKIhapvez5JerBbmWkaLYUYWISaESoXg==", + "dev": true + }, + "globals": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.10.0.tgz", + "integrity": "sha512-piHC3blgLGFjvOuMmWZX60f+na1lXFDhQXBf1UYp2fXPXqvEUbOhNwi6BsQ0bQishwedgnjkwv1d9zKf+MWw3g==", + "dev": true, + "requires": { + "type-fest": "^0.20.2" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "json-schema-traverse": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "requires": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" } }, "ms": { @@ -2411,27 +2552,172 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, + "optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "dev": true, + "requires": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + } + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true + }, + "prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true + }, + "regexpp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", + "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", + "dev": true + }, + "rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + }, + "semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + } + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true + }, + "slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dev": true, + "requires": { + "ansi-styles": 
"^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + } + } + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } + }, + "table": { + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/table/-/table-6.7.1.tgz", + "integrity": "sha512-ZGum47Yi6KOOFDE8m223td53ath2enHcYLgOCjGr5ngu8bdIARQk6mN/wRMv4yMRcHnCSnHbCEha4sobQx5yWg==", + "dev": true, + "requires": { + "ajv": "^8.0.1", + "lodash.clonedeep": "^4.5.0", + "lodash.truncate": "^4.4.2", + "slice-ansi": "^4.0.0", + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "ajv": { + "version": "8.6.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.6.1.tgz", + "integrity": "sha512-42VLtQUOLefAvKFAQIxIZDaThq6om/PrfP0CYk3/vn+y4BMNkKnbli8ON2QCiHov4KkzOSJ/xSoBJdayiiYvVQ==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + } + } + } + }, + "type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "requires": { + "prelude-ls": "^1.2.1" + } + }, "type-fest": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", - "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", "dev": true } } }, "eslint-config-prettier": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.11.0.tgz", - "integrity": "sha512-oB8cpLWSAjOVFEJhhyMZh6NOEOtBVziaqdDQ86+qhDHFbZXoRTM7pNSvFRfW/W/L/LrQ38C99J5CGuRBBzBsdA==", - "dev": true, - "requires": { - "get-stdin": "^6.0.0" - } + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.3.0.tgz", + "integrity": "sha512-BgZuLUSeKzvlL/VUjx/Yb787VQ26RU3gGjA3iiFvdsp/2bMfVIWUVP7tjxtjS0e+HP409cPlPvNkQloz8C91ew==", + "dev": true }, "eslint-config-standard": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-14.1.1.tgz", - "integrity": 
"sha512-Z9B+VR+JIXRxz21udPTL9HpFMyoMUEeX1G251EQ6e05WD9aPVtVBn09XUmZ259wCMlCDmYDSZG62Hhm+ZTJcUg==", + "version": "16.0.3", + "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-16.0.3.tgz", + "integrity": "sha512-x4fmJL5hGqNJKGHSjnLdgA6U6h1YW/G2dW9fA+cyVur4SK6lyue8+UgNKWlZtUDTXvgKDD/Oa3GQjmB5kjtVvg==", "dev": true }, "eslint-import-resolver-node": { @@ -2445,25 +2731,42 @@ } }, "eslint-module-utils": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.0.tgz", - "integrity": "sha512-6j9xxegbqe8/kZY8cYpcp0xhbK0EgJlg3g9mib3/miLaExuuwc3n5UEfSnU6hWMbT0FAYVvDbL9RrRgpUeQIvA==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.1.tgz", + "integrity": "sha512-ZXI9B8cxAJIH4nfkhTwcRTEAnrVfobYqwjWy/QMCZ8rHkZHFjf9yO4BzpiF9kCSfNlMG54eKigISHpX0+AaT4A==", "dev": true, "requires": { - "debug": "^2.6.9", + "debug": "^3.2.7", "pkg-dir": "^2.0.0" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + } } }, "eslint-plugin-chai-expect": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-chai-expect/-/eslint-plugin-chai-expect-2.1.0.tgz", - "integrity": "sha512-rd0/4mjMV6c3i0o4DKkWI4uaFN9DK707kW+/fDphaDI6HVgxXnhML9Xgt5vHnTXmSSnDhupuCFBgsEAEpchXmQ==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-expect/-/eslint-plugin-chai-expect-2.2.0.tgz", + "integrity": "sha512-ExTJKhgeYMfY8wDj3UiZmgpMKJOUHGNHmWMlxT49JUDB1vTnw0sSNfXJSxnX+LcebyBD/gudXzjzD136WqPJrQ==", "dev": true }, "eslint-plugin-chai-friendly": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.5.0.tgz", - "integrity": "sha512-Pxe6z8C9fP0pn2X2nGFU/b3GBOCM/5FVus1hsMwJsXP3R7RiXFl7g0ksJbsc0GxiLyidTW4mEFk77qsNn7Tk7g==", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.6.0.tgz", + "integrity": "sha512-Uvvv1gkbRGp/qfN15B0kQyQWg+oFA8buDSqrwmW3egNSk/FpqH2MjQqKOuKwmEL6w4QIQrIjDp+gg6kGGmD3oQ==", "dev": true }, "eslint-plugin-es": { @@ -2494,61 +2797,63 @@ } }, "eslint-plugin-import": { - "version": "2.22.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.22.0.tgz", - "integrity": "sha512-66Fpf1Ln6aIS5Gr/55ts19eUuoDhAbZgnr6UxK5hbDx6l/QgQgx61AePq+BV4PP2uXQFClgMVzep5zZ94qqsxg==", + "version": "2.23.4", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.23.4.tgz", + "integrity": "sha512-6/wP8zZRsnQFiR3iaPFgh5ImVRM1WN5NUWfTIRqwOdeiGJlBcSk82o1FEVq8yXmy4lkIzTo7YhHCIxlU/2HyEQ==", "dev": true, "requires": { - "array-includes": "^3.1.1", - "array.prototype.flat": "^1.2.3", - "contains-path": "^0.1.0", + "array-includes": "^3.1.3", + "array.prototype.flat": "^1.2.4", "debug": "^2.6.9", - "doctrine": "1.5.0", - "eslint-import-resolver-node": "^0.3.3", - "eslint-module-utils": "^2.6.0", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.4", + "eslint-module-utils": 
"^2.6.1", + "find-up": "^2.0.0", "has": "^1.0.3", + "is-core-module": "^2.4.0", "minimatch": "^3.0.4", - "object.values": "^1.1.1", - "read-pkg-up": "^2.0.0", - "resolve": "^1.17.0", + "object.values": "^1.1.3", + "pkg-up": "^2.0.0", + "read-pkg-up": "^3.0.0", + "resolve": "^1.20.0", "tsconfig-paths": "^3.9.0" }, "dependencies": { "doctrine": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", - "integrity": "sha512-lsGyRuYr4/PIB0txi+Fy2xOMI2dGaTguCaotzFGkVZuKR5usKfcRWIFKNM3QNrU7hh/+w2bwTW+ZeXPK5l8uVg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", "dev": true, "requires": { - "esutils": "^2.0.2", - "isarray": "^1.0.0" + "esutils": "^2.0.2" } }, "resolve": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz", - "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==", + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", + "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", "dev": true, "requires": { + "is-core-module": "^2.2.0", "path-parse": "^1.0.6" } } } }, "eslint-plugin-mocha": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-6.3.0.tgz", - "integrity": "sha512-Cd2roo8caAyG21oKaaNTj7cqeYRWW1I2B5SfpKRp0Ip1gkfwoR1Ow0IGlPWnNjzywdF4n+kHL8/9vM6zCJUxdg==", + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-8.2.0.tgz", + "integrity": "sha512-8oOR47Ejt+YJPNQzedbiklDqS1zurEaNrxXpRs+Uk4DMDPVmKNagShFeUaYsfvWP55AhI+P1non5QZAHV6K78A==", "dev": true, "requires": { - "eslint-utils": "^2.0.0", - "ramda": "^0.27.0" + "eslint-utils": "^2.1.0", + "ramda": "^0.27.1" }, "dependencies": { "eslint-utils": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz", - "integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", "dev": true, "requires": { "eslint-visitor-keys": "^1.1.0" @@ -2602,20 +2907,33 @@ "integrity": "sha512-VoM09vT7bfA7D+upt+FjeBO5eHIJQBUWki1aPvB+vbNiHS3+oGIJGIeyBtKQTME6UPXXy3vV07OL1tHd3ANuDw==", "dev": true }, - "eslint-plugin-standard": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-standard/-/eslint-plugin-standard-4.0.1.tgz", - "integrity": "sha512-v/KBnfyaOMPmZc/dmc6ozOdWqekGp7bBGq4jLAecEfPGmfKiWS4sA8sC0LqiV9w5qmXAtXVn4M3p1jSyhY85SQ==", - "dev": true - }, "eslint-scope": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz", - "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", "dev": true, "requires": { - "esrecurse": "^4.1.0", + "esrecurse": "^4.3.0", "estraverse": "^4.1.1" + }, + "dependencies": { + 
"esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "requires": { + "estraverse": "^5.2.0" + }, + "dependencies": { + "estraverse": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", + "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", + "dev": true + } + } + } } }, "eslint-utils": { @@ -2634,20 +2952,32 @@ "dev": true }, "espree": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/espree/-/espree-6.1.2.tgz", - "integrity": "sha512-2iUPuuPP+yW1PZaMSDM9eyVf8D5P0Hi8h83YtZ5bPc/zHYjII5khoixIUTMO794NOY8F/ThF1Bo8ncZILarUTA==", + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz", + "integrity": "sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==", "dev": true, "requires": { - "acorn": "^7.1.0", - "acorn-jsx": "^5.1.0", - "eslint-visitor-keys": "^1.1.0" + "acorn": "^7.4.0", + "acorn-jsx": "^5.3.1", + "eslint-visitor-keys": "^1.3.0" }, "dependencies": { "acorn": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz", - "integrity": "sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==", + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true + }, + "acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true + }, + "eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", "dev": true } } @@ -2813,9 +3143,9 @@ "integrity": "sha512-R9bHCvweUxxwkDwhjav5vxpFvdPGlVngtqmx4pIZfSUhM/Q4NiIUHB456BAf+Q1Nwu3HEZYONtu+Rya+af4jiQ==" }, "figures": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", - "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": "sha512-Oa2M9atig69ZkfwiApY8F2Yy+tzMbazyvqv21R0NsSC8floSOC09BbT1ITWAdoMGQvJ/aZnR1KMwdx9tvHnTNA==", "dev": true, "requires": { "escape-string-regexp": "^1.0.5" @@ -2861,7 +3191,7 @@ "find-up": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", "dev": true, "requires": { "locate-path": "^2.0.0" @@ -2873,13 +3203,10 @@ "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" }, "flat": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/flat/-/flat-4.1.0.tgz", - "integrity": "sha512-Px/TiLIznH7gEDlPXcUD4KnBusa6kR6ayRUVcnEAbreRIuhkqow/mun59BuRXwoYk7ZQOLW1ZM05ilIvK38hFw==", - "dev": true, - "requires": { - "is-buffer": "~2.0.3" - } + "version": 
"5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true }, "flat-cache": { "version": "2.0.1", @@ -2947,13 +3274,6 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, - "fsevents": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.3.tgz", - "integrity": "sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==", - "dev": true, - "optional": true - }, "function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -3131,11 +3451,16 @@ "integrity": "sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==", "dev": true }, - "get-stdin": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-6.0.0.tgz", - "integrity": "sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g==", - "dev": true + "get-intrinsic": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", + "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + } }, "getpass": { "version": "0.1.7", @@ -3159,9 +3484,9 @@ } }, "glob-parent": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz", - "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, "requires": { "is-glob": "^4.0.1" @@ -3379,6 +3704,12 @@ } } }, + "has-bigints": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz", + "integrity": "sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==", + "dev": true + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -3386,9 +3717,9 @@ "dev": true }, "has-symbols": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", - "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", "dev": true }, "has-unicode": { @@ -3571,24 +3902,29 @@ "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" }, "inquirer": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.4.tgz", - "integrity": "sha512-Bu5Td5+j11sCkqfqmUTiwv+tWisMtP0L7Q8WrqA2C/BbBhy1YTdFrvjjlrKq8oagA/tLQBski2Gcx/Sqyi2qSQ==", + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", + "integrity": 
"sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", "dev": true, "requires": { - "ansi-escapes": "^4.2.1", + "ansi-escapes": "^3.2.0", "chalk": "^2.4.2", - "cli-cursor": "^3.1.0", + "cli-cursor": "^2.1.0", "cli-width": "^2.0.0", "external-editor": "^3.0.3", - "figures": "^3.0.0", - "lodash": "^4.17.15", - "mute-stream": "0.0.8", + "figures": "^2.0.0", + "lodash": "^4.17.12", + "mute-stream": "0.0.7", "run-async": "^2.2.0", - "rxjs": "^6.5.3", - "string-width": "^4.1.0", + "rxjs": "^6.4.0", "strip-ansi": "^5.1.0", "through": "^2.3.6" + }, + "dependencies": { + "ansi-regex": { + "version": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==" + } } }, "ipaddr.js": { @@ -3604,7 +3940,13 @@ "is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "is-bigint": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.2.tgz", + "integrity": "sha512-0JV5+SOCQkIdzjBK9buARcV804Ddu7A0Qet6sHi3FimE9ne6m4BGQZfRn+NZiXbBk4F4XmHfDZIipLj9pX8dSA==", "dev": true }, "is-binary-path": { @@ -3616,28 +3958,45 @@ "binary-extensions": "^2.0.0" } }, + "is-boolean-object": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.1.tgz", + "integrity": "sha512-bXdQWkECBUIAcCkeH1unwJLIpZYaa5VvuygSyS/c2lf719mTKZDU5UdDRlpd01UjADgmW8RfqaP+mRaVPdr/Ng==", + "dev": true, + "requires": { + "call-bind": "^1.0.2" + } + }, "is-buffer": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", - "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==", - "dev": true + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" }, "is-callable": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.0.tgz", - "integrity": "sha512-pyVD9AaGLxtg6srb2Ng6ynWJqkHU9bEM087AKck0w8QwDarTfNcpIYoU8x8Hv2Icm8u6kFJM18Dag8lyqGkviw==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz", + "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==", "dev": true }, + "is-core-module": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.5.0.tgz", + "integrity": "sha512-TXCMSDsEHMEEZ6eCA8rwRDbLu55MRGmrctljsBX/2v1d9/GzqHOxW5c5oPSgrUt2vBFXebu9rGqckXGPWOlYpg==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, "is-date-object": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", - "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.4.tgz", + "integrity": "sha512-/b4ZVsG7Z5XVtIxs/h9W8nvfLgSAyKYdtGWQLbqy6jA1icmgjf8WCoTKgeS4wy5tYaPePouzFMANbnj94c2Z+A==", "dev": true }, "is-extglob": { "version": "2.1.1", "resolved": 
"https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", "dev": true }, "is-fullwidth-code-point": { @@ -3655,17 +4014,35 @@ "is-extglob": "^2.1.1" } }, + "is-negative-zero": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz", + "integrity": "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==", + "dev": true + }, "is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "dev": true }, + "is-number-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.5.tgz", + "integrity": "sha512-RU0lI/n95pMoUKu9v1BZP5MBcZuNSVJkMkAG2dJqC4z2GlkGUNeH68SuHuBKBD/XFe+LHZ+f9BKkLET60Niedw==", + "dev": true + }, "is-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" }, + "is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true + }, "is-promise": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", @@ -3673,12 +4050,13 @@ "dev": true }, "is-regex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.0.tgz", - "integrity": "sha512-iI97M8KTWID2la5uYXlkbSDQIg4F6o1sYboZKKTDpnDQMLtUL86zxhgDet3Q2SriaYsyGqZ6Mn2SjbRKeLHdqw==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz", + "integrity": "sha512-qSVXFz28HM7y+IWX6vLCsexdlvzT1PJNFSBuaQLQ5o0IEw8UDYW6/2+eCMVyIsbM8CNLX2a/QWmSpyxYEHY7CQ==", "dev": true, "requires": { - "has-symbols": "^1.0.1" + "call-bind": "^1.0.2", + "has-symbols": "^1.0.2" } }, "is-stream": { @@ -3692,18 +4070,18 @@ "integrity": "sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw==" }, "is-string": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", - "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.6.tgz", + "integrity": "sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w==", "dev": true }, "is-symbol": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", - "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", "dev": true, "requires": { - "has-symbols": "^1.0.1" + "has-symbols": "^1.0.2" } }, "is-typedarray": { @@ -3753,12 +4131,6 @@ "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", "integrity": 
"sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==" }, - "jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", - "dev": true - }, "json-bigint": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", @@ -3767,6 +4139,12 @@ "bignumber.js": "^7.0.0" } }, + "json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true + }, "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", @@ -3789,12 +4167,12 @@ "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" }, "json5": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", - "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", + "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", "dev": true, "requires": { - "minimist": "^1.2.0" + "minimist": "^1.2.5" } }, "jsprim": { @@ -3844,21 +4222,21 @@ } }, "load-json-file": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", - "integrity": "sha512-3p6ZOGNbiX4CdvEd1VcE6yi78UrGNpjHO33noGwHCnT/o2fyllJDepsm8+mFFv/DvtwFHht5HIHSyOy5a+ChVQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", "dev": true, "requires": { "graceful-fs": "^4.1.2", - "parse-json": "^2.2.0", - "pify": "^2.0.0", + "parse-json": "^4.0.0", + "pify": "^3.0.0", "strip-bom": "^3.0.0" }, "dependencies": { "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", "dev": true } } @@ -3866,7 +4244,7 @@ "locate-path": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", "dev": true, "requires": { "p-locate": "^2.0.0", @@ -3888,6 +4266,12 @@ "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" }, + "lodash.clonedeep": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", + "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=", + "dev": true + }, "lodash.get": { "version": "4.4.2", "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", @@ -3899,12 +4283,6 @@ "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", "integrity": 
"sha512-rnYUdIo6xRCJnQmbVFEwcxF144erlD+M3YcJUVesflU9paQaE8p+fJDcIQrlMYbxoANFL+AB9hZrzSBBk5PL+g==" }, - "lodash.memoize": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", - "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", - "dev": true - }, "lodash.merge": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", @@ -3916,6 +4294,12 @@ "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" }, + "lodash.truncate": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", + "integrity": "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=", + "dev": true + }, "lodash.unescape": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/lodash.unescape/-/lodash.unescape-4.0.1.tgz", @@ -3928,12 +4312,63 @@ "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==" }, "log-symbols": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-3.0.0.tgz", - "integrity": "sha512-dSkNGuI7iG3mfvDzUuYZyvk5dD9ocYCYzNU6CYDE6+Xqd+gwme6Z00NS3dUh8mq/73HaEtT7m6W+yUPtU6BZnQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz", + "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==", "dev": true, "requires": { - "chalk": "^2.4.2" + "chalk": "^4.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz", + "integrity": "sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "logger-sharelatex": { @@ -4028,15 +4463,6 @@ "semver": "^6.0.0" } }, - "make-plural": { - "version": "4.3.0", - 
"resolved": "https://registry.npmjs.org/make-plural/-/make-plural-4.3.0.tgz", - "integrity": "sha512-xTYd4JVHpSCW+aqDof6w/MebaMVNTVYBZhbB/vi513xXdiPT92JMVCo0Jq8W2UZnzYRFeVbQiQ+I25l13JuKvA==", - "dev": true, - "requires": { - "minimist": "^1.2.0" - } - }, "map-obj": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", @@ -4050,13 +4476,6 @@ "charenc": "0.0.2", "crypt": "0.0.2", "is-buffer": "~1.1.6" - }, - "dependencies": { - "is-buffer": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", - "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" - } } }, "media-typer": { @@ -4076,29 +4495,6 @@ "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" }, - "messageformat": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/messageformat/-/messageformat-2.3.0.tgz", - "integrity": "sha512-uTzvsv0lTeQxYI2y1NPa1lItL5VRI8Gb93Y2K2ue5gBPyrbJxfDi/EYWxh2PKv5yO42AJeeqblS9MJSh/IEk4w==", - "dev": true, - "requires": { - "make-plural": "^4.3.0", - "messageformat-formatters": "^2.0.1", - "messageformat-parser": "^4.1.2" - } - }, - "messageformat-formatters": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/messageformat-formatters/-/messageformat-formatters-2.0.1.tgz", - "integrity": "sha512-E/lQRXhtHwGuiQjI7qxkLp8AHbMD5r2217XNe/SREbBlSawe0lOqsFb7rflZJmlQFSULNLIqlcjjsCPlB3m3Mg==", - "dev": true - }, - "messageformat-parser": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/messageformat-parser/-/messageformat-parser-4.1.2.tgz", - "integrity": "sha512-7dWuifeyldz7vhEuL96Kwq1fhZXBW+TUfbnHN4UCrCxoXQTYjHnR78eI66Gk9LaLLsAvzPNVJBaa66DRfFNaiA==", - "dev": true - }, "methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", @@ -4166,148 +4562,167 @@ } }, "mocha": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-7.2.0.tgz", - "integrity": "sha512-O9CIypScywTVpNaRrCAgoUnJgozpIofjKUYmJhiCIJMiuYnLI6otcb1/kpW9/n/tJODHGZ7i8aLQoDVsMtOKQQ==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-8.4.0.tgz", + "integrity": "sha512-hJaO0mwDXmZS4ghXsvPVriOhsxQ7ofcpQdm8dE+jISUOKopitvnXFQmpRR7jd2K6VBG6E26gU3IAbXXGIbu4sQ==", "dev": true, "requires": { - "ansi-colors": "3.2.3", + "@ungap/promise-all-settled": "1.1.2", + "ansi-colors": "4.1.1", "browser-stdout": "1.3.1", - "chokidar": "3.3.0", - "debug": "3.2.6", - "diff": "3.5.0", - "escape-string-regexp": "1.0.5", - "find-up": "3.0.0", - "glob": "7.1.3", + "chokidar": "3.5.1", + "debug": "4.3.1", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.1.6", "growl": "1.10.5", "he": "1.2.0", - "js-yaml": "3.13.1", - "log-symbols": "3.0.0", + "js-yaml": "4.0.0", + "log-symbols": "4.0.0", "minimatch": "3.0.4", - "mkdirp": "0.5.5", - "ms": "2.1.1", - "node-environment-flags": "1.0.6", - "object.assign": "4.1.0", - "strip-json-comments": "2.0.1", - "supports-color": "6.0.0", - "which": "1.3.1", + "ms": "2.1.3", + "nanoid": "3.1.20", + "serialize-javascript": "5.0.1", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "which": "2.0.2", "wide-align": "1.1.3", - "yargs": "13.3.2", - "yargs-parser": "13.1.2", - "yargs-unparser": "1.6.0" + "workerpool": "6.1.0", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + 
"yargs-unparser": "2.0.0" }, "dependencies": { - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "dev": true, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "requires": { - "ms": "^2.1.1" + "color-convert": "^2.0.1" } }, - "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "debug": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "dev": true, + "requires": { + "ms": "2.1.2" + }, + "dependencies": { + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "dev": true }, "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dev": true, "requires": { - "locate-path": "^3.0.0" + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" } }, - "glob": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", - "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": 
"sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, - "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "js-yaml": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.0.0.tgz", + "integrity": "sha512-pqon0s+4ScYUvX30wxQi3PogGFAlUyH0awepWvwkj4jD4v+ova3RiYw8bmA6x2rDrEaj8i/oWKoRxpVNW+Re8Q==", "dev": true, "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" + "argparse": "^2.0.1" + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" } }, "ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "dev": true }, "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dev": true, "requires": { - "p-limit": "^2.0.0" + "p-limit": "^3.0.2" } }, - "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - } - }, - "strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "dev": true }, - "supports-color": { + "strip-ansi": { "version": "6.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.0.0.tgz", - "integrity": "sha512-on9Kwidc1IUQo+bQdhi8+Tijpo0e1SS6RoGo2guUwn5vdaxw8RXOF9Vb2ws+ihWOmh4JnCJOvaziZWP1VABaLg==", - "dev": true, + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", "requires": { - "has-flag": "^3.0.0" + "ansi-regex": "^5.0.0" } }, - "yargs": { 
- "version": "13.3.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", - "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "dev": true, "requires": { - "cliui": "^5.0.0", - "find-up": "^3.0.0", - "get-caller-file": "^2.0.1", - "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", - "string-width": "^3.0.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^13.1.2" + "has-flag": "^4.0.0" + } + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" } } } @@ -4343,9 +4758,9 @@ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "mute-stream": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", - "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha512-r65nCZhrbXXb6dXOACihYApHw2Q6pV0M3V0PSxd74N0+D8nzAdEAITq2oAjA1jVnKI+tGvEBUpqiMh0+rW6zDQ==", "dev": true }, "mv": { @@ -4388,6 +4803,12 @@ "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" }, + "nanoid": { + "version": "3.1.20", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz", + "integrity": "sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==", + "dev": true + }, "natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -4477,24 +4898,6 @@ "integrity": "sha512-2+DuKodWvwRTrCfKOeR24KIc5unKjOh8mz17NCzVnHWfjAdDqbfbjqh7gUT+BkXBRQM52+xCHciKWonJ3CbJMQ==", "optional": true }, - "node-environment-flags": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/node-environment-flags/-/node-environment-flags-1.0.6.tgz", - "integrity": "sha512-5Evy2epuL+6TM0lCQGpFIj6KwiEsGh1SrHUhTbNX+sLbBtjidPZFAnVK9y5yU1+h//RitLbRHTIMyxQPtxMdHw==", - "dev": true, - "requires": { - "object.getownpropertydescriptors": "^2.0.3", - "semver": "^5.7.0" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - } - } - }, "node-fetch": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz", @@ -4619,9 +5022,9 @@ "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" }, "object-inspect": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.8.0.tgz", - "integrity": "sha512-jLdtEOB112fORuypAyl/50VRVIBIdVQOSUUGQHzJ4xBSbit81zRarz7GThkEFZy1RceYrWYcPcBFPQwHyAc1gA==", + "version": "1.11.0", + "resolved": 
"https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", + "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", "dev": true }, "object-keys": { @@ -4631,37 +5034,26 @@ "dev": true }, "object.assign": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", - "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", - "dev": true, - "requires": { - "define-properties": "^1.1.2", - "function-bind": "^1.1.1", - "has-symbols": "^1.0.0", - "object-keys": "^1.0.11" - } - }, - "object.getownpropertydescriptors": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.0.tgz", - "integrity": "sha512-Z53Oah9A3TdLoblT7VKJaTDdXdT+lQO+cNpKVnya5JDe9uLvzu1YyY1yFDFrcxrlRgWrEFH0jJtD/IbuwjcEVg==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz", + "integrity": "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==", "dev": true, "requires": { + "call-bind": "^1.0.0", "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1" + "has-symbols": "^1.0.1", + "object-keys": "^1.1.1" } }, "object.values": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.1.tgz", - "integrity": "sha512-WTa54g2K8iu0kmS/us18jEmdv1a4Wi//BZ/DTVYEcH0XhLM5NYdpDHja3gt57VrZLcNAO2WGA+KpWsDBaHt6eA==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.4.tgz", + "integrity": "sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg==", "dev": true, "requires": { + "call-bind": "^1.0.2", "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1", - "function-bind": "^1.1.1", - "has": "^1.0.3" + "es-abstract": "^1.18.2" } }, "on-finished": { @@ -4727,18 +5119,17 @@ } }, "p-limit": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.2.tgz", - "integrity": "sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==", - "dev": true, + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "requires": { - "p-try": "^2.0.0" + "yocto-queue": "^0.1.0" } }, "p-locate": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", "dev": true, "requires": { "p-limit": "^1.1.0" @@ -4756,16 +5147,11 @@ "p-try": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", "dev": true } } }, - "p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" - }, "parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -4781,12 +5167,13 @@ 
"integrity": "sha512-KbAJuYGUhZkB9gotDiKLnZ7Z3VTacK3fgwmDdB6ZVDtJbMBT6MfLga0WJaYpPDu0mzqT0NgHtHDt5PY4l0nidg==" }, "parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha512-QR/GGaKCkhwk1ePQNYDRKYZ3mwU9ypsKhB0XyFnLQdomyEqk3e8wpW3V5Jp88zbxK4n5ST1nqo+g9juTpownhQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", "dev": true, "requires": { - "error-ex": "^1.2.0" + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" } }, "parse-ms": { @@ -4833,18 +5220,18 @@ "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, "path-type": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha512-dUnb5dXUf+kzhC/W/F4e5/SkluXIFf5VUHolW1Eg1irn1hGWjPGdsRcvYJ1nD6lhk8Ir7VM0bHJKsYTx8Jx9OQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", "dev": true, "requires": { - "pify": "^2.0.0" + "pify": "^3.0.0" }, "dependencies": { "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", "dev": true } } @@ -4861,9 +5248,9 @@ "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" }, "picomatch": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", - "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", + "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", "dev": true }, "pify": { @@ -4874,7 +5261,16 @@ "pkg-dir": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", - "integrity": "sha512-ojakdnUgL5pzJYWw2AIDEupaQCX5OPbM688ZevubICjdIX01PRSYKqm33fJoCOJBRseYCTUlQRnBNX+Pchaejw==", + "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "dev": true, + "requires": { + "find-up": "^2.1.0" + } + }, + "pkg-up": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-2.0.0.tgz", + "integrity": "sha1-yBmscoBZpGHKscOImivjxJoATX8=", "dev": true, "requires": { "find-up": "^2.1.0" @@ -4897,14 +5293,6 @@ "split": "^1.0.1" }, "dependencies": { - "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "requires": { - "yocto-queue": "^0.1.0" - } - }, "source-map": { "version": "0.7.3", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", @@ -4919,9 +5307,9 @@ "dev": true }, "prettier": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.0.5.tgz", - "integrity": "sha512-7PtVymN48hGcO4fGjybyBSIWDsLU4H4XlvOHfq91pz9kkGlonzwTfYkaIEwiRg/dAJF9YlbsduBAgtYLi+8cFg==", + "version": "2.3.2", + 
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.3.2.tgz", + "integrity": "sha512-lnJzDfJ66zkMy58OL5/NY5zp70S7Nz6KqcKkXYzn2tMVrNxvbqaBpg7H3qHaLxCJ5lNMsGuM8+ohS7cZrthdLQ==", "dev": true }, "prettier-eslint": { @@ -4945,26 +5333,10 @@ "vue-eslint-parser": "^2.0.2" }, "dependencies": { - "ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", - "dev": true - }, "ansi-regex": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha512-wFUFA5bg5dviipbQQ32yOQhl6gcJaJXiHE7dvR8VYPG97+J/GNC5FKGepKdEDUFeXRzDxPF1X/Btc8L+v7oqIQ==", - "dev": true - }, - "cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha512-8lgKz8LmCRYZZQDpRyT2m5rKJ08TnU4tR9FFFW2rxpxR1FzWi4PQ/NfyODchAatHaUgnSPVcx/R5w6NuTBzFiw==", - "dev": true, - "requires": { - "restore-cursor": "^2.0.0" - } + "integrity": "sha512-wFUFA5bg5dviipbQQ32yOQhl6gcJaJXiHE7dvR8VYPG97+J/GNC5FKGepKdEDUFeXRzDxPF1X/Btc8L+v7oqIQ==" }, "debug": { "version": "4.1.1", @@ -5040,64 +5412,15 @@ "eslint-visitor-keys": "^1.0.0" } }, - "figures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha512-Oa2M9atig69ZkfwiApY8F2Yy+tzMbazyvqv21R0NsSC8floSOC09BbT1ITWAdoMGQvJ/aZnR1KMwdx9tvHnTNA==", - "dev": true, - "requires": { - "escape-string-regexp": "^1.0.5" - } - }, - "inquirer": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", - "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", - "dev": true, - "requires": { - "ansi-escapes": "^3.2.0", - "chalk": "^2.4.2", - "cli-cursor": "^2.1.0", - "cli-width": "^2.0.0", - "external-editor": "^3.0.3", - "figures": "^2.0.0", - "lodash": "^4.17.12", - "mute-stream": "0.0.7", - "run-async": "^2.2.0", - "rxjs": "^6.4.0", - "string-width": "^2.1.0", - "strip-ansi": "^5.1.0", - "through": "^2.3.6" - }, - "dependencies": { - "ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "requires": { - "ansi-regex": "^4.1.0" - } - } - } - }, "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", - "dev": true + "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==" }, "mimic-fn": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", - "dev": true + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==" }, "ms": { "version": "2.1.2", @@ -5105,17 
+5428,9 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, - "mute-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha512-r65nCZhrbXXb6dXOACihYApHw2Q6pV0M3V0PSxd74N0+D8nzAdEAITq2oAjA1jVnKI+tGvEBUpqiMh0+rW6zDQ==", - "dev": true - }, "onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "version": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", "integrity": "sha512-oyyPpiMaKARvvcgip+JV+7zci5L8D1W9RZIz2l1o08AM3pfspitVWnPt3mzHcBPp12oYMTy0pqrFs/C+m3EwsQ==", - "dev": true, "requires": { "mimic-fn": "^1.0.0" } @@ -5126,16 +5441,6 @@ "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", "dev": true }, - "restore-cursor": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q==", - "dev": true, - "requires": { - "onetime": "^2.0.0", - "signal-exit": "^3.0.2" - } - }, "semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", @@ -5143,10 +5448,8 @@ "dev": true }, "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "version": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, "requires": { "is-fullwidth-code-point": "^2.0.0", "strip-ansi": "^4.0.0" @@ -5156,304 +5459,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", "integrity": "sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - }, - "strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", - "dev": true - } - } - }, - "prettier-eslint-cli": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/prettier-eslint-cli/-/prettier-eslint-cli-5.0.0.tgz", - "integrity": "sha512-cei9UbN1aTrz3sQs88CWpvY/10PYTevzd76zoG1tdJ164OhmNTFRKPTOZrutVvscoQWzbnLKkviS3gu5JXwvZg==", - "dev": true, - "requires": { - "arrify": "^2.0.1", - "boolify": "^1.0.0", - "camelcase-keys": "^6.0.0", - "chalk": "^2.4.2", - "common-tags": "^1.8.0", - "core-js": "^3.1.4", - "eslint": "^5.0.0", - "find-up": "^4.1.0", - "get-stdin": "^7.0.0", - "glob": "^7.1.4", - "ignore": "^5.1.2", - "lodash.memoize": "^4.1.2", - "loglevel-colored-level-prefix": "^1.0.0", - "messageformat": "^2.2.1", - "prettier-eslint": "^9.0.0", - "rxjs": "^6.5.2", - "yargs": "^13.2.4" - }, - "dependencies": { - "ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", - "dev": true - }, - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": 
"sha512-wFUFA5bg5dviipbQQ32yOQhl6gcJaJXiHE7dvR8VYPG97+J/GNC5FKGepKdEDUFeXRzDxPF1X/Btc8L+v7oqIQ==", - "dev": true - }, - "cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha512-8lgKz8LmCRYZZQDpRyT2m5rKJ08TnU4tR9FFFW2rxpxR1FzWi4PQ/NfyODchAatHaUgnSPVcx/R5w6NuTBzFiw==", - "dev": true, - "requires": { - "restore-cursor": "^2.0.0" - } - }, - "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - }, - "eslint": { - "version": "5.16.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", - "integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.0.0", - "ajv": "^6.9.1", - "chalk": "^2.1.0", - "cross-spawn": "^6.0.5", - "debug": "^4.0.1", - "doctrine": "^3.0.0", - "eslint-scope": "^4.0.3", - "eslint-utils": "^1.3.1", - "eslint-visitor-keys": "^1.0.0", - "espree": "^5.0.1", - "esquery": "^1.0.1", - "esutils": "^2.0.2", - "file-entry-cache": "^5.0.1", - "functional-red-black-tree": "^1.0.1", - "glob": "^7.1.2", - "globals": "^11.7.0", - "ignore": "^4.0.6", - "import-fresh": "^3.0.0", - "imurmurhash": "^0.1.4", - "inquirer": "^6.2.2", - "js-yaml": "^3.13.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.3.0", - "lodash": "^4.17.11", - "minimatch": "^3.0.4", - "mkdirp": "^0.5.1", - "natural-compare": "^1.4.0", - "optionator": "^0.8.2", - "path-is-inside": "^1.0.2", - "progress": "^2.0.0", - "regexpp": "^2.0.1", - "semver": "^5.5.1", - "strip-ansi": "^4.0.0", - "strip-json-comments": "^2.0.1", - "table": "^5.2.3", - "text-table": "^0.2.0" - }, - "dependencies": { - "ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", - "dev": true - } - } - }, - "eslint-scope": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", - "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", - "dev": true, - "requires": { - "esrecurse": "^4.1.0", - "estraverse": "^4.1.1" - } - }, - "espree": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", - "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", - "dev": true, - "requires": { - "acorn": "^6.0.7", - "acorn-jsx": "^5.0.0", - "eslint-visitor-keys": "^1.0.0" - } - }, - "figures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha512-Oa2M9atig69ZkfwiApY8F2Yy+tzMbazyvqv21R0NsSC8floSOC09BbT1ITWAdoMGQvJ/aZnR1KMwdx9tvHnTNA==", - "dev": true, - "requires": { - "escape-string-regexp": "^1.0.5" - } - }, - "find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", - "dev": true, - "requires": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - } - }, - "get-stdin": { - "version": "7.0.0", - "resolved": 
"https://registry.npmjs.org/get-stdin/-/get-stdin-7.0.0.tgz", - "integrity": "sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==", - "dev": true - }, - "ignore": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.4.tgz", - "integrity": "sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==", - "dev": true - }, - "inquirer": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", - "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", - "dev": true, - "requires": { - "ansi-escapes": "^3.2.0", - "chalk": "^2.4.2", - "cli-cursor": "^2.1.0", - "cli-width": "^2.0.0", - "external-editor": "^3.0.3", - "figures": "^2.0.0", - "lodash": "^4.17.12", - "mute-stream": "0.0.7", - "run-async": "^2.2.0", - "rxjs": "^6.4.0", - "string-width": "^2.1.0", - "strip-ansi": "^5.1.0", - "through": "^2.3.6" - }, - "dependencies": { - "ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "requires": { - "ansi-regex": "^4.1.0" - } - } - } - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", - "dev": true - }, - "locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", - "dev": true, - "requires": { - "p-locate": "^4.1.0" - } - }, - "mimic-fn": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", - "dev": true - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "mute-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha512-r65nCZhrbXXb6dXOACihYApHw2Q6pV0M3V0PSxd74N0+D8nzAdEAITq2oAjA1jVnKI+tGvEBUpqiMh0+rW6zDQ==", - "dev": true - }, - "onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha512-oyyPpiMaKARvvcgip+JV+7zci5L8D1W9RZIz2l1o08AM3pfspitVWnPt3mzHcBPp12oYMTy0pqrFs/C+m3EwsQ==", - "dev": true, - "requires": { - "mimic-fn": "^1.0.0" - } - }, - "p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", - "dev": true, - "requires": { - "p-limit": "^2.2.0" - } - }, - "path-exists": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "dev": true - }, - "restore-cursor": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q==", - "dev": true, - "requires": { - "onetime": "^2.0.0", - "signal-exit": "^3.0.2" - } - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - }, - "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - } - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==", - "dev": true, "requires": { "ansi-regex": "^3.0.0" } @@ -5611,17 +5616,20 @@ "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==" }, - "quick-lru": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", - "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", + "ramda": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.27.1.tgz", + "integrity": "sha512-PgIdVpn5y5Yns8vqb8FzBUEYn98V3xcPgawAkkgj0YJ0qDsnHCiNmZYfOGMgOvoB0eWFLpYbhxUR3mxfDIMvpw==", "dev": true }, - "ramda": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.27.0.tgz", - "integrity": "sha512-pVzZdDpWwWqEVVLshWUHjNwuVP7SfcmPraYuqocJp1yo2U1R7P+5QAfDhdItkuoGqIBnBYrtPp7rEPqDn9HlZA==", - "dev": true + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.0" + } }, "range-parser": { "version": "1.2.1", @@ -5687,24 +5695,24 @@ } }, "read-pkg": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", - "integrity": "sha512-eFIBOPW7FGjzBuk3hdXEuNSiTZS/xEMlH49HxMyzb0hyPfu4EhVjT2DH32K1hSSmVq4sebAWnZuuY5auISUTGA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", "dev": true, "requires": { - "load-json-file": "^2.0.0", + "load-json-file": "^4.0.0", "normalize-package-data": "^2.3.2", - "path-type": "^2.0.0" + "path-type": "^3.0.0" } }, "read-pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", - "integrity": "sha512-1orxQfbWGUiTn9XsPlChs6rLie/AV9jwZTGmu2NZw/CUDJQchXJFYE0Fq5j7+n558T1JhDWLdhyd1Zj+wLY//w==", + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz", + "integrity": "sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=", "dev": true, "requires": { "find-up": "^2.0.0", - "read-pkg": "^2.0.0" + "read-pkg": "^3.0.0" } }, "readable-stream": { @@ -5722,12 +5730,12 @@ } }, "readdirp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.2.0.tgz", - "integrity": "sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz", + "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==", "dev": true, "requires": { - "picomatch": "^2.0.4" + "picomatch": "^2.2.1" } }, "regexpp": { @@ -5813,6 +5821,12 @@ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": true }, + "require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true + }, "require-in-the-middle": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.1.0.tgz", @@ -5844,12 +5858,6 @@ "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==", "dev": true }, - "require-main-filename": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", - "dev": true - }, "require-relative": { "version": "0.8.7", "resolved": "https://registry.npmjs.org/require-relative/-/require-relative-0.8.7.tgz", @@ -5895,12 +5903,11 @@ "dev": true }, "restore-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", - "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q==", "dev": true, "requires": { - "onetime": "^5.1.0", "signal-exit": "^3.0.2" } }, @@ -6027,6 +6034,15 @@ } } }, + "serialize-javascript": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz", + "integrity": "sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==", + "dev": true, + "requires": { + "randombytes": "^2.1.0" + } + }, "serve-static": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz", @@ -6201,9 +6217,9 @@ } }, "spdx-license-ids": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", - "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.9.tgz", + "integrity": "sha512-Ki212dKK4ogX+xDo4CtOZBVIwhsKBEfsEEcwmJfLQzirgc2jIWdzg40Unxz/HzEUqM1WFzVlQSMF9kZZ2HboLQ==", "dev": true }, "split": { @@ -6287,9 +6303,9 @@ "dev": true }, 
"string-width": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", + "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", "dev": true, "requires": { "emoji-regex": "^8.0.0", @@ -6309,23 +6325,23 @@ } }, "string.prototype.trimend": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.1.tgz", - "integrity": "sha512-LRPxFUaTtpqYsTeNKaFOw3R4bxIzWOnbQ837QfBylo8jIxtcbK/A/sMV7Q+OAV/vWo+7s25pOE10KYSjaSO06g==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz", + "integrity": "sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==", "dev": true, "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.5" + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" } }, "string.prototype.trimstart": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.1.tgz", - "integrity": "sha512-XxZn+QpvrBI1FOcg6dIpxUPgWCPuNXvMD72aaRaUQv1eD4e/Qy8i/hFTe0BUmD60p/QA6bh1avmuPTfNjqVWRw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz", + "integrity": "sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==", "dev": true, "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.5" + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" } }, "string_decoder": { @@ -6356,13 +6372,13 @@ "strip-bom": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", "dev": true }, "strip-json-comments": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz", - "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "dev": true }, "stubs": { @@ -6505,12 +6521,6 @@ "os-tmpdir": "~1.0.2" } }, - "to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "dev": true - }, "to-no-case": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", @@ -6563,13 +6573,12 @@ } }, "tsconfig-paths": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz", - "integrity": "sha512-dRcuzokWhajtZWkQsDVKbWyY+jgcLC5sqJhg2PSgf4ZkH2aHPvaOY8YWGhmjb68b5qqTfasSsDO9k7RUiEmZAw==", + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.10.1.tgz", + "integrity": 
"sha512-rETidPDgCpltxF7MjBZlAFPUHv5aHH2MymyPvh+vEyWAED4Eb/WeMbsnD/JDr4OKPOA1TssDHgIcpTN5Kh0p6Q==", "dev": true, "requires": { - "@types/json5": "^0.0.29", - "json5": "^1.0.1", + "json5": "^2.2.0", "minimist": "^1.2.0", "strip-bom": "^3.0.0" } @@ -6641,6 +6650,18 @@ "integrity": "sha512-Pspx3oKAPJtjNwE92YS05HQoY7z2SFyOpHo9MqJor3BXAGNaPUs83CuVp9VISFkSjyRfiTpmKuAYGJB7S7hOxw==", "dev": true }, + "unbox-primitive": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz", + "integrity": "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "has-bigints": "^1.0.1", + "has-symbols": "^1.0.2", + "which-boxed-primitive": "^1.0.2" + } + }, "underscore": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz", @@ -6699,9 +6720,9 @@ "integrity": "sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==" }, "v8-compile-cache": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", - "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", + "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", "dev": true }, "validate-npm-package-license": { @@ -6817,11 +6838,18 @@ "isexe": "^2.0.0" } }, - "which-module": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", - "integrity": "sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==", - "dev": true + "which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dev": true, + "requires": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + } }, "wide-align": { "version": "1.1.3", @@ -6866,40 +6894,19 @@ "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", "dev": true }, + "workerpool": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.1.0.tgz", + "integrity": "sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg==", + "dev": true + }, "wrap-ansi": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", - "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dev": true, "requires": { - "ansi-styles": "^3.2.0", - "string-width": "^3.0.0", - "strip-ansi": "^5.0.0" - }, - "dependencies": { - "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", 
- "dev": true - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", - "dev": true - }, - "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - } - } + "string-width": "^4.1.0" } }, "wrappy": { @@ -6952,9 +6959,9 @@ "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" }, "y18n": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz", - "integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==", + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", "dev": true }, "yaassertion": { @@ -6968,95 +6975,36 @@ "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" }, "yargs": { - "version": "13.3.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.0.tgz", - "integrity": "sha512-2eehun/8ALW8TLoIl7MVaRUrg+yCnenu8B4kBlRxj3GJGDKU1Og7sMXPNm1BYyM1DOJmTZ4YeN/Nwxv+8XJsUA==", + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", "dev": true, "requires": { - "cliui": "^5.0.0", - "find-up": "^3.0.0", - "get-caller-file": "^2.0.1", + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", - "string-width": "^3.0.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^13.1.1" - }, - "dependencies": { - "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", - "dev": true - }, - "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "dev": true, - "requires": { - "locate-path": "^3.0.0" - } - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", - "dev": true - }, - "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "dev": true, - "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" - } - }, - "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": 
"sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", - "dev": true, - "requires": { - "p-limit": "^2.0.0" - } - }, - "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - } - } + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" } }, "yargs-parser": { - "version": "13.1.2", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", - "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", - "dev": true, - "requires": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - } + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true }, "yargs-unparser": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-1.6.0.tgz", - "integrity": "sha512-W9tKgmSn0DpSatfri0nx52Joq5hVXgeLiqR/5G0sZNDoLZFOr/xjBUDcShCOGNsBnEMNo1KAMBkTej1Hm62HTw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", "dev": true, "requires": { - "flat": "^4.1.0", - "lodash": "^4.17.15", - "yargs": "^13.3.0" + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" } }, "yn": { diff --git a/services/filestore/package.json b/services/filestore/package.json index 6cb9dab872..4dd8aab999 100644 --- a/services/filestore/package.json +++ b/services/filestore/package.json @@ -13,11 +13,12 @@ "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "start": "node $NODE_APP_OPTIONS app.js", "nodemon": "nodemon --config nodemon.json", - "lint": "node_modules/.bin/eslint --max-warnings 0 .", - "format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different", - "format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write", + "lint": "eslint --max-warnings 0 --format unix .", + "format": "prettier --list-different $PWD/'**/*.js'", + "format:fix": "prettier --write $PWD/'**/*.js'", "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", - "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js" + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", + "lint:fix": "eslint --fix ." 
}, "dependencies": { "@overleaf/metrics": "^3.5.1", @@ -41,26 +42,23 @@ "devDependencies": { "@google-cloud/storage": "^5.1.2", "aws-sdk": "^2.718.0", - "babel-eslint": "^10.1.0", - "chai": "4.2.0", + "chai": "^4.2.0", "chai-as-promised": "^7.1.1", "disrequire": "^1.1.0", - "eslint": "^6.8.0", - "eslint-config-prettier": "^6.10.0", - "eslint-config-standard": "^14.1.0", - "eslint-plugin-chai-expect": "^2.1.0", - "eslint-plugin-chai-friendly": "^0.5.0", - "eslint-plugin-import": "^2.20.1", - "eslint-plugin-mocha": "^6.3.0", - "eslint-plugin-node": "^11.0.0", + "eslint": "^7.21.0", + "eslint-config-prettier": "^8.1.0", + "eslint-config-standard": "^16.0.2", + "eslint-plugin-chai-expect": "^2.2.0", + "eslint-plugin-chai-friendly": "^0.6.0", + "eslint-plugin-import": "^2.22.1", + "eslint-plugin-mocha": "^8.0.0", + "eslint-plugin-node": "^11.1.0", "eslint-plugin-prettier": "^3.1.2", "eslint-plugin-promise": "^4.2.1", - "eslint-plugin-standard": "^4.0.1", - "mocha": "7.2.0", + "mocha": "^8.3.2", "mongodb": "^3.5.9", - "prettier": "^2.0.0", + "prettier": "^2.2.1", "prettier-eslint": "^9.0.2", - "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "2.0.4", "sinon": "9.0.2", "sinon-chai": "^3.5.0", From e3d7afeb96b1f336a248507ef335a0934fb03dc7 Mon Sep 17 00:00:00 2001 From: Jakob Ackermann Date: Tue, 13 Jul 2021 12:04:46 +0100 Subject: [PATCH 553/555] [misc] run format_fix and lint:fix --- services/filestore/app.js | 4 +- services/filestore/app/js/Errors.js | 4 +- services/filestore/app/js/FileController.js | 12 ++-- services/filestore/app/js/FileConverter.js | 14 ++-- services/filestore/app/js/FileHandler.js | 12 ++-- .../filestore/app/js/HealthCheckController.js | 6 +- services/filestore/app/js/ImageOptimiser.js | 6 +- services/filestore/app/js/KeyBuilder.js | 4 +- services/filestore/app/js/LocalFileWriter.js | 4 +- services/filestore/app/js/RequestLogger.js | 8 +-- services/filestore/app/js/SafeExec.js | 8 +-- .../filestore/config/settings.defaults.js | 26 +++---- .../test/acceptance/js/FilestoreApp.js | 8 +-- .../test/acceptance/js/FilestoreTests.js | 69 +++++++++---------- .../test/acceptance/js/TestConfig.js | 38 +++++----- .../test/acceptance/js/TestHelper.js | 6 +- .../test/unit/js/FileControllerTests.js | 52 +++++++------- .../test/unit/js/FileConverterTests.js | 12 ++-- .../test/unit/js/FileHandlerTests.js | 60 ++++++++-------- .../test/unit/js/ImageOptimiserTests.js | 18 ++--- .../filestore/test/unit/js/KeybuilderTests.js | 4 +- .../test/unit/js/LocalFileWriterTests.js | 20 +++--- .../filestore/test/unit/js/SafeExecTests.js | 12 ++-- .../filestore/test/unit/js/SettingsTests.js | 6 +- 24 files changed, 206 insertions(+), 207 deletions(-) diff --git a/services/filestore/app.js b/services/filestore/app.js index 4a5f34fc5b..11ece01180 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -140,7 +140,7 @@ const host = '0.0.0.0' if (!module.parent) { // Called directly - app.listen(port, host, (error) => { + app.listen(port, host, error => { if (error) { logger.error('Error starting Filestore', error) throw error @@ -153,7 +153,7 @@ process .on('unhandledRejection', (reason, p) => { logger.err(reason, 'Unhandled Rejection at Promise', p) }) - .on('uncaughtException', (err) => { + .on('uncaughtException', err => { logger.err(err, 'Uncaught Exception thrown') process.exit(1) }) diff --git a/services/filestore/app/js/Errors.js b/services/filestore/app/js/Errors.js index 6aa91a65f3..c7d19d8484 100644 --- a/services/filestore/app/js/Errors.js +++ 
b/services/filestore/app/js/Errors.js @@ -11,7 +11,7 @@ class FailedCommandError extends OError { constructor(command, code, stdout, stderr) { super('command failed with error exit code', { command, - code + code, }) this.stdout = stdout this.stderr = stderr @@ -26,5 +26,5 @@ module.exports = { HealthCheckError, TimeoutError, InvalidParametersError, - ...Errors + ...Errors, } diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js index e087746811..7304e4d6ba 100644 --- a/services/filestore/app/js/FileController.js +++ b/services/filestore/app/js/FileController.js @@ -14,7 +14,7 @@ module.exports = { copyFile, deleteFile, deleteProject, - directorySize + directorySize, } function getFile(req, res, next) { @@ -24,7 +24,7 @@ function getFile(req, res, next) { key, bucket, format, - style + style, } metrics.inc('getFile') @@ -34,7 +34,7 @@ function getFile(req, res, next) { bucket, format, style, - cacheWarm: req.query.cacheWarm + cacheWarm: req.query.cacheWarm, }) if (req.headers.range) { @@ -70,7 +70,7 @@ function getFile(req, res, next) { return res.sendStatus(200).end() } - pipeline(fileStream, res, (err) => { + pipeline(fileStream, res, err => { if (err && err.code === 'ERR_STREAM_PREMATURE_CLOSE') { res.end() } else if (err) { @@ -134,13 +134,13 @@ function copyFile(req, res, next) { key, bucket, oldProject_id: oldProjectId, - oldFile_id: oldFileId + oldFile_id: oldFileId, }) req.requestLogger.setMessage('copying file') PersistorManager.copyObject(bucket, `${oldProjectId}/${oldFileId}`, key) .then(() => res.sendStatus(200)) - .catch((err) => { + .catch(err => { if (err) { if (err instanceof Errors.NotFoundError) { res.sendStatus(404) diff --git a/services/filestore/app/js/FileConverter.js b/services/filestore/app/js/FileConverter.js index 3a8549fd6c..88161479ec 100644 --- a/services/filestore/app/js/FileConverter.js +++ b/services/filestore/app/js/FileConverter.js @@ -16,8 +16,8 @@ module.exports = { promises: { convert, thumbnail, - preview - } + preview, + }, } async function convert(sourcePath, requestedFormat) { @@ -29,7 +29,7 @@ async function convert(sourcePath, requestedFormat) { '-flatten', '-density', '300', - `${sourcePath}[0]` + `${sourcePath}[0]`, ]) } @@ -46,7 +46,7 @@ async function thumbnail(sourcePath) { `pdf:fit-page=${width}`, `${sourcePath}[0]`, '-resize', - width + width, ]) } @@ -63,14 +63,14 @@ async function preview(sourcePath) { `pdf:fit-page=${width}`, `${sourcePath}[0]`, '-resize', - width + width, ]) } async function _convert(sourcePath, requestedFormat, command) { if (!APPROVED_FORMATS.includes(requestedFormat)) { throw new ConversionError('invalid format requested', { - format: requestedFormat + format: requestedFormat, }) } @@ -83,7 +83,7 @@ async function _convert(sourcePath, requestedFormat, command) { try { await safeExec(command, { killSignal: KILL_SIGNAL, - timeout: FOURTY_SECONDS + timeout: FOURTY_SECONDS, }) } catch (err) { throw new ConversionError( diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js index 872ab65746..e20aca2d37 100644 --- a/services/filestore/app/js/FileHandler.js +++ b/services/filestore/app/js/FileHandler.js @@ -23,8 +23,8 @@ module.exports = { deleteFile, deleteProject, getFileSize, - getDirectorySize - } + getDirectorySize, + }, } async function insertFile(bucket, key, stream) { @@ -33,7 +33,7 @@ async function insertFile(bucket, key, stream) { throw new InvalidParametersError('key does not match validation regex', { bucket, key, - 
convertedKey + convertedKey, }) } if (Settings.enableConversions) { @@ -48,7 +48,7 @@ async function deleteFile(bucket, key) { throw new InvalidParametersError('key does not match validation regex', { bucket, key, - convertedKey + convertedKey, }) } const jobs = [PersistorManager.deleteObject(bucket, key)] @@ -62,7 +62,7 @@ async function deleteProject(bucket, key) { if (!key.match(/^[0-9a-f]{24}\//i)) { throw new InvalidParametersError('key does not match validation regex', { bucket, - key + key, }) } await PersistorManager.deleteDirectory(bucket, key) @@ -172,7 +172,7 @@ async function _convertFile(bucket, originalKey, opts) { throw new ConversionError('invalid file conversion options', { bucket, originalKey, - opts + opts, }) } let destPath diff --git a/services/filestore/app/js/HealthCheckController.js b/services/filestore/app/js/HealthCheckController.js index 9f32f479fd..512dca8e32 100644 --- a/services/filestore/app/js/HealthCheckController.js +++ b/services/filestore/app/js/HealthCheckController.js @@ -24,7 +24,7 @@ async function checkCanGetFiles() { const bucket = Settings.filestore.stores.user_files const buffer = new streamBuffers.WritableStreamBuffer({ - initialSize: 100 + initialSize: 100, }) const sourceStream = await FileHandler.getFile(bucket, key, {}) @@ -62,8 +62,8 @@ module.exports = { check(req, res, next) { Promise.all([checkCanGetFiles(), checkFileConvert()]) .then(() => res.sendStatus(200)) - .catch((err) => { + .catch(err => { next(err) }) - } + }, } diff --git a/services/filestore/app/js/ImageOptimiser.js b/services/filestore/app/js/ImageOptimiser.js index 1ee9d6acc7..833f3792d9 100644 --- a/services/filestore/app/js/ImageOptimiser.js +++ b/services/filestore/app/js/ImageOptimiser.js @@ -6,8 +6,8 @@ const safeExec = require('./SafeExec').promises module.exports = { compressPng: callbackify(compressPng), promises: { - compressPng - } + compressPng, + }, } async function compressPng(localPath, callback) { @@ -15,7 +15,7 @@ async function compressPng(localPath, callback) { const args = ['optipng', localPath] const opts = { timeout: 30 * 1000, - killSignal: 'SIGKILL' + killSignal: 'SIGKILL', } try { diff --git a/services/filestore/app/js/KeyBuilder.js b/services/filestore/app/js/KeyBuilder.js index b615e69743..fa6d969922 100644 --- a/services/filestore/app/js/KeyBuilder.js +++ b/services/filestore/app/js/KeyBuilder.js @@ -8,7 +8,7 @@ module.exports = { publicFileKeyMiddleware, publicProjectKeyMiddleware, bucketFileKeyMiddleware, - templateFileKeyMiddleware + templateFileKeyMiddleware, } function getConvertedFolderKey(key) { @@ -68,7 +68,7 @@ function templateFileKeyMiddleware(req, res, next) { template_id: templateId, format, version, - sub_type: subType + sub_type: subType, } = req.params req.key = `${templateId}/v/${version}/${format}` diff --git a/services/filestore/app/js/LocalFileWriter.js b/services/filestore/app/js/LocalFileWriter.js index 9b0cc7b289..860c54c052 100644 --- a/services/filestore/app/js/LocalFileWriter.js +++ b/services/filestore/app/js/LocalFileWriter.js @@ -10,10 +10,10 @@ const { WriteError } = require('./Errors') module.exports = { promises: { writeStream, - deleteFile + deleteFile, }, writeStream: callbackify(writeStream), - deleteFile: callbackify(deleteFile) + deleteFile: callbackify(deleteFile), } const pipeline = promisify(Stream.pipeline) diff --git a/services/filestore/app/js/RequestLogger.js b/services/filestore/app/js/RequestLogger.js index 956fd3287d..19695b74b0 100644 --- a/services/filestore/app/js/RequestLogger.js +++ 
b/services/filestore/app/js/RequestLogger.js @@ -38,7 +38,7 @@ class RequestLogger { metrics.timing('http_request', responseTime, null, { method: req.method, status_code: res.statusCode, - path: routePath.replace(/\//g, '_').replace(/:/g, '').slice(1) + path: routePath.replace(/\//g, '_').replace(/:/g, '').slice(1), }) } @@ -57,14 +57,14 @@ class RequestLogger { req.socket.socket && req.socket.socket.remoteAddress), 'user-agent': req.headers['user-agent'], - 'content-length': req.headers['content-length'] + 'content-length': req.headers['content-length'], }, res: { 'content-length': res._headers['content-length'], statusCode: res.statusCode, - 'response-time': responseTime + 'response-time': responseTime, }, - info: req.requestLogger._logInfo + info: req.requestLogger._logInfo, }, req.requestLogger._logMessage ) diff --git a/services/filestore/app/js/SafeExec.js b/services/filestore/app/js/SafeExec.js index 655ecf2f5c..63177b8057 100644 --- a/services/filestore/app/js/SafeExec.js +++ b/services/filestore/app/js/SafeExec.js @@ -45,7 +45,7 @@ function safeExec(command, options, callback) { new FailedCommandError('failed to kill process after timeout', { command, options, - pid: child.pid + pid: child.pid, }) ) } @@ -62,13 +62,13 @@ function safeExec(command, options, callback) { cleanup() }) - child.on('error', (err) => { + child.on('error', err => { cleanup(err) }) - child.stdout.on('data', (chunk) => { + child.stdout.on('data', chunk => { stdout += chunk }) - child.stderr.on('data', (chunk) => { + child.stderr.on('data', chunk => { stderr += chunk }) } diff --git a/services/filestore/config/settings.defaults.js b/services/filestore/config/settings.defaults.js index d10e17ac3b..8c2ccfee28 100644 --- a/services/filestore/config/settings.defaults.js +++ b/services/filestore/config/settings.defaults.js @@ -40,8 +40,8 @@ const settings = { internal: { filestore: { port: 3009, - host: process.env.LISTEN_ADDRESS || 'localhost' - } + host: process.env.LISTEN_ADDRESS || 'localhost', + }, }, filestore: { @@ -57,13 +57,13 @@ const settings = { ? { apiEndpoint: process.env.GCS_API_ENDPOINT, apiScheme: process.env.GCS_API_SCHEME, - projectId: process.env.GCS_PROJECT_ID + projectId: process.env.GCS_PROJECT_ID, } : undefined, unlockBeforeDelete: process.env.GCS_UNLOCK_BEFORE_DELETE === 'true', // unlock an event-based hold before deleting. default false deletedBucketSuffix: process.env.GCS_DELETED_BUCKET_SUFFIX, // if present, copy file to another bucket on delete. default null deleteConcurrency: parseInt(process.env.GCS_DELETE_CONCURRENCY) || 50, - signedUrlExpiryInMs: parseInt(process.env.LINK_EXPIRY_TIMEOUT || 60000) + signedUrlExpiryInMs: parseInt(process.env.LINK_EXPIRY_TIMEOUT || 60000), }, s3: @@ -76,7 +76,7 @@ const settings = { partSize: process.env.AWS_S3_PARTSIZE || 100 * 1024 * 1024, bucketCreds: process.env.S3_BUCKET_CREDENTIALS ? JSON.parse(process.env.S3_BUCKET_CREDENTIALS) - : undefined + : undefined, } : undefined, @@ -86,7 +86,7 @@ const settings = { stores: { user_files: process.env.USER_FILES_BUCKET_NAME, template_files: process.env.TEMPLATE_FILES_BUCKET_NAME, - public_files: process.env.PUBLIC_FILES_BUCKET_NAME + public_files: process.env.PUBLIC_FILES_BUCKET_NAME, }, fallback: process.env.FALLBACK_BACKEND @@ -95,28 +95,28 @@ const settings = { // mapping of bucket names on the fallback, to bucket names on the primary. // e.g. 
{ myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } buckets: JSON.parse(process.env.FALLBACK_BUCKET_MAPPING || '{}'), - copyOnMiss: process.env.COPY_ON_MISS === 'true' + copyOnMiss: process.env.COPY_ON_MISS === 'true', } : undefined, - allowRedirects: process.env.ALLOW_REDIRECTS === 'true' + allowRedirects: process.env.ALLOW_REDIRECTS === 'true', }, path: { // eslint-disable-next-line no-path-concat - uploadFolder: Path.resolve(__dirname + '/../uploads') + uploadFolder: Path.resolve(__dirname + '/../uploads'), }, commands: { // Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"] - convertCommandPrefix: [] + convertCommandPrefix: [], }, enableConversions: process.env.ENABLE_CONVERSIONS === 'true', sentry: { - dsn: process.env.SENTRY_DSN - } + dsn: process.env.SENTRY_DSN, + }, } // Filestore health check @@ -125,7 +125,7 @@ const settings = { if (process.env.HEALTH_CHECK_PROJECT_ID && process.env.HEALTH_CHECK_FILE_ID) { settings.health_check = { project_id: process.env.HEALTH_CHECK_PROJECT_ID, - file_id: process.env.HEALTH_CHECK_FILE_ID + file_id: process.env.HEALTH_CHECK_FILE_ID, } } diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js index 495edb2273..dc69eec23c 100644 --- a/services/filestore/test/acceptance/js/FilestoreApp.js +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -33,7 +33,7 @@ class FilestoreApp { this.server = this.app.listen( Settings.internal.filestore.port, 'localhost', - (err) => { + err => { if (err) { return reject(err) } @@ -82,7 +82,7 @@ class FilestoreApp { secretAccessKey: Settings.filestore.s3.secret, endpoint: Settings.filestore.s3.endpoint, s3ForcePathStyle: true, - signatureVersion: 'v4' + signatureVersion: 'v4', }) while (true) { @@ -91,7 +91,7 @@ class FilestoreApp { .putObject({ Key: 'startup', Body: '42', - Bucket: Settings.filestore.stores.user_files + Bucket: Settings.filestore.stores.user_files, }) .promise() } catch (err) { @@ -110,7 +110,7 @@ class FilestoreApp { // unload the app, as we may be doing this on multiple runs with // different settings, which affect startup in some cases const files = await fsReaddir(Path.resolve(__dirname, '../../../app/js')) - files.forEach((file) => { + files.forEach(file => { disrequire(Path.resolve(__dirname, '../../../app/js', file)) }) disrequire(Path.resolve(__dirname, '../../../app')) diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js index b7a2744ed3..47d252b07c 100644 --- a/services/filestore/test/acceptance/js/FilestoreTests.js +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -6,7 +6,7 @@ const Path = require('path') const FilestoreApp = require('./FilestoreApp') const TestHelper = require('./TestHelper') const rp = require('request-promise-native').defaults({ - resolveWithFullResponse: true + resolveWithFullResponse: true, }) const S3 = require('aws-sdk/clients/s3') const Stream = require('stream') @@ -29,7 +29,7 @@ if (!process.env.AWS_ACCESS_KEY_ID) { throw new Error('please provide credentials for the AWS S3 test server') } -process.on('unhandledRejection', (e) => { +process.on('unhandledRejection', e => { // eslint-disable-next-line no-console console.log('** Unhandled Promise Rejection **\n', e) throw e @@ -51,7 +51,7 @@ describe('Filestore', function () { const badSockets = [] for (const socket of stdout.split('\n')) { - const fields = socket.split(' 
').filter((part) => part !== '') + const fields = socket.split(' ').filter(part => part !== '') if ( fields.length > 2 && parseInt(fields[1]) && @@ -79,7 +79,7 @@ describe('Filestore', function () { } // redefine the test suite for every available backend - Object.keys(BackendSettings).forEach((backend) => { + Object.keys(BackendSettings).forEach(backend => { describe(backend, function () { let app, previousEgress, previousIngress, metricPrefix, projectId @@ -150,7 +150,7 @@ describe('Filestore', function () { constantFileContent = [ 'hello world', `line 2 goes here ${Math.random()}`, - 'there are 3 lines in all' + 'there are 3 lines in all', ].join('\n') await fsWriteFile(localFileReadPath, constantFileContent) @@ -204,8 +204,8 @@ describe('Filestore', function () { const options = { uri: fileUrl, headers: { - Range: 'bytes=0-8' - } + Range: 'bytes=0-8', + }, } const res = await rp.get(options) expect(res.body).to.equal('hello wor') @@ -215,8 +215,8 @@ describe('Filestore', function () { const options = { uri: fileUrl, headers: { - Range: 'bytes=4-10' - } + Range: 'bytes=4-10', + }, } const res = await rp.get(options) expect(res.body).to.equal('o world') @@ -240,9 +240,9 @@ describe('Filestore', function () { json: { source: { project_id: projectId, - file_id: fileId - } - } + file_id: fileId, + }, + }, } let response = await rp(opts) expect(response.statusCode).to.equal(200) @@ -288,8 +288,8 @@ describe('Filestore', function () { const options = { uri: fileUrl, headers: { - Range: 'bytes=0-8' - } + Range: 'bytes=0-8', + }, } await rp.get(options) const metric = await TestHelper.getMetric( @@ -305,25 +305,25 @@ describe('Filestore', function () { let fileIds, fileUrls, projectUrl const localFileReadPaths = [ '/tmp/filestore_acceptance_tests_file_read_1.txt', - '/tmp/filestore_acceptance_tests_file_read_2.txt' + '/tmp/filestore_acceptance_tests_file_read_2.txt', ] const constantFileContents = [ [ 'hello world', `line 2 goes here ${Math.random()}`, - 'there are 3 lines in all' + 'there are 3 lines in all', ].join('\n'), [ `for reference: ${Math.random()}`, 'cats are the best animals', - 'wombats are a close second' - ].join('\n') + 'wombats are a close second', + ].join('\n'), ] before(async function () { return Promise.all([ fsWriteFile(localFileReadPaths[0], constantFileContents[0]), - fsWriteFile(localFileReadPaths[1], constantFileContents[1]) + fsWriteFile(localFileReadPaths[1], constantFileContents[1]), ]) }) @@ -332,25 +332,25 @@ describe('Filestore', function () { fileIds = [ObjectId().toString(), ObjectId().toString()] fileUrls = [ `${projectUrl}/file/${fileIds[0]}`, - `${projectUrl}/file/${fileIds[1]}` + `${projectUrl}/file/${fileIds[1]}`, ] const writeStreams = [ request.post(fileUrls[0]), - request.post(fileUrls[1]) + request.post(fileUrls[1]), ] const readStreams = [ fs.createReadStream(localFileReadPaths[0]), - fs.createReadStream(localFileReadPaths[1]) + fs.createReadStream(localFileReadPaths[1]), ] // hack to consume the result to ensure the http request has been fully processed const resultStreams = [ fs.createWriteStream('/dev/null'), - fs.createWriteStream('/dev/null') + fs.createWriteStream('/dev/null'), ] return Promise.all([ pipeline(readStreams[0], writeStreams[0], resultStreams[0]), - pipeline(readStreams[1], writeStreams[1], resultStreams[1]) + pipeline(readStreams[1], writeStreams[1], resultStreams[1]), ]) }) @@ -433,7 +433,7 @@ describe('Filestore', function () { for (let i = 0; i < 5; i++) { // test is not 100% reliable, so repeat // create a new connection and 
have it time out before reading any data - await new Promise((resolve) => { + await new Promise(resolve => { const streamThatHangs = new Stream.PassThrough() const stream = request({ url: fileUrl, timeout: 1000 }) stream.pipe(streamThatHangs) @@ -461,24 +461,24 @@ describe('Filestore', function () { const s3ClientSettings = { credentials: { accessKeyId: process.env.AWS_ACCESS_KEY_ID, - secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY + secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY, }, endpoint: process.env.AWS_S3_ENDPOINT, sslEnabled: false, - s3ForcePathStyle: true + s3ForcePathStyle: true, } const s3 = new S3(s3ClientSettings) await s3 .createBucket({ - Bucket: bucketName + Bucket: bucketName, }) .promise() await s3 .upload({ Bucket: bucketName, Key: fileId, - Body: constantFileContent + Body: constantFileContent, }) .promise() }) @@ -648,9 +648,9 @@ describe('Filestore', function () { json: { source: { project_id: projectId, - file_id: fileId - } - } + file_id: fileId, + }, + }, } }) @@ -752,9 +752,8 @@ describe('Filestore', function () { describe('when sending a file', function () { beforeEach(async function () { const writeStream = request.post(fileUrl) - const readStream = streamifier.createReadStream( - constantFileContent - ) + const readStream = + streamifier.createReadStream(constantFileContent) // hack to consume the result to ensure the http request has been fully processed const resultStream = fs.createWriteStream('/dev/null') await pipeline(readStream, writeStream, resultStream) diff --git a/services/filestore/test/acceptance/js/TestConfig.js b/services/filestore/test/acceptance/js/TestConfig.js index ec80e45c1f..97791f0e51 100644 --- a/services/filestore/test/acceptance/js/TestConfig.js +++ b/services/filestore/test/acceptance/js/TestConfig.js @@ -7,7 +7,7 @@ function s3Config() { secret: process.env.AWS_SECRET_ACCESS_KEY, endpoint: process.env.AWS_S3_ENDPOINT, pathStyle: true, - partSize: 100 * 1024 * 1024 + partSize: 100 * 1024 * 1024, } } @@ -15,7 +15,7 @@ function s3Stores() { return { user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME, - public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME + public_files: process.env.AWS_S3_PUBLIC_FILES_BUCKET_NAME, } } @@ -24,11 +24,11 @@ function gcsConfig() { endpoint: { apiEndpoint: process.env.GCS_API_ENDPOINT, apiScheme: process.env.GCS_API_SCHEME, - projectId: 'fake' + projectId: 'fake', }, directoryKeyRegex: new RegExp('^[0-9a-fA-F]{24}/[0-9a-fA-F]{24}'), unlockBeforeDelete: false, // fake-gcs does not support this - deletedBucketSuffix: '-deleted' + deletedBucketSuffix: '-deleted', } } @@ -36,7 +36,7 @@ function gcsStores() { return { user_files: process.env.GCS_USER_FILES_BUCKET_NAME, template_files: process.env.GCS_TEMPLATE_FILES_BUCKET_NAME, - public_files: process.env.GCS_PUBLIC_FILES_BUCKET_NAME + public_files: process.env.GCS_PUBLIC_FILES_BUCKET_NAME, } } @@ -44,7 +44,7 @@ function fsStores() { return { user_files: Path.resolve(__dirname, '../../../user_files'), public_files: Path.resolve(__dirname, '../../../public_files'), - template_files: Path.resolve(__dirname, '../../../template_files') + template_files: Path.resolve(__dirname, '../../../template_files'), } } @@ -52,24 +52,24 @@ function fallbackStores(primaryConfig, fallbackConfig) { return { [primaryConfig.user_files]: fallbackConfig.user_files, [primaryConfig.public_files]: fallbackConfig.public_files, - [primaryConfig.template_files]: fallbackConfig.template_files + 
[primaryConfig.template_files]: fallbackConfig.template_files, } } module.exports = { FSPersistor: { backend: 'fs', - stores: fsStores() + stores: fsStores(), }, S3Persistor: { backend: 's3', s3: s3Config(), - stores: s3Stores() + stores: s3Stores(), }, GcsPersistor: { backend: 'gcs', gcs: gcsConfig(), - stores: gcsStores() + stores: gcsStores(), }, FallbackS3ToFSPersistor: { backend: 's3', @@ -77,8 +77,8 @@ module.exports = { stores: s3Stores(), fallback: { backend: 'fs', - buckets: fallbackStores(s3Stores(), fsStores()) - } + buckets: fallbackStores(s3Stores(), fsStores()), + }, }, FallbackFSToS3Persistor: { backend: 'fs', @@ -86,8 +86,8 @@ module.exports = { stores: fsStores(), fallback: { backend: 's3', - buckets: fallbackStores(fsStores(), s3Stores()) - } + buckets: fallbackStores(fsStores(), s3Stores()), + }, }, FallbackGcsToS3Persistor: { backend: 'gcs', @@ -96,8 +96,8 @@ module.exports = { s3: s3Config(), fallback: { backend: 's3', - buckets: fallbackStores(gcsStores(), s3Stores()) - } + buckets: fallbackStores(gcsStores(), s3Stores()), + }, }, FallbackS3ToGcsPersistor: { backend: 's3', @@ -107,7 +107,7 @@ module.exports = { gcs: gcsConfig(), fallback: { backend: 'gcs', - buckets: fallbackStores(s3Stores(), gcsStores()) - } - } + buckets: fallbackStores(s3Stores(), gcsStores()), + }, + }, } diff --git a/services/filestore/test/acceptance/js/TestHelper.js b/services/filestore/test/acceptance/js/TestHelper.js index d09b68a053..6773a2c400 100644 --- a/services/filestore/test/acceptance/js/TestHelper.js +++ b/services/filestore/test/acceptance/js/TestHelper.js @@ -1,6 +1,6 @@ const streamifier = require('streamifier') const rp = require('request-promise-native').defaults({ - resolveWithFullResponse: true + resolveWithFullResponse: true, }) const { expect } = require('chai') @@ -11,7 +11,7 @@ module.exports = { expectPersistorToHaveFile, expectPersistorNotToHaveFile, streamToString, - getMetric + getMetric, } async function getMetric(filestoreUrl, metric) { @@ -25,7 +25,7 @@ async function getMetric(filestoreUrl, metric) { function streamToString(stream) { const chunks = [] return new Promise((resolve, reject) => { - stream.on('data', (chunk) => chunks.push(chunk)) + stream.on('data', chunk => chunks.push(chunk)) stream.on('error', reject) stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8'))) stream.resume() diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js index f731c8f212..7809a0794e 100644 --- a/services/filestore/test/unit/js/FileControllerTests.js +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -17,9 +17,9 @@ describe('FileController', function () { const settings = { s3: { buckets: { - user_files: 'user_files' - } - } + user_files: 'user_files', + }, + }, } const fileSize = 1234 const fileStream = 'fileStream' @@ -33,7 +33,7 @@ describe('FileController', function () { PersistorManager = { sendStream: sinon.stub().yields(), copyObject: sinon.stub().resolves(), - deleteObject: sinon.stub().yields() + deleteObject: sinon.stub().yields(), } FileHandler = { @@ -43,12 +43,12 @@ describe('FileController', function () { deleteProject: sinon.stub().yields(), insertFile: sinon.stub().yields(), getDirectorySize: sinon.stub().yields(null, fileSize), - getRedirectUrl: sinon.stub().yields(null, null) + getRedirectUrl: sinon.stub().yields(null, null), } LocalFileWriter = {} stream = { - pipeline: sinon.stub() + pipeline: sinon.stub(), } FileController = SandboxedModule.require(modulePath, 
{ @@ -60,10 +60,10 @@ describe('FileController', function () { stream: stream, '@overleaf/settings': settings, '@overleaf/metrics': { - inc() {} - } + inc() {}, + }, }, - globals: { console } + globals: { console }, }) req = { @@ -73,19 +73,19 @@ describe('FileController', function () { query: {}, params: { project_id: projectId, - file_id: fileId + file_id: fileId, }, headers: {}, requestLogger: { setMessage: sinon.stub(), - addFields: sinon.stub() - } + addFields: sinon.stub(), + }, } res = { set: sinon.stub().returnsThis(), sendStatus: sinon.stub().returnsThis(), - status: sinon.stub().returnsThis() + status: sinon.stub().returnsThis(), } next = sinon.stub() @@ -104,7 +104,7 @@ describe('FileController', function () { it('should send a 200 if the cacheWarm param is true', function (done) { req.query.cacheWarm = true - res.sendStatus = (statusCode) => { + res.sendStatus = statusCode => { statusCode.should.equal(200) done() } @@ -165,7 +165,7 @@ describe('FileController', function () { bucket, key, format: undefined, - style: undefined + style: undefined, } }) @@ -220,7 +220,7 @@ describe('FileController', function () { new Errors.NotFoundError({ message: 'not found', info: {} }) ) - res.sendStatus = (code) => { + res.sendStatus = code => { expect(code).to.equal(404) done() } @@ -238,7 +238,7 @@ describe('FileController', function () { describe('insertFile', function () { it('should send bucket name key and res to PersistorManager', function (done) { - res.sendStatus = (code) => { + res.sendStatus = code => { expect(FileHandler.insertFile).to.have.been.calledWith(bucket, key, req) expect(code).to.equal(200) done() @@ -256,13 +256,13 @@ describe('FileController', function () { req.body = { source: { project_id: oldProjectId, - file_id: oldFileId - } + file_id: oldFileId, + }, } }) it('should send bucket name and both keys to PersistorManager', function (done) { - res.sendStatus = (code) => { + res.sendStatus = code => { code.should.equal(200) expect(PersistorManager.copyObject).to.have.been.calledWith( bucket, @@ -278,7 +278,7 @@ describe('FileController', function () { PersistorManager.copyObject.rejects( new Errors.NotFoundError({ message: 'not found', info: {} }) ) - res.sendStatus = (code) => { + res.sendStatus = code => { code.should.equal(404) done() } @@ -287,7 +287,7 @@ describe('FileController', function () { it('should send an error if there was an error', function (done) { PersistorManager.copyObject.rejects(error) - FileController.copyFile(req, res, (err) => { + FileController.copyFile(req, res, err => { expect(err).to.equal(error) done() }) @@ -296,7 +296,7 @@ describe('FileController', function () { describe('delete file', function () { it('should tell the file handler', function (done) { - res.sendStatus = (code) => { + res.sendStatus = code => { code.should.equal(204) expect(FileHandler.deleteFile).to.have.been.calledWith(bucket, key) done() @@ -313,7 +313,7 @@ describe('FileController', function () { describe('delete project', function () { it('should tell the file handler', function (done) { - res.sendStatus = (code) => { + res.sendStatus = code => { code.should.equal(204) expect(FileHandler.deleteProject).to.have.been.calledWith(bucket, key) done() @@ -331,10 +331,10 @@ describe('FileController', function () { describe('directorySize', function () { it('should return total directory size bytes', function (done) { FileController.directorySize(req, { - json: (result) => { + json: result => { expect(result['total bytes']).to.equal(fileSize) done() - } + }, }) }) diff --git 
a/services/filestore/test/unit/js/FileConverterTests.js b/services/filestore/test/unit/js/FileConverterTests.js index 923983a7ae..131bead22e 100644 --- a/services/filestore/test/unit/js/FileConverterTests.js +++ b/services/filestore/test/unit/js/FileConverterTests.js @@ -14,13 +14,13 @@ describe('FileConverter', function () { const errorMessage = 'guru meditation error' const Settings = { commands: { - convertCommandPrefix: [] - } + convertCommandPrefix: [], + }, } beforeEach(function () { SafeExec = { - promises: sinon.stub().resolves(destPath) + promises: sinon.stub().resolves(destPath), } const ObjectPersistor = { Errors } @@ -30,11 +30,11 @@ describe('FileConverter', function () { './SafeExec': SafeExec, '@overleaf/metrics': { inc: sinon.stub(), - Timer: sinon.stub().returns({ done: sinon.stub() }) + Timer: sinon.stub().returns({ done: sinon.stub() }), }, '@overleaf/settings': Settings, - '@overleaf/object-persistor': ObjectPersistor - } + '@overleaf/object-persistor': ObjectPersistor, + }, }) }) diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js index 23a0922c0e..3242ae3414 100644 --- a/services/filestore/test/unit/js/FileHandlerTests.js +++ b/services/filestore/test/unit/js/FileHandlerTests.js @@ -28,7 +28,7 @@ describe('FileHandler', function () { const redirectUrl = 'https://wombat.potato/giraffe' const readStream = { stream: 'readStream', - on: sinon.stub() + on: sinon.stub(), } beforeEach(function () { @@ -41,35 +41,35 @@ describe('FileHandler', function () { sendStream: sinon.stub().resolves(), insertFile: sinon.stub().resolves(), sendFile: sinon.stub().resolves(), - directorySize: sinon.stub().resolves() + directorySize: sinon.stub().resolves(), } LocalFileWriter = { // the callback style is used for detached cleanup calls deleteFile: sinon.stub().yields(), promises: { writeStream: sinon.stub().resolves(), - deleteFile: sinon.stub().resolves() - } + deleteFile: sinon.stub().resolves(), + }, } FileConverter = { promises: { convert: sinon.stub().resolves(), thumbnail: sinon.stub().resolves(), - preview: sinon.stub().resolves() - } + preview: sinon.stub().resolves(), + }, } KeyBuilder = { addCachingToKey: sinon.stub().returns(convertedKey), - getConvertedFolderKey: sinon.stub().returns(convertedFolderKey) + getConvertedFolderKey: sinon.stub().returns(convertedFolderKey), } ImageOptimiser = { promises: { - compressPng: sinon.stub().resolves() - } + compressPng: sinon.stub().resolves(), + }, } Settings = {} fs = { - createReadStream: sinon.stub().returns(readStream) + createReadStream: sinon.stub().returns(readStream), } const ObjectPersistor = { Errors } @@ -83,9 +83,9 @@ describe('FileHandler', function () { './ImageOptimiser': ImageOptimiser, '@overleaf/settings': Settings, '@overleaf/object-persistor': ObjectPersistor, - fs: fs + fs: fs, }, - globals: { console } + globals: { console }, }) }) @@ -93,7 +93,7 @@ describe('FileHandler', function () { const stream = 'stream' it('should send file to the filestore', function (done) { - FileHandler.insertFile(bucket, key, stream, (err) => { + FileHandler.insertFile(bucket, key, stream, err => { expect(err).not.to.exist expect(PersistorManager.sendStream).to.have.been.calledWith( bucket, @@ -105,7 +105,7 @@ describe('FileHandler', function () { }) it('should not make a delete request for the convertedKey folder', function (done) { - FileHandler.insertFile(bucket, key, stream, (err) => { + FileHandler.insertFile(bucket, key, stream, err => { expect(err).not.to.exist 
         expect(PersistorManager.deleteDirectory).not.to.have.been.called
         done()
@@ -116,7 +116,7 @@ describe('FileHandler', function () {
       KeyBuilder.getConvertedFolderKey.returns(
         '5ecba29f1a294e007d0bccb4/v/0/pdf'
       )
-      FileHandler.insertFile(bucket, key, stream, (err) => {
+      FileHandler.insertFile(bucket, key, stream, err => {
         expect(err).not.to.exist
         done()
       })
@@ -124,7 +124,7 @@ describe('FileHandler', function () {

     it('should throw an error when the key is in the wrong format', function (done) {
       KeyBuilder.getConvertedFolderKey.returns('wombat')
-      FileHandler.insertFile(bucket, key, stream, (err) => {
+      FileHandler.insertFile(bucket, key, stream, err => {
         expect(err).to.exist
         done()
       })
@@ -136,7 +136,7 @@ describe('FileHandler', function () {
     })

     it('should delete the convertedKey folder', function (done) {
-      FileHandler.insertFile(bucket, key, stream, (err) => {
+      FileHandler.insertFile(bucket, key, stream, err => {
         expect(err).not.to.exist
         expect(PersistorManager.deleteDirectory).to.have.been.calledWith(
           bucket,
@@ -150,7 +150,7 @@ describe('FileHandler', function () {

   describe('deleteFile', function () {
     it('should tell the filestore manager to delete the file', function (done) {
-      FileHandler.deleteFile(bucket, key, (err) => {
+      FileHandler.deleteFile(bucket, key, err => {
         expect(err).not.to.exist
         expect(PersistorManager.deleteObject).to.have.been.calledWith(
           bucket,
@@ -161,7 +161,7 @@ describe('FileHandler', function () {
     })

     it('should not tell the filestore manager to delete the cached folder', function (done) {
-      FileHandler.deleteFile(bucket, key, (err) => {
+      FileHandler.deleteFile(bucket, key, err => {
         expect(err).not.to.exist
         expect(PersistorManager.deleteDirectory).not.to.have.been.called
         done()
@@ -172,7 +172,7 @@ describe('FileHandler', function () {
       KeyBuilder.getConvertedFolderKey.returns(
         '5ecba29f1a294e007d0bccb4/v/0/pdf'
       )
-      FileHandler.deleteFile(bucket, key, (err) => {
+      FileHandler.deleteFile(bucket, key, err => {
         expect(err).not.to.exist
         done()
       })
@@ -180,7 +180,7 @@ describe('FileHandler', function () {

     it('should throw an error when the key is in the wrong format', function (done) {
       KeyBuilder.getConvertedFolderKey.returns('wombat')
-      FileHandler.deleteFile(bucket, key, (err) => {
+      FileHandler.deleteFile(bucket, key, err => {
         expect(err).to.exist
         done()
       })
@@ -192,7 +192,7 @@ describe('FileHandler', function () {
     })

     it('should delete the convertedKey folder', function (done) {
-      FileHandler.deleteFile(bucket, key, (err) => {
+      FileHandler.deleteFile(bucket, key, err => {
         expect(err).not.to.exist
         expect(PersistorManager.deleteDirectory).to.have.been.calledWith(
           bucket,
@@ -206,7 +206,7 @@ describe('FileHandler', function () {

   describe('deleteProject', function () {
     it('should tell the filestore manager to delete the folder', function (done) {
-      FileHandler.deleteProject(bucket, projectKey, (err) => {
+      FileHandler.deleteProject(bucket, projectKey, err => {
         expect(err).not.to.exist
         expect(PersistorManager.deleteDirectory).to.have.been.calledWith(
           bucket,
@@ -217,7 +217,7 @@ describe('FileHandler', function () {
     })

     it('should throw an error when the key is in the wrong format', function (done) {
-      FileHandler.deleteProject(bucket, 'wombat', (err) => {
+      FileHandler.deleteProject(bucket, 'wombat', err => {
        expect(err).to.exist
        done()
      })
@@ -235,7 +235,7 @@

     it('should pass options through to PersistorManager', function (done) {
       const options = { start: 0, end: 8 }
-      FileHandler.getFile(bucket, key, options, (err) => {
+      FileHandler.getFile(bucket, key, options, err => {
         expect(err).not.to.exist
         expect(PersistorManager.getObjectStream).to.have.been.calledWith(
           bucket,
@@ -305,7 +305,7 @@ describe('FileHandler', function () {

     describe('when a style is defined', function () {
       it('generates a thumbnail when requested', function (done) {
-        FileHandler.getFile(bucket, key, { style: 'thumbnail' }, (err) => {
+        FileHandler.getFile(bucket, key, { style: 'thumbnail' }, err => {
           expect(err).not.to.exist
           expect(FileConverter.promises.thumbnail).to.have.been.called
           expect(FileConverter.promises.preview).not.to.have.been.called
@@ -314,7 +314,7 @@ describe('FileHandler', function () {
       })

       it('generates a preview when requested', function (done) {
-        FileHandler.getFile(bucket, key, { style: 'preview' }, (err) => {
+        FileHandler.getFile(bucket, key, { style: 'preview' }, err => {
           expect(err).not.to.exist
           expect(FileConverter.promises.thumbnail).not.to.have.been.called
           expect(FileConverter.promises.preview).to.have.been.called
@@ -329,8 +329,8 @@ describe('FileHandler', function () {
       Settings.filestore = {
         allowRedirects: true,
         stores: {
-          userFiles: bucket
-        }
+          userFiles: bucket,
+        },
       }
     })
@@ -385,7 +385,7 @@ describe('FileHandler', function () {

   describe('getDirectorySize', function () {
     it('should call the filestore manager to get directory size', function (done) {
-      FileHandler.getDirectorySize(bucket, key, (err) => {
+      FileHandler.getDirectorySize(bucket, key, err => {
         expect(err).not.to.exist
         expect(PersistorManager.directorySize).to.have.been.calledWith(
           bucket,
diff --git a/services/filestore/test/unit/js/ImageOptimiserTests.js b/services/filestore/test/unit/js/ImageOptimiserTests.js
index 3a0007342f..581b191ad3 100644
--- a/services/filestore/test/unit/js/ImageOptimiserTests.js
+++ b/services/filestore/test/unit/js/ImageOptimiserTests.js
@@ -11,29 +11,29 @@ describe('ImageOptimiser', function () {

   beforeEach(function () {
     SafeExec = {
-      promises: sinon.stub().resolves()
+      promises: sinon.stub().resolves(),
     }
     logger = {
-      warn: sinon.stub()
+      warn: sinon.stub(),
     }
     ImageOptimiser = SandboxedModule.require(modulePath, {
       requires: {
         './SafeExec': SafeExec,
         'logger-sharelatex': logger,
         '@overleaf/metrics': {
-          Timer: sinon.stub().returns({ done: sinon.stub() })
-        }
-      }
+          Timer: sinon.stub().returns({ done: sinon.stub() }),
+        },
+      },
     })
   })

   describe('compressPng', function () {
     it('should convert the file', function (done) {
-      ImageOptimiser.compressPng(sourcePath, (err) => {
+      ImageOptimiser.compressPng(sourcePath, err => {
         expect(err).not.to.exist
         expect(SafeExec.promises).to.have.been.calledWith([
           'optipng',
-          sourcePath
+          sourcePath,
         ])
         done()
       })
@@ -41,7 +41,7 @@

     it('should return the error', function (done) {
       SafeExec.promises.rejects('wombat herding failure')
-      ImageOptimiser.compressPng(sourcePath, (err) => {
+      ImageOptimiser.compressPng(sourcePath, err => {
         expect(err.toString()).to.equal('wombat herding failure')
         done()
       })
@@ -54,7 +54,7 @@

     beforeEach(function (done) {
       SafeExec.promises.rejects(expectedError)
-      ImageOptimiser.compressPng(sourcePath, (err) => {
+      ImageOptimiser.compressPng(sourcePath, err => {
         error = err
         done()
       })
diff --git a/services/filestore/test/unit/js/KeybuilderTests.js b/services/filestore/test/unit/js/KeybuilderTests.js
index 883364fb22..96f4d67904 100644
--- a/services/filestore/test/unit/js/KeybuilderTests.js
+++ b/services/filestore/test/unit/js/KeybuilderTests.js
@@ -8,7 +8,7 @@ describe('KeybuilderTests', function () {

   beforeEach(function () {
     KeyBuilder = SandboxedModule.require(modulePath, {
-      requires: { '@overleaf/settings': {} }
+      requires: { '@overleaf/settings': {} },
     })
   })
@@ -28,7 +28,7 @@ describe('KeybuilderTests', function () {
     it('should add format first, then style', function () {
       const opts = {
         style: 'thumbnail',
-        format: 'png'
+        format: 'png',
       }
       const newKey = KeyBuilder.addCachingToKey(key, opts)
       newKey.should.equal(`${key}-converted-cache/format-png-style-thumbnail`)
diff --git a/services/filestore/test/unit/js/LocalFileWriterTests.js b/services/filestore/test/unit/js/LocalFileWriterTests.js
index 3635b9a994..d5fdb92a63 100644
--- a/services/filestore/test/unit/js/LocalFileWriterTests.js
+++ b/services/filestore/test/unit/js/LocalFileWriterTests.js
@@ -17,10 +17,10 @@ describe('LocalFileWriter', function () {

   beforeEach(function () {
     fs = {
       createWriteStream: sinon.stub().returns(writeStream),
-      unlink: sinon.stub().yields()
+      unlink: sinon.stub().yields(),
     }
     stream = {
-      pipeline: sinon.stub().yields()
+      pipeline: sinon.stub().yields(),
     }
     const ObjectPersistor = { Errors }
@@ -32,10 +32,10 @@ describe('LocalFileWriter', function () {
         '@overleaf/settings': settings,
         '@overleaf/metrics': {
           inc: sinon.stub(),
-          Timer: sinon.stub().returns({ done: sinon.stub() })
+          Timer: sinon.stub().returns({ done: sinon.stub() }),
         },
-        '@overleaf/object-persistor': ObjectPersistor
-      }
+        '@overleaf/object-persistor': ObjectPersistor,
+      },
     })
   })
@@ -57,7 +57,7 @@ describe('LocalFileWriter', function () {
     })

     it('should wrap the error', function () {
-      LocalFileWriter.writeStream(readStream, filename, (err) => {
+      LocalFileWriter.writeStream(readStream, filename, err => {
         expect(err).to.exist
         expect(err.cause).to.equal(error)
       })
@@ -73,7 +73,7 @@ describe('LocalFileWriter', function () {

   describe('deleteFile', function () {
     it('should unlink the file', function (done) {
-      LocalFileWriter.deleteFile(fsPath, (err) => {
+      LocalFileWriter.deleteFile(fsPath, err => {
         expect(err).not.to.exist
         expect(fs.unlink).to.have.been.calledWith(fsPath)
         done()
@@ -81,7 +81,7 @@ describe('LocalFileWriter', function () {
     })

     it('should not call unlink with an empty path', function (done) {
-      LocalFileWriter.deleteFile('', (err) => {
+      LocalFileWriter.deleteFile('', err => {
         expect(err).not.to.exist
         expect(fs.unlink).not.to.have.been.called
         done()
@@ -92,7 +92,7 @@ describe('LocalFileWriter', function () {
       const error = new Error('file not found')
       error.code = 'ENOENT'
       fs.unlink = sinon.stub().yields(error)
-      LocalFileWriter.deleteFile(fsPath, (err) => {
+      LocalFileWriter.deleteFile(fsPath, err => {
         expect(err).not.to.exist
         done()
       })
@@ -101,7 +101,7 @@ describe('LocalFileWriter', function () {
     it('should wrap the error', function (done) {
       const error = new Error('failed to reticulate splines')
       fs.unlink = sinon.stub().yields(error)
-      LocalFileWriter.deleteFile(fsPath, (err) => {
+      LocalFileWriter.deleteFile(fsPath, err => {
         expect(err).to.exist
         expect(err.cause).to.equal(error)
         done()
diff --git a/services/filestore/test/unit/js/SafeExecTests.js b/services/filestore/test/unit/js/SafeExecTests.js
index 0bcfb44133..169c9fbf37 100644
--- a/services/filestore/test/unit/js/SafeExecTests.js
+++ b/services/filestore/test/unit/js/SafeExecTests.js
@@ -18,8 +18,8 @@ describe('SafeExec', function () {
       globals: { process },
       requires: {
         '@overleaf/settings': settings,
-        '@overleaf/object-persistor': ObjectPersistor
-      }
+        '@overleaf/object-persistor': ObjectPersistor,
+      },
     })
   })
@@ -35,14 +35,14 @@ describe('SafeExec', function () {

     it('should error when conversions are disabled', function (done) {
       settings.enableConversions = false
-      safeExec(['/bin/echo', 'hello'], options, (err) => {
+      safeExec(['/bin/echo', 'hello'], options, err => {
         expect(err).to.exist
         done()
       })
     })

     it('should execute a command with non-zero exit status', function (done) {
-      safeExec(['/usr/bin/env', 'false'], options, (err) => {
+      safeExec(['/usr/bin/env', 'false'], options, err => {
         expect(err).to.exist
         expect(err.name).to.equal('FailedCommandError')
         expect(err.code).to.equal(1)
@@ -53,7 +53,7 @@ describe('SafeExec', function () {
     })

     it('should handle an invalid command', function (done) {
-      safeExec(['/bin/foobar'], options, (err) => {
+      safeExec(['/bin/foobar'], options, err => {
         err.code.should.equal('ENOENT')
         done()
       })
@@ -63,7 +63,7 @@ describe('SafeExec', function () {
       safeExec(
         ['/bin/sleep', '10'],
         { timeout: 500, killSignal: 'SIGTERM' },
-        (err) => {
+        err => {
           expect(err).to.exist
           expect(err.name).to.equal('FailedCommandError')
           expect(err.code).to.equal('SIGTERM')
diff --git a/services/filestore/test/unit/js/SettingsTests.js b/services/filestore/test/unit/js/SettingsTests.js
index 7ddff93c5c..a7092cb543 100644
--- a/services/filestore/test/unit/js/SettingsTests.js
+++ b/services/filestore/test/unit/js/SettingsTests.js
@@ -8,12 +8,12 @@ describe('Settings', function () {
     const s3Settings = {
       bucket1: {
         auth_key: 'bucket1_key',
-        auth_secret: 'bucket1_secret'
-      }
+        auth_secret: 'bucket1_secret',
+      },
     }
     process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3Settings)
     const settings = SandboxedModule.require('@overleaf/settings', {
-      globals: { console, process }
+      globals: { console, process },
     })
     expect(settings.filestore.s3.bucketCreds).to.deep.equal(s3Settings)
   })

From 1ac3c0763d6c9dd5c39fc7728c48ea583f370c9e Mon Sep 17 00:00:00 2001
From: Jakob Ackermann
Date: Tue, 13 Jul 2021 12:21:12 +0100
Subject: [PATCH 554/555] [misc] temporary override a few new/changed eslint rules

---
 services/filestore/.eslintrc | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/services/filestore/.eslintrc b/services/filestore/.eslintrc
index 1c14f50efe..a97661b15f 100644
--- a/services/filestore/.eslintrc
+++ b/services/filestore/.eslintrc
@@ -5,7 +5,7 @@
   "extends": [
     "eslint:recommended",
     "standard",
-    "prettier",
+    "prettier"
   ],
   "parserOptions": {
     "ecmaVersion": 2018
@@ -20,6 +20,19 @@
     "mocha": true
   },
   "rules": {
+    // TODO(das7pad): remove overrides after fixing all the violations manually (https://github.com/overleaf/issues/issues/3882#issuecomment-878999671)
+    // START of temporary overrides
+    "array-callback-return": "off",
+    "no-dupe-else-if": "off",
+    "no-var": "off",
+    "no-empty": "off",
+    "node/handle-callback-err": "off",
+    "no-loss-of-precision": "off",
+    "node/no-callback-literal": "off",
+    "node/no-path-concat": "off",
+    "prefer-regex-literals": "off",
+    // END of temporary overrides
+
     // Swap the no-unused-expressions rule with a more chai-friendly one
     "no-unused-expressions": 0,
     "chai-friendly/no-unused-expressions": "error",

From 84f5ae1dfc5c47e72f6d87a4c356071f0bbca709 Mon Sep 17 00:00:00 2001
From: Jakob Ackermann
Date: Tue, 13 Jul 2021 12:26:34 +0100
Subject: [PATCH 555/555] [misc] upgrade node version to latest v12 LTS version 12.22.3

---
 services/filestore/.nvmrc          | 2 +-
 services/filestore/Dockerfile      | 2 +-
 services/filestore/buildscript.txt | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/services/filestore/.nvmrc b/services/filestore/.nvmrc
index ec101caa3f..5a80a7e912 100644
--- a/services/filestore/.nvmrc
+++ b/services/filestore/.nvmrc
@@ -1 +1 @@
-12.20.1
+12.22.3
diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile
index 6df4c7de58..46a5510544 100644
--- a/services/filestore/Dockerfile
+++ b/services/filestore/Dockerfile
@@ -2,7 +2,7 @@
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment

-FROM node:12.20.1 as base
+FROM node:12.22.3 as base

 WORKDIR /app
 COPY install_deps.sh /app
diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt
index 31316376cd..27d5d3f189 100644
--- a/services/filestore/buildscript.txt
+++ b/services/filestore/buildscript.txt
@@ -4,6 +4,6 @@ filestore
 --docker-repos=gcr.io/overleaf-ops
 --env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_BUCKET_NAME=fake_user_files,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake_template_files,AWS_S3_PUBLIC_FILES_BUCKET_NAME=fake_public_files,GCS_USER_FILES_BUCKET_NAME=fake_userfiles,GCS_TEMPLATE_FILES_BUCKET_NAME=fake_templatefiles,GCS_PUBLIC_FILES_BUCKET_NAME=fake_publicfiles
 --env-pass-through=
---node-version=12.20.1
+--node-version=12.22.3
 --public-repo=True
 --script-version=3.11.0